diff --git a/pinecone/__init__.py b/pinecone/__init__.py index f3909e0b..8ade68bc 100644 --- a/pinecone/__init__.py +++ b/pinecone/__init__.py @@ -1,15 +1,13 @@ """ .. include:: ../README.md """ -from pinecone.core.utils.constants import CLIENT_VERSION as __version__ from .config import * from .exceptions import * -from .info import * from .manage import * from .index import * try: - from .core.grpc.index_grpc import * + from .grpc.index_grpc import * except ImportError: pass # ignore for non-[grpc] installations diff --git a/pinecone/config.py b/pinecone/config.py index dfde4b97..f4bc276c 100644 --- a/pinecone/config.py +++ b/pinecone/config.py @@ -11,9 +11,8 @@ from urllib3.connection import HTTPConnection from pinecone.core.client.exceptions import ApiKeyError -from pinecone.core.api_action import ActionAPI, WhoAmIResponse -from pinecone.core.utils import warn_deprecated, check_kwargs -from pinecone.core.utils.constants import ( +from pinecone.utils import warn_deprecated, check_kwargs +from pinecone.utils.constants import ( CLIENT_VERSION, PARENT_LOGGER_NAME, DEFAULT_PARENT_LOGGER_LEVEL, @@ -93,21 +92,6 @@ def reset(self, config_file=None, **kwargs): self._config = config - # load project_name etc. 
from whoami api - action_api = ActionAPI(host=config.controller_host, api_key=config.api_key) - try: - whoami_response = action_api.whoami() - except requests.exceptions.RequestException: - # proceed with default values; reset() may be called later w/ correct values - whoami_response = WhoAmIResponse() - - if not self._config.project_name: - config = config._replace( - **self._preprocess_and_validate_config({"project_name": whoami_response.projectname}) - ) - - self._config = config - # Set OpenAPI client config default_openapi_config = OpenApiConfiguration.get_default_copy() default_openapi_config.ssl_ca_cert = certifi.where() @@ -239,7 +223,6 @@ def init( api_key: str = None, host: str = None, environment: str = None, - project_name: str = None, log_level: str = None, openapi_config: OpenApiConfiguration = None, config: str = "~/.pinecone", @@ -250,14 +233,12 @@ def init( :param api_key: Required if not set in config file or by environment variable ``PINECONE_API_KEY``. :param host: Optional. Controller host. :param environment: Optional. Deployment environment. - :param project_name: Optional. Pinecone project name. Overrides the value that is otherwise looked up and used from the Pinecone backend. :param openapi_config: Optional. Set OpenAPI client configuration. :param config: Optional. An INI configuration file. :param log_level: Deprecated since v2.0.2 [Will be removed in v3.0.0]; use the standard logging module to manage logger "pinecone" instead. 
""" check_kwargs(init, kwargs) Config.reset( - project_name=project_name, api_key=api_key, controller_host=host, environment=environment, diff --git a/pinecone/core/__init__.py b/pinecone/core/__init__.py index 8b137891..e69de29b 100644 --- a/pinecone/core/__init__.py +++ b/pinecone/core/__init__.py @@ -1 +0,0 @@ - diff --git a/pinecone/core/api_action.py b/pinecone/core/api_action.py deleted file mode 100644 index 12fc74f3..00000000 --- a/pinecone/core/api_action.py +++ /dev/null @@ -1,37 +0,0 @@ -from typing import NamedTuple -from pinecone.core.api_base import BaseAPI - -__all__ = ["ActionAPI", "VersionResponse", "WhoAmIResponse"] - -from pinecone.core.utils import get_version - - -class WhoAmIResponse(NamedTuple): - username: str = "UNKNOWN" - user_label: str = "UNKNOWN" - projectname: str = "UNKNOWN" - - -class VersionResponse(NamedTuple): - server: str - client: str - - -class ActionAPI(BaseAPI): - """User related API calls.""" - - client_version = get_version() - - def whoami(self) -> WhoAmIResponse: - """Returns user information.""" - response = self.get("/actions/whoami") - return WhoAmIResponse( - username=response.get("user_name", "UNDEFINED"), - projectname=response.get("project_name", "UNDEFINED"), - user_label=response.get("user_label", "UNDEFINED"), - ) - - def version(self) -> VersionResponse: - """Returns version information.""" - response = self.get("/actions/version") - return VersionResponse(server=response.get("version", "UNKNOWN"), client=self.client_version) diff --git a/pinecone/core/api_base.py b/pinecone/core/api_base.py deleted file mode 100644 index 93a3a7ae..00000000 --- a/pinecone/core/api_base.py +++ /dev/null @@ -1,35 +0,0 @@ -import requests -from requests.exceptions import HTTPError - - -class BaseAPI: - """Base class for HTTP API calls.""" - - def __init__(self, host: str, api_key: str = None): - self.host = host - self.api_key = api_key - - @property - def headers(self): - return {"api-key": self.api_key} - - def 
_send_request(self, request_handler, url, **kwargs): - response = request_handler("{0}{1}".format(self.host, url), headers=self.headers, **kwargs) - try: - response.raise_for_status() - except HTTPError as e: - e.args = e.args + (response.text,) - raise e - return response.json() - - def get(self, url: str, params: dict = None): - return self._send_request(requests.get, url, params=params) - - def post(self, url: str, json: dict = None): - return self._send_request(requests.post, url, json=json) - - def patch(self, url: str, json: dict = None): - return self._send_request(requests.patch, url, json=json) - - def delete(self, url: str): - return self._send_request(requests.delete, url) diff --git a/pinecone/core/client/__init__.py b/pinecone/core/client/__init__.py index 584d1219..3e4f7ad7 100644 --- a/pinecone/core/client/__init__.py +++ b/pinecone/core/client/__init__.py @@ -1,28 +1,65 @@ +# coding: utf-8 + # flake8: noqa """ Pinecone API - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) The version of the OpenAPI document: version not set Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 __version__ = "1.0.0" +# import apis into sdk package +from pinecone.core.client.api.index_operations_api import IndexOperationsApi +from pinecone.core.client.api.vector_operations_api import VectorOperationsApi + # import ApiClient +from pinecone.core.client.api_response import ApiResponse from pinecone.core.client.api_client import ApiClient - -# import Configuration from pinecone.core.client.configuration import Configuration - -# import exceptions from pinecone.core.client.exceptions import OpenApiException -from pinecone.core.client.exceptions import ApiAttributeError from pinecone.core.client.exceptions import ApiTypeError from pinecone.core.client.exceptions import ApiValueError from pinecone.core.client.exceptions import ApiKeyError +from pinecone.core.client.exceptions import ApiAttributeError from pinecone.core.client.exceptions import ApiException + +# import models into sdk package +from pinecone.core.client.models.approximated_config import ApproximatedConfig +from pinecone.core.client.models.collection_meta import CollectionMeta +from pinecone.core.client.models.create_collection_request import CreateCollectionRequest +from pinecone.core.client.models.create_request import CreateRequest +from pinecone.core.client.models.create_request_index_config import CreateRequestIndexConfig +from pinecone.core.client.models.delete_request import DeleteRequest +from pinecone.core.client.models.describe_index_stats_request import DescribeIndexStatsRequest +from pinecone.core.client.models.describe_index_stats_response import DescribeIndexStatsResponse +from pinecone.core.client.models.fetch_response import FetchResponse +from pinecone.core.client.models.hnsw_config import HnswConfig +from pinecone.core.client.models.index_meta import IndexMeta +from pinecone.core.client.models.index_meta_database import IndexMetaDatabase +from pinecone.core.client.models.index_meta_database_index_config import IndexMetaDatabaseIndexConfig +from 
pinecone.core.client.models.index_meta_status import IndexMetaStatus +from pinecone.core.client.models.list_indexes200_response import ListIndexes200Response +from pinecone.core.client.models.namespace_summary import NamespaceSummary +from pinecone.core.client.models.patch_request import PatchRequest +from pinecone.core.client.models.protobuf_any import ProtobufAny +from pinecone.core.client.models.protobuf_null_value import ProtobufNullValue +from pinecone.core.client.models.query_request import QueryRequest +from pinecone.core.client.models.query_response import QueryResponse +from pinecone.core.client.models.query_vector import QueryVector +from pinecone.core.client.models.rpc_status import RpcStatus +from pinecone.core.client.models.scored_vector import ScoredVector +from pinecone.core.client.models.single_query_results import SingleQueryResults +from pinecone.core.client.models.sparse_values import SparseValues +from pinecone.core.client.models.update_request import UpdateRequest +from pinecone.core.client.models.upsert_request import UpsertRequest +from pinecone.core.client.models.upsert_response import UpsertResponse +from pinecone.core.client.models.vector import Vector diff --git a/pinecone/core/client/api/__init__.py b/pinecone/core/client/api/__init__.py index 14e30437..dc3b0a5b 100644 --- a/pinecone/core/client/api/__init__.py +++ b/pinecone/core/client/api/__init__.py @@ -1,3 +1,5 @@ -# do not import all apis into this module because that uses a lot of memory and stack frames -# if you need the ability to import all apis from one package, import them with -# from pinecone.core.client.apis import IndexOperationsApi +# flake8: noqa + +# import apis into api package +from pinecone.core.client.api.index_operations_api import IndexOperationsApi +from pinecone.core.client.api.vector_operations_api import VectorOperationsApi diff --git a/pinecone/core/client/api/index_operations_api.py b/pinecone/core/client/api/index_operations_api.py index 
5da15f3e..5c33010f 100644 --- a/pinecone/core/client/api/index_operations_api.py +++ b/pinecone/core/client/api/index_operations_api.py @@ -1,903 +1,1411 @@ +# coding: utf-8 + """ - Pinecone index operations API + Pinecone API - # noqa: E501 + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + The version of the OpenAPI document: version not set Contact: support@pinecone.io -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.api_client import ApiClient, Endpoint as _Endpoint -from pinecone.core.client.model_utils import ( # noqa: F401 - check_allowed_values, - check_validations, - date, - datetime, - file_type, - none_type, - validate_and_convert_types, -) -from pinecone.core.client.model.collection_meta import CollectionMeta -from pinecone.core.client.model.create_collection_request import CreateCollectionRequest -from pinecone.core.client.model.create_request import CreateRequest -from pinecone.core.client.model.index_meta import IndexMeta -from pinecone.core.client.model.patch_request import PatchRequest - - -class IndexOperationsApi(object): +import io +import warnings + +from pydantic import validate_arguments, ValidationError + +from typing_extensions import Annotated +from pydantic import Field, StrictStr + +from typing import List, Optional + +from pinecone.core.client.models.collection_meta import CollectionMeta +from pinecone.core.client.models.create_collection_request import CreateCollectionRequest +from pinecone.core.client.models.create_request import CreateRequest +from pinecone.core.client.models.index_meta import IndexMeta +from pinecone.core.client.models.list_indexes200_response import ListIndexes200Response +from pinecone.core.client.models.patch_request import PatchRequest + +from pinecone.core.client.api_client import ApiClient 
+from pinecone.core.client.api_response import ApiResponse +from pinecone.core.client.exceptions import ApiTypeError, ApiValueError # noqa: F401 + + +class IndexOperationsApi: """NOTE: This class is auto generated by OpenAPI Generator Ref: https://openapi-generator.tech Do not edit the class manually. """ - def __init__(self, api_client=None): + def __init__(self, api_client=None) -> None: if api_client is None: - api_client = ApiClient() + api_client = ApiClient.get_default() self.api_client = api_client - def __configure_index(self, index_name, **kwargs): - """configure_index # noqa: E501 - - This operation specifies the pod type and number of replicas for an index. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.configure_index(index_name, async_req=True) - >>> result = thread.get() - - Args: - index_name (str): The name of the index - - Keyword Args: - patch_request (PatchRequest): The desired pod type and replica configuration for the index.. [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. 
- async_req (bool): execute request asynchronously - - Returns: - str - If the method is called asynchronously, returns the request - thread. - """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_host_index"] = kwargs.get("_host_index") - kwargs["index_name"] = index_name - return self.call_with_http_info(**kwargs) - - self.configure_index = _Endpoint( - settings={ - "response_type": (str,), - "auth": ["ApiKeyAuth"], - "endpoint_path": "/databases/{indexName}", - "operation_id": "configure_index", - "http_method": "PATCH", - "servers": [ - { - "url": "https://controller.{environment}.pinecone.io", - "description": "No description provided", - "variables": { - "environment": { - "description": "No description provided", - "default_value": "unknown", - } - }, - }, - ], - }, - params_map={ - "all": [ - "index_name", - "patch_request", - ], - "required": [ - "index_name", - ], - "nullable": [], - "enum": [], - "validation": [], - }, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": { - "index_name": (str,), - "patch_request": (PatchRequest,), - }, - "attribute_map": { - "index_name": "indexName", - }, - "location_map": { - "index_name": "path", - "patch_request": "body", - }, - "collection_format_map": {}, - }, - headers_map={"accept": ["text/plain"], "content_type": ["application/json"]}, - api_client=api_client, - callable=__configure_index, + @validate_arguments + def configure_index( + self, + index_name: Annotated[StrictStr, Field(..., description="The name of the index")], + patch_request: Annotated[ + Optional[PatchRequest], Field(description="The desired pod type 
and replica configuration for the index.") + ] = None, + **kwargs, + ) -> IndexMeta: # noqa: E501 + """configure_index # noqa: E501 + + This operation specifies the pod type and number of replicas for an index. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.configure_index(index_name, patch_request, async_req=True) + >>> result = thread.get() + + :param index_name: The name of the index (required) + :type index_name: str + :param patch_request: The desired pod type and replica configuration for the index. + :type patch_request: PatchRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _request_timeout: timeout setting for this request. + If one number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: IndexMeta + """ + kwargs["_return_http_data_only"] = True + if "_preload_content" in kwargs: + message = "Error! Please call the configure_index_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 + raise ValueError(message) + return self.configure_index_with_http_info(index_name, patch_request, **kwargs) # noqa: E501 + + @validate_arguments + def configure_index_with_http_info( + self, + index_name: Annotated[StrictStr, Field(..., description="The name of the index")], + patch_request: Annotated[ + Optional[PatchRequest], Field(description="The desired pod type and replica configuration for the index.") + ] = None, + **kwargs, + ) -> ApiResponse: # noqa: E501 + """configure_index # noqa: E501 + + This operation specifies the pod type and number of replicas for an index. # noqa: E501 + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.configure_index_with_http_info(index_name, patch_request, async_req=True) + >>> result = thread.get() + + :param index_name: The name of the index (required) + :type index_name: str + :param patch_request: The desired pod type and replica configuration for the index. + :type patch_request: PatchRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the ApiResponse.data will + be set to none and raw_data will store the + HTTP response body without reading/decoding. + Default is True. + :type _preload_content: bool, optional + :param _return_http_data_only: response data instead of ApiResponse + object with status code, headers, etc + :type _return_http_data_only: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. + :type _request_auth: dict, optional + :type _content_type: string, optional: force content-type for the request + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(IndexMeta, status_code(int), headers(HTTPHeaderDict)) + """ + + _hosts = ["https://controller.{environment}.pinecone.io"] + _host = _hosts[0] + if kwargs.get("_host_index"): + _host_index = int(kwargs.get("_host_index")) + if _host_index < 0 or _host_index >= len(_hosts): + raise ApiValueError("Invalid host index. 
Must be 0 <= index < %s" % len(_host)) + _host = _hosts[_host_index] + _params = locals() + + _all_params = ["index_name", "patch_request"] + _all_params.extend( + [ + "async_req", + "_return_http_data_only", + "_preload_content", + "_request_timeout", + "_request_auth", + "_content_type", + "_headers", + ] + ) + + # validate the arguments + for _key, _val in _params["kwargs"].items(): + if _key not in _all_params and _key != "_host_index": + raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method configure_index" % _key) + _params[_key] = _val + del _params["kwargs"] + + _collection_formats = {} + + # process the path parameters + _path_params = {} + if _params["index_name"]: + _path_params["indexName"] = _params["index_name"] + + # process the query parameters + _query_params = [] + # process the header parameters + _header_params = dict(_params.get("_headers", {})) + # process the form parameters + _form_params = [] + _files = {} + # process the body parameter + _body_params = None + if _params["patch_request"] is not None: + _body_params = _params["patch_request"] + + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # noqa: E501 + + # set the HTTP header `Content-Type` + _content_types_list = _params.get( + "_content_type", self.api_client.select_header_content_type(["application/json"]) + ) + if _content_types_list: + _header_params["Content-Type"] = _content_types_list + + # authentication setting + _auth_settings = ["ApiKeyAuth"] # noqa: E501 + + _response_types_map = { + "202": "IndexMeta", + "400": None, + "404": None, + "500": None, + } + + return self.api_client.call_api( + "/databases/{indexName}", + "PATCH", + _path_params, + _query_params, + _header_params, + body=_body_params, + post_params=_form_params, + files=_files, + response_types_map=_response_types_map, + auth_settings=_auth_settings, + async_req=_params.get("async_req"), + 
_return_http_data_only=_params.get("_return_http_data_only"), # noqa: E501 + _preload_content=_params.get("_preload_content", True), + _request_timeout=_params.get("_request_timeout"), + _host=_host, + collection_formats=_collection_formats, + _request_auth=_params.get("_request_auth"), + ) + + @validate_arguments + def create_collection( + self, create_collection_request: Optional[CreateCollectionRequest] = None, **kwargs + ) -> str: # noqa: E501 + """create_collection # noqa: E501 + + This operation creates a Pinecone collection. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_collection(create_collection_request, async_req=True) + >>> result = thread.get() + + :param create_collection_request: + :type create_collection_request: CreateCollectionRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _request_timeout: timeout setting for this request. + If one number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: str + """ + kwargs["_return_http_data_only"] = True + if "_preload_content" in kwargs: + message = "Error! Please call the create_collection_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 + raise ValueError(message) + return self.create_collection_with_http_info(create_collection_request, **kwargs) # noqa: E501 + + @validate_arguments + def create_collection_with_http_info( + self, create_collection_request: Optional[CreateCollectionRequest] = None, **kwargs + ) -> ApiResponse: # noqa: E501 + """create_collection # noqa: E501 + + This operation creates a Pinecone collection. 
# noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_collection_with_http_info(create_collection_request, async_req=True) + >>> result = thread.get() + + :param create_collection_request: + :type create_collection_request: CreateCollectionRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the ApiResponse.data will + be set to none and raw_data will store the + HTTP response body without reading/decoding. + Default is True. + :type _preload_content: bool, optional + :param _return_http_data_only: response data instead of ApiResponse + object with status code, headers, etc + :type _return_http_data_only: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. + :type _request_auth: dict, optional + :type _content_type: string, optional: force content-type for the request + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(str, status_code(int), headers(HTTPHeaderDict)) + """ + + _hosts = ["https://controller.{environment}.pinecone.io"] + _host = _hosts[0] + if kwargs.get("_host_index"): + _host_index = int(kwargs.get("_host_index")) + if _host_index < 0 or _host_index >= len(_hosts): + raise ApiValueError("Invalid host index. 
Must be 0 <= index < %s" % len(_host)) + _host = _hosts[_host_index] + _params = locals() + + _all_params = ["create_collection_request"] + _all_params.extend( + [ + "async_req", + "_return_http_data_only", + "_preload_content", + "_request_timeout", + "_request_auth", + "_content_type", + "_headers", + ] + ) + + # validate the arguments + for _key, _val in _params["kwargs"].items(): + if _key not in _all_params and _key != "_host_index": + raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method create_collection" % _key) + _params[_key] = _val + del _params["kwargs"] + + _collection_formats = {} + + # process the path parameters + _path_params = {} + + # process the query parameters + _query_params = [] + # process the header parameters + _header_params = dict(_params.get("_headers", {})) + # process the form parameters + _form_params = [] + _files = {} + # process the body parameter + _body_params = None + if _params["create_collection_request"] is not None: + _body_params = _params["create_collection_request"] + + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept(["text/plain"]) # noqa: E501 + + # set the HTTP header `Content-Type` + _content_types_list = _params.get( + "_content_type", self.api_client.select_header_content_type(["application/json"]) + ) + if _content_types_list: + _header_params["Content-Type"] = _content_types_list + + # authentication setting + _auth_settings = ["ApiKeyAuth"] # noqa: E501 + + _response_types_map = { + "201": "str", + "400": None, + "409": None, + "500": None, + } + + return self.api_client.call_api( + "/collections", + "POST", + _path_params, + _query_params, + _header_params, + body=_body_params, + post_params=_form_params, + files=_files, + response_types_map=_response_types_map, + auth_settings=_auth_settings, + async_req=_params.get("async_req"), + _return_http_data_only=_params.get("_return_http_data_only"), # noqa: E501 + 
_preload_content=_params.get("_preload_content", True), + _request_timeout=_params.get("_request_timeout"), + _host=_host, + collection_formats=_collection_formats, + _request_auth=_params.get("_request_auth"), + ) + + @validate_arguments + def create_index(self, create_request: Optional[CreateRequest] = None, **kwargs) -> IndexMeta: # noqa: E501 + """create_index # noqa: E501 + + This operation creates a Pinecone index. You can use it to specify the measure of similarity, the dimension of vectors to be stored in the index, the numbers of shards and replicas to use, and more. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_index(create_request, async_req=True) + >>> result = thread.get() + + :param create_request: + :type create_request: CreateRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _request_timeout: timeout setting for this request. + If one number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: IndexMeta + """ + kwargs["_return_http_data_only"] = True + if "_preload_content" in kwargs: + message = "Error! Please call the create_index_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 + raise ValueError(message) + return self.create_index_with_http_info(create_request, **kwargs) # noqa: E501 + + @validate_arguments + def create_index_with_http_info( + self, create_request: Optional[CreateRequest] = None, **kwargs + ) -> ApiResponse: # noqa: E501 + """create_index # noqa: E501 + + This operation creates a Pinecone index. 
You can use it to specify the measure of similarity, the dimension of vectors to be stored in the index, the numbers of shards and replicas to use, and more. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_index_with_http_info(create_request, async_req=True) + >>> result = thread.get() + + :param create_request: + :type create_request: CreateRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the ApiResponse.data will + be set to none and raw_data will store the + HTTP response body without reading/decoding. + Default is True. + :type _preload_content: bool, optional + :param _return_http_data_only: response data instead of ApiResponse + object with status code, headers, etc + :type _return_http_data_only: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. + :type _request_auth: dict, optional + :type _content_type: string, optional: force content-type for the request + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(IndexMeta, status_code(int), headers(HTTPHeaderDict)) + """ + + _hosts = ["https://controller.{environment}.pinecone.io"] + _host = _hosts[0] + if kwargs.get("_host_index"): + _host_index = int(kwargs.get("_host_index")) + if _host_index < 0 or _host_index >= len(_hosts): + raise ApiValueError("Invalid host index. 
Must be 0 <= index < %s" % len(_host)) + _host = _hosts[_host_index] + _params = locals() + + _all_params = ["create_request"] + _all_params.extend( + [ + "async_req", + "_return_http_data_only", + "_preload_content", + "_request_timeout", + "_request_auth", + "_content_type", + "_headers", + ] + ) + + # validate the arguments + for _key, _val in _params["kwargs"].items(): + if _key not in _all_params and _key != "_host_index": + raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method create_index" % _key) + _params[_key] = _val + del _params["kwargs"] + + _collection_formats = {} + + # process the path parameters + _path_params = {} + + # process the query parameters + _query_params = [] + # process the header parameters + _header_params = dict(_params.get("_headers", {})) + # process the form parameters + _form_params = [] + _files = {} + # process the body parameter + _body_params = None + if _params["create_request"] is not None: + _body_params = _params["create_request"] + + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # noqa: E501 + + # set the HTTP header `Content-Type` + _content_types_list = _params.get( + "_content_type", self.api_client.select_header_content_type(["application/json"]) + ) + if _content_types_list: + _header_params["Content-Type"] = _content_types_list + + # authentication setting + _auth_settings = ["ApiKeyAuth"] # noqa: E501 + + _response_types_map = { + "201": "IndexMeta", + "400": None, + "409": None, + "500": None, + } + + return self.api_client.call_api( + "/databases", + "POST", + _path_params, + _query_params, + _header_params, + body=_body_params, + post_params=_form_params, + files=_files, + response_types_map=_response_types_map, + auth_settings=_auth_settings, + async_req=_params.get("async_req"), + _return_http_data_only=_params.get("_return_http_data_only"), # noqa: E501 + _preload_content=_params.get("_preload_content", True), + 
_request_timeout=_params.get("_request_timeout"), + _host=_host, + collection_formats=_collection_formats, + _request_auth=_params.get("_request_auth"), + ) + + @validate_arguments + def delete_collection( + self, collection_name: Annotated[StrictStr, Field(..., description="The name of the collection")], **kwargs + ) -> str: # noqa: E501 + """delete_collection # noqa: E501 + + This operation deletes an existing collection. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_collection(collection_name, async_req=True) + >>> result = thread.get() + + :param collection_name: The name of the collection (required) + :type collection_name: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _request_timeout: timeout setting for this request. + If one number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: str + """ + kwargs["_return_http_data_only"] = True + if "_preload_content" in kwargs: + message = "Error! Please call the delete_collection_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 + raise ValueError(message) + return self.delete_collection_with_http_info(collection_name, **kwargs) # noqa: E501 + + @validate_arguments + def delete_collection_with_http_info( + self, collection_name: Annotated[StrictStr, Field(..., description="The name of the collection")], **kwargs + ) -> ApiResponse: # noqa: E501 + """delete_collection # noqa: E501 + + This operation deletes an existing collection. # noqa: E501 + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_collection_with_http_info(collection_name, async_req=True) + >>> result = thread.get() + + :param collection_name: The name of the collection (required) + :type collection_name: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the ApiResponse.data will + be set to none and raw_data will store the + HTTP response body without reading/decoding. + Default is True. + :type _preload_content: bool, optional + :param _return_http_data_only: response data instead of ApiResponse + object with status code, headers, etc + :type _return_http_data_only: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. + :type _request_auth: dict, optional + :type _content_type: string, optional: force content-type for the request + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(str, status_code(int), headers(HTTPHeaderDict)) + """ + + _hosts = ["https://controller.{environment}.pinecone.io"] + _host = _hosts[0] + if kwargs.get("_host_index"): + _host_index = int(kwargs.get("_host_index")) + if _host_index < 0 or _host_index >= len(_hosts): + raise ApiValueError("Invalid host index. 
Must be 0 <= index < %s" % len(_host)) + _host = _hosts[_host_index] + _params = locals() + + _all_params = ["collection_name"] + _all_params.extend( + [ + "async_req", + "_return_http_data_only", + "_preload_content", + "_request_timeout", + "_request_auth", + "_content_type", + "_headers", + ] + ) + + # validate the arguments + for _key, _val in _params["kwargs"].items(): + if _key not in _all_params and _key != "_host_index": + raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method delete_collection" % _key) + _params[_key] = _val + del _params["kwargs"] + + _collection_formats = {} + + # process the path parameters + _path_params = {} + if _params["collection_name"]: + _path_params["collectionName"] = _params["collection_name"] + + # process the query parameters + _query_params = [] + # process the header parameters + _header_params = dict(_params.get("_headers", {})) + # process the form parameters + _form_params = [] + _files = {} + # process the body parameter + _body_params = None + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept(["text/plain"]) # noqa: E501 + + # authentication setting + _auth_settings = ["ApiKeyAuth"] # noqa: E501 + + _response_types_map = { + "202": "str", + "404": None, + "500": None, + } + + return self.api_client.call_api( + "/collections/{collectionName}", + "DELETE", + _path_params, + _query_params, + _header_params, + body=_body_params, + post_params=_form_params, + files=_files, + response_types_map=_response_types_map, + auth_settings=_auth_settings, + async_req=_params.get("async_req"), + _return_http_data_only=_params.get("_return_http_data_only"), # noqa: E501 + _preload_content=_params.get("_preload_content", True), + _request_timeout=_params.get("_request_timeout"), + _host=_host, + collection_formats=_collection_formats, + _request_auth=_params.get("_request_auth"), + ) + + @validate_arguments + def delete_index( + self, index_name: Annotated[StrictStr, 
Field(..., description="The name of the index")], **kwargs + ) -> str: # noqa: E501 + """delete_index # noqa: E501 + + This operation deletes an existing index. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_index(index_name, async_req=True) + >>> result = thread.get() + + :param index_name: The name of the index (required) + :type index_name: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _request_timeout: timeout setting for this request. + If one number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: str + """ + kwargs["_return_http_data_only"] = True + if "_preload_content" in kwargs: + message = "Error! Please call the delete_index_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 + raise ValueError(message) + return self.delete_index_with_http_info(index_name, **kwargs) # noqa: E501 + + @validate_arguments + def delete_index_with_http_info( + self, index_name: Annotated[StrictStr, Field(..., description="The name of the index")], **kwargs + ) -> ApiResponse: # noqa: E501 + """delete_index # noqa: E501 + + This operation deletes an existing index. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_index_with_http_info(index_name, async_req=True) + >>> result = thread.get() + + :param index_name: The name of the index (required) + :type index_name: str + :param async_req: Whether to execute the request asynchronously. 
+ :type async_req: bool, optional + :param _preload_content: if False, the ApiResponse.data will + be set to none and raw_data will store the + HTTP response body without reading/decoding. + Default is True. + :type _preload_content: bool, optional + :param _return_http_data_only: response data instead of ApiResponse + object with status code, headers, etc + :type _return_http_data_only: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. + :type _request_auth: dict, optional + :type _content_type: string, optional: force content-type for the request + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(str, status_code(int), headers(HTTPHeaderDict)) + """ + + _hosts = ["https://controller.{environment}.pinecone.io"] + _host = _hosts[0] + if kwargs.get("_host_index"): + _host_index = int(kwargs.get("_host_index")) + if _host_index < 0 or _host_index >= len(_hosts): + raise ApiValueError("Invalid host index. 
Must be 0 <= index < %s" % len(_host)) + _host = _hosts[_host_index] + _params = locals() + + _all_params = ["index_name"] + _all_params.extend( + [ + "async_req", + "_return_http_data_only", + "_preload_content", + "_request_timeout", + "_request_auth", + "_content_type", + "_headers", + ] + ) + + # validate the arguments + for _key, _val in _params["kwargs"].items(): + if _key not in _all_params and _key != "_host_index": + raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method delete_index" % _key) + _params[_key] = _val + del _params["kwargs"] + + _collection_formats = {} + + # process the path parameters + _path_params = {} + if _params["index_name"]: + _path_params["indexName"] = _params["index_name"] + + # process the query parameters + _query_params = [] + # process the header parameters + _header_params = dict(_params.get("_headers", {})) + # process the form parameters + _form_params = [] + _files = {} + # process the body parameter + _body_params = None + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept(["text/plain"]) # noqa: E501 + + # authentication setting + _auth_settings = ["ApiKeyAuth"] # noqa: E501 + + _response_types_map = { + "202": "str", + "404": None, + "500": None, + } + + return self.api_client.call_api( + "/databases/{indexName}", + "DELETE", + _path_params, + _query_params, + _header_params, + body=_body_params, + post_params=_form_params, + files=_files, + response_types_map=_response_types_map, + auth_settings=_auth_settings, + async_req=_params.get("async_req"), + _return_http_data_only=_params.get("_return_http_data_only"), # noqa: E501 + _preload_content=_params.get("_preload_content", True), + _request_timeout=_params.get("_request_timeout"), + _host=_host, + collection_formats=_collection_formats, + _request_auth=_params.get("_request_auth"), ) - def __create_collection(self, **kwargs): - """create_collection # noqa: E501 - - This operation creates a Pinecone 
collection from an existing index. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.create_collection(async_req=True) - >>> result = thread.get() - - - Keyword Args: - create_collection_request (CreateCollectionRequest): [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - str - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_host_index"] = kwargs.get("_host_index") - return self.call_with_http_info(**kwargs) - - self.create_collection = _Endpoint( - settings={ - "response_type": (str,), - "auth": ["ApiKeyAuth"], - "endpoint_path": "/collections", - "operation_id": "create_collection", - "http_method": "POST", - "servers": [ - { - "url": "https://controller.{environment}.pinecone.io", - "description": "No description provided", - "variables": { - "environment": { - "description": "No description provided", - "default_value": "unknown", - } - }, - }, - ], - }, - params_map={ - "all": [ - "create_collection_request", - ], - "required": [], - "nullable": [], - "enum": [], - "validation": [], - }, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": { - "create_collection_request": (CreateCollectionRequest,), - }, - "attribute_map": {}, - "location_map": { - "create_collection_request": "body", - }, - "collection_format_map": {}, - }, - headers_map={"accept": ["text/plain"], "content_type": ["application/json"]}, - api_client=api_client, - callable=__create_collection, + @validate_arguments + def describe_collection( + self, collection_name: Annotated[StrictStr, Field(..., description="The name of the collection")], **kwargs + ) -> CollectionMeta: # noqa: E501 + """describe_collection # noqa: E501 + + Get a description of a collection. # noqa: E501 + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.describe_collection(collection_name, async_req=True) + >>> result = thread.get() + + :param collection_name: The name of the collection (required) + :type collection_name: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _request_timeout: timeout setting for this request. + If one number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: CollectionMeta + """ + kwargs["_return_http_data_only"] = True + if "_preload_content" in kwargs: + message = "Error! Please call the describe_collection_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 + raise ValueError(message) + return self.describe_collection_with_http_info(collection_name, **kwargs) # noqa: E501 + + @validate_arguments + def describe_collection_with_http_info( + self, collection_name: Annotated[StrictStr, Field(..., description="The name of the collection")], **kwargs + ) -> ApiResponse: # noqa: E501 + """describe_collection # noqa: E501 + + Get a description of a collection. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.describe_collection_with_http_info(collection_name, async_req=True) + >>> result = thread.get() + + :param collection_name: The name of the collection (required) + :type collection_name: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the ApiResponse.data will + be set to none and raw_data will store the + HTTP response body without reading/decoding. + Default is True. 
+ :type _preload_content: bool, optional + :param _return_http_data_only: response data instead of ApiResponse + object with status code, headers, etc + :type _return_http_data_only: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. + :type _request_auth: dict, optional + :type _content_type: string, optional: force content-type for the request + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(CollectionMeta, status_code(int), headers(HTTPHeaderDict)) + """ + + _hosts = ["https://controller.{environment}.pinecone.io"] + _host = _hosts[0] + if kwargs.get("_host_index"): + _host_index = int(kwargs.get("_host_index")) + if _host_index < 0 or _host_index >= len(_hosts): + raise ApiValueError("Invalid host index. Must be 0 <= index < %s" % len(_host)) + _host = _hosts[_host_index] + _params = locals() + + _all_params = ["collection_name"] + _all_params.extend( + [ + "async_req", + "_return_http_data_only", + "_preload_content", + "_request_timeout", + "_request_auth", + "_content_type", + "_headers", + ] ) - def __create_index(self, **kwargs): - """create_index # noqa: E501 - - This operation creates a Pinecone index. Specify the distance metric, the dimension of vectors to be stored in the index, the numbers replicas to use, and the collection from which to create the index, if applicable. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.create_index(async_req=True) - >>> result = thread.get() - - - Keyword Args: - create_request (CreateRequest): [optional] - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - str - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_host_index"] = kwargs.get("_host_index") - return self.call_with_http_info(**kwargs) - - self.create_index = _Endpoint( - settings={ - "response_type": (str,), - "auth": ["ApiKeyAuth"], - "endpoint_path": "/databases", - "operation_id": "create_index", - "http_method": "POST", - "servers": [ - { - "url": "https://controller.{environment}.pinecone.io", - "description": "No description provided", - "variables": { - "environment": { - "description": "No description provided", - "default_value": "unknown", - } - }, - }, - ], - }, - params_map={ - "all": [ - "create_request", - ], - "required": [], - "nullable": [], - "enum": [], - "validation": [], - }, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": { - "create_request": (CreateRequest,), - }, - "attribute_map": {}, - "location_map": { - "create_request": "body", - }, - "collection_format_map": {}, - }, - headers_map={"accept": ["text/plain"], "content_type": ["application/json"]}, - api_client=api_client, - callable=__create_index, + # validate the arguments + for _key, _val in _params["kwargs"].items(): + if _key not in _all_params and _key != "_host_index": + raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method describe_collection" % _key) + _params[_key] = _val + del _params["kwargs"] + + _collection_formats = {} + + # process the path parameters + _path_params = {} + if _params["collection_name"]: + _path_params["collectionName"] = _params["collection_name"] + + # process the query parameters + _query_params = [] + # process the header 
parameters + _header_params = dict(_params.get("_headers", {})) + # process the form parameters + _form_params = [] + _files = {} + # process the body parameter + _body_params = None + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # noqa: E501 + + # authentication setting + _auth_settings = ["ApiKeyAuth"] # noqa: E501 + + _response_types_map = { + "200": "CollectionMeta", + "404": None, + "500": None, + } + + return self.api_client.call_api( + "/collections/{collectionName}", + "GET", + _path_params, + _query_params, + _header_params, + body=_body_params, + post_params=_form_params, + files=_files, + response_types_map=_response_types_map, + auth_settings=_auth_settings, + async_req=_params.get("async_req"), + _return_http_data_only=_params.get("_return_http_data_only"), # noqa: E501 + _preload_content=_params.get("_preload_content", True), + _request_timeout=_params.get("_request_timeout"), + _host=_host, + collection_formats=_collection_formats, + _request_auth=_params.get("_request_auth"), ) - def __delete_collection(self, collection_name, **kwargs): - """delete_collection # noqa: E501 - - This operation deletes an existing collection. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.delete_collection(collection_name, async_req=True) - >>> result = thread.get() - - Args: - collection_name (str): The name of the collection - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. 
- Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - str - If the method is called asynchronously, returns the request - thread. - """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_host_index"] = kwargs.get("_host_index") - kwargs["collection_name"] = collection_name - return self.call_with_http_info(**kwargs) - - self.delete_collection = _Endpoint( - settings={ - "response_type": (str,), - "auth": ["ApiKeyAuth"], - "endpoint_path": "/collections/{collectionName}", - "operation_id": "delete_collection", - "http_method": "DELETE", - "servers": [ - { - "url": "https://controller.{environment}.pinecone.io", - "description": "No description provided", - "variables": { - "environment": { - "description": "No description provided", - "default_value": "unknown", - } - }, - }, - ], - }, - params_map={ - "all": [ - "collection_name", - ], - "required": [ - "collection_name", - ], - "nullable": [], - "enum": [], - "validation": [], - }, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": { - "collection_name": (str,), - }, - "attribute_map": { - "collection_name": "collectionName", - }, - "location_map": { - "collection_name": "path", - }, - "collection_format_map": {}, - }, 
- headers_map={ - "accept": ["text/plain"], - "content_type": [], - }, - api_client=api_client, - callable=__delete_collection, + @validate_arguments + def describe_index( + self, index_name: Annotated[StrictStr, Field(..., description="The name of the index")], **kwargs + ) -> IndexMeta: # noqa: E501 + """describe_index # noqa: E501 + + Get a description of an index. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.describe_index(index_name, async_req=True) + >>> result = thread.get() + + :param index_name: The name of the index (required) + :type index_name: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _request_timeout: timeout setting for this request. + If one number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: IndexMeta + """ + kwargs["_return_http_data_only"] = True + if "_preload_content" in kwargs: + message = "Error! Please call the describe_index_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 + raise ValueError(message) + return self.describe_index_with_http_info(index_name, **kwargs) # noqa: E501 + + @validate_arguments + def describe_index_with_http_info( + self, index_name: Annotated[StrictStr, Field(..., description="The name of the index")], **kwargs + ) -> ApiResponse: # noqa: E501 + """describe_index # noqa: E501 + + Get a description of an index. # noqa: E501 + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.describe_index_with_http_info(index_name, async_req=True) + >>> result = thread.get() + + :param index_name: The name of the index (required) + :type index_name: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the ApiResponse.data will + be set to none and raw_data will store the + HTTP response body without reading/decoding. + Default is True. + :type _preload_content: bool, optional + :param _return_http_data_only: response data instead of ApiResponse + object with status code, headers, etc + :type _return_http_data_only: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. + :type _request_auth: dict, optional + :type _content_type: string, optional: force content-type for the request + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(IndexMeta, status_code(int), headers(HTTPHeaderDict)) + """ + + _hosts = ["https://controller.{environment}.pinecone.io"] + _host = _hosts[0] + if kwargs.get("_host_index"): + _host_index = int(kwargs.get("_host_index")) + if _host_index < 0 or _host_index >= len(_hosts): + raise ApiValueError("Invalid host index. 
Must be 0 <= index < %s" % len(_host)) + _host = _hosts[_host_index] + _params = locals() + + _all_params = ["index_name"] + _all_params.extend( + [ + "async_req", + "_return_http_data_only", + "_preload_content", + "_request_timeout", + "_request_auth", + "_content_type", + "_headers", + ] ) - def __delete_index(self, index_name, **kwargs): - """delete_index # noqa: E501 - - This operation deletes an existing index. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.delete_index(index_name, async_req=True) - >>> result = thread.get() - - Args: - index_name (str): The name of the index - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - str - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_host_index"] = kwargs.get("_host_index") - kwargs["index_name"] = index_name - return self.call_with_http_info(**kwargs) - - self.delete_index = _Endpoint( - settings={ - "response_type": (str,), - "auth": ["ApiKeyAuth"], - "endpoint_path": "/databases/{indexName}", - "operation_id": "delete_index", - "http_method": "DELETE", - "servers": [ - { - "url": "https://controller.{environment}.pinecone.io", - "description": "No description provided", - "variables": { - "environment": { - "description": "No description provided", - "default_value": "unknown", - } - }, - }, - ], - }, - params_map={ - "all": [ - "index_name", - ], - "required": [ - "index_name", - ], - "nullable": [], - "enum": [], - "validation": [], - }, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": { - "index_name": (str,), - }, - "attribute_map": { - "index_name": "indexName", - }, - "location_map": { - "index_name": "path", - }, - "collection_format_map": {}, - }, - headers_map={ - "accept": ["text/plain"], - "content_type": [], - }, - api_client=api_client, - callable=__delete_index, + # validate the arguments + for _key, _val in _params["kwargs"].items(): + if _key not in _all_params and _key != "_host_index": + raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method describe_index" % _key) + _params[_key] = _val + del _params["kwargs"] + + _collection_formats = {} + + # process the path parameters + _path_params = {} + if _params["index_name"]: + _path_params["indexName"] = _params["index_name"] + + # process the query parameters + 
_query_params = [] + # process the header parameters + _header_params = dict(_params.get("_headers", {})) + # process the form parameters + _form_params = [] + _files = {} + # process the body parameter + _body_params = None + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # noqa: E501 + + # authentication setting + _auth_settings = ["ApiKeyAuth"] # noqa: E501 + + _response_types_map = { + "200": "IndexMeta", + "404": None, + "500": None, + } + + return self.api_client.call_api( + "/databases/{indexName}", + "GET", + _path_params, + _query_params, + _header_params, + body=_body_params, + post_params=_form_params, + files=_files, + response_types_map=_response_types_map, + auth_settings=_auth_settings, + async_req=_params.get("async_req"), + _return_http_data_only=_params.get("_return_http_data_only"), # noqa: E501 + _preload_content=_params.get("_preload_content", True), + _request_timeout=_params.get("_request_timeout"), + _host=_host, + collection_formats=_collection_formats, + _request_auth=_params.get("_request_auth"), ) - def __describe_collection(self, collection_name, **kwargs): - """describe_collection # noqa: E501 - - Get a description of a collection, including the name, size, and status. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.describe_collection(collection_name, async_req=True) - >>> result = thread.get() - - Args: - collection_name (str): The name of the collection - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. 
It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - CollectionMeta - If the method is called asynchronously, returns the request - thread. - """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_host_index"] = kwargs.get("_host_index") - kwargs["collection_name"] = collection_name - return self.call_with_http_info(**kwargs) - - self.describe_collection = _Endpoint( - settings={ - "response_type": (CollectionMeta,), - "auth": ["ApiKeyAuth"], - "endpoint_path": "/collections/{collectionName}", - "operation_id": "describe_collection", - "http_method": "GET", - "servers": [ - { - "url": "https://controller.{environment}.pinecone.io", - "description": "No description provided", - "variables": { - "environment": { - "description": "No description provided", - "default_value": "unknown", - } - }, - }, - ], - }, - params_map={ - "all": [ - "collection_name", - ], - "required": [ - "collection_name", - ], - "nullable": [], - "enum": [], - "validation": [], - }, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": { - "collection_name": (str,), - }, - "attribute_map": { - "collection_name": "collectionName", - }, - 
"location_map": { - "collection_name": "path", - }, - "collection_format_map": {}, - }, - headers_map={ - "accept": ["application/json"], - "content_type": [], - }, - api_client=api_client, - callable=__describe_collection, + @validate_arguments + def list_collections(self, **kwargs) -> List[str]: # noqa: E501 + """list_collections # noqa: E501 + + This operation returns a list of your Pinecone collections. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_collections(async_req=True) + >>> result = thread.get() + + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _request_timeout: timeout setting for this request. + If one number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: List[str] + """ + kwargs["_return_http_data_only"] = True + if "_preload_content" in kwargs: + message = "Error! Please call the list_collections_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 + raise ValueError(message) + return self.list_collections_with_http_info(**kwargs) # noqa: E501 + + @validate_arguments + def list_collections_with_http_info(self, **kwargs) -> ApiResponse: # noqa: E501 + """list_collections # noqa: E501 + + This operation returns a list of your Pinecone collections. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_collections_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req: Whether to execute the request asynchronously. 
+ :type async_req: bool, optional + :param _preload_content: if False, the ApiResponse.data will + be set to none and raw_data will store the + HTTP response body without reading/decoding. + Default is True. + :type _preload_content: bool, optional + :param _return_http_data_only: response data instead of ApiResponse + object with status code, headers, etc + :type _return_http_data_only: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. + :type _request_auth: dict, optional + :type _content_type: string, optional: force content-type for the request + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(List[str], status_code(int), headers(HTTPHeaderDict)) + """ + + _hosts = ["https://controller.{environment}.pinecone.io"] + _host = _hosts[0] + if kwargs.get("_host_index"): + _host_index = int(kwargs.get("_host_index")) + if _host_index < 0 or _host_index >= len(_hosts): + raise ApiValueError("Invalid host index. Must be 0 <= index < %s" % len(_host)) + _host = _hosts[_host_index] + _params = locals() + + _all_params = [] + _all_params.extend( + [ + "async_req", + "_return_http_data_only", + "_preload_content", + "_request_timeout", + "_request_auth", + "_content_type", + "_headers", + ] ) - def __describe_index(self, index_name, **kwargs): - """describe_index # noqa: E501 - - Get a description of an index, including dimension, distance metric, number of replicas, and more. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.describe_index(index_name, async_req=True) - >>> result = thread.get() - - Args: - index_name (str): The name of the index - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - IndexMeta - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_host_index"] = kwargs.get("_host_index") - kwargs["index_name"] = index_name - return self.call_with_http_info(**kwargs) - - self.describe_index = _Endpoint( - settings={ - "response_type": (IndexMeta,), - "auth": ["ApiKeyAuth"], - "endpoint_path": "/databases/{indexName}", - "operation_id": "describe_index", - "http_method": "GET", - "servers": [ - { - "url": "https://controller.{environment}.pinecone.io", - "description": "No description provided", - "variables": { - "environment": { - "description": "No description provided", - "default_value": "unknown", - } - }, - }, - ], - }, - params_map={ - "all": [ - "index_name", - ], - "required": [ - "index_name", - ], - "nullable": [], - "enum": [], - "validation": [], - }, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": { - "index_name": (str,), - }, - "attribute_map": { - "index_name": "indexName", - }, - "location_map": { - "index_name": "path", - }, - "collection_format_map": {}, - }, - headers_map={ - "accept": ["application/json"], - "content_type": [], - }, - api_client=api_client, - callable=__describe_index, + # validate the arguments + for _key, _val in _params["kwargs"].items(): + if _key not in _all_params and _key != "_host_index": + raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method list_collections" % _key) + _params[_key] = _val + del _params["kwargs"] + + _collection_formats = {} + + # process the path parameters + _path_params = {} + + # process the query parameters + _query_params = [] + # process the header parameters + 
_header_params = dict(_params.get("_headers", {})) + # process the form parameters + _form_params = [] + _files = {} + # process the body parameter + _body_params = None + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json; charset=utf-8"] + ) # noqa: E501 + + # authentication setting + _auth_settings = ["ApiKeyAuth"] # noqa: E501 + + _response_types_map = { + "200": "List[str]", + } + + return self.api_client.call_api( + "/collections", + "GET", + _path_params, + _query_params, + _header_params, + body=_body_params, + post_params=_form_params, + files=_files, + response_types_map=_response_types_map, + auth_settings=_auth_settings, + async_req=_params.get("async_req"), + _return_http_data_only=_params.get("_return_http_data_only"), # noqa: E501 + _preload_content=_params.get("_preload_content", True), + _request_timeout=_params.get("_request_timeout"), + _host=_host, + collection_formats=_collection_formats, + _request_auth=_params.get("_request_auth"), ) - def __list_collections(self, **kwargs): - """list_collections # noqa: E501 - - This operation returns a list of your Pinecone collections. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_collections(async_req=True) - >>> result = thread.get() - - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. 
- Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - [str] - If the method is called asynchronously, returns the request - thread. - """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_host_index"] = kwargs.get("_host_index") - return self.call_with_http_info(**kwargs) - - self.list_collections = _Endpoint( - settings={ - "response_type": ([str],), - "auth": ["ApiKeyAuth"], - "endpoint_path": "/collections", - "operation_id": "list_collections", - "http_method": "GET", - "servers": [ - { - "url": "https://controller.{environment}.pinecone.io", - "description": "No description provided", - "variables": { - "environment": { - "description": "No description provided", - "default_value": "unknown", - } - }, - }, - ], - }, - params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": {}, - "attribute_map": {}, - "location_map": {}, - "collection_format_map": {}, - }, - headers_map={ - "accept": ["application/json; charset=utf-8"], - "content_type": [], - }, - api_client=api_client, - callable=__list_collections, + @validate_arguments + def list_indexes(self, **kwargs) -> ListIndexes200Response: # noqa: E501 + """list_indexes # noqa: E501 + + This operation returns a list of your Pinecone indexes. 
# noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_indexes(async_req=True) + >>> result = thread.get() + + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _request_timeout: timeout setting for this request. + If one number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: ListIndexes200Response + """ + kwargs["_return_http_data_only"] = True + if "_preload_content" in kwargs: + message = "Error! Please call the list_indexes_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 + raise ValueError(message) + return self.list_indexes_with_http_info(**kwargs) # noqa: E501 + + @validate_arguments + def list_indexes_with_http_info(self, **kwargs) -> ApiResponse: # noqa: E501 + """list_indexes # noqa: E501 + + This operation returns a list of your Pinecone indexes. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_indexes_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the ApiResponse.data will + be set to none and raw_data will store the + HTTP response body without reading/decoding. + Default is True. + :type _preload_content: bool, optional + :param _return_http_data_only: response data instead of ApiResponse + object with status code, headers, etc + :type _return_http_data_only: bool, optional + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. + :type _request_auth: dict, optional + :type _content_type: string, optional: force content-type for the request + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(ListIndexes200Response, status_code(int), headers(HTTPHeaderDict)) + """ + + _hosts = ["https://controller.{environment}.pinecone.io"] + _host = _hosts[0] + if kwargs.get("_host_index"): + _host_index = int(kwargs.get("_host_index")) + if _host_index < 0 or _host_index >= len(_hosts): + raise ApiValueError("Invalid host index. Must be 0 <= index < %s" % len(_host)) + _host = _hosts[_host_index] + _params = locals() + + _all_params = [] + _all_params.extend( + [ + "async_req", + "_return_http_data_only", + "_preload_content", + "_request_timeout", + "_request_auth", + "_content_type", + "_headers", + ] ) - def __list_indexes(self, **kwargs): - """list_indexes # noqa: E501 - - This operation returns a list of the indexes in the current project. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - - >>> thread = api.list_indexes(async_req=True) - >>> result = thread.get() - - - Keyword Args: - _return_http_data_only (bool): response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - will be returned without reading/decoding response data. - Default is True. - _request_timeout (int/float/tuple): timeout setting for this request. If - one number provided, it will be total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. 
- _check_input_type (bool): specifies if type checking - should be done one the data sent to the server. - Default is True. - _check_return_type (bool): specifies if type checking - should be done one the data received from the server. - Default is True. - _host_index (int/None): specifies the index of the server - that we want to use. - Default is read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - [str] - If the method is called asynchronously, returns the request - thread. - """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_host_index"] = kwargs.get("_host_index") - return self.call_with_http_info(**kwargs) - - self.list_indexes = _Endpoint( - settings={ - "response_type": ([str],), - "auth": ["ApiKeyAuth"], - "endpoint_path": "/databases", - "operation_id": "list_indexes", - "http_method": "GET", - "servers": [ - { - "url": "https://controller.{environment}.pinecone.io", - "description": "No description provided", - "variables": { - "environment": { - "description": "No description provided", - "default_value": "unknown", - } - }, - }, - ], - }, - params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": {}, - "attribute_map": {}, - "location_map": {}, - "collection_format_map": {}, - }, - headers_map={ - "accept": ["application/json; charset=utf-8"], - "content_type": [], - }, - api_client=api_client, - callable=__list_indexes, + # validate the arguments + for _key, _val in _params["kwargs"].items(): + if _key not in _all_params and _key != 
"_host_index": + raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method list_indexes" % _key) + _params[_key] = _val + del _params["kwargs"] + + _collection_formats = {} + + # process the path parameters + _path_params = {} + + # process the query parameters + _query_params = [] + # process the header parameters + _header_params = dict(_params.get("_headers", {})) + # process the form parameters + _form_params = [] + _files = {} + # process the body parameter + _body_params = None + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json; charset=utf-8"] + ) # noqa: E501 + + # authentication setting + _auth_settings = ["ApiKeyAuth"] # noqa: E501 + + _response_types_map = { + "200": "ListIndexes200Response", + } + + return self.api_client.call_api( + "/databases", + "GET", + _path_params, + _query_params, + _header_params, + body=_body_params, + post_params=_form_params, + files=_files, + response_types_map=_response_types_map, + auth_settings=_auth_settings, + async_req=_params.get("async_req"), + _return_http_data_only=_params.get("_return_http_data_only"), # noqa: E501 + _preload_content=_params.get("_preload_content", True), + _request_timeout=_params.get("_request_timeout"), + _host=_host, + collection_formats=_collection_formats, + _request_auth=_params.get("_request_auth"), ) diff --git a/pinecone/core/client/api/vector_operations_api.py b/pinecone/core/client/api/vector_operations_api.py index 1f6d2d25..98c1d1a6 100644 --- a/pinecone/core/client/api/vector_operations_api.py +++ b/pinecone/core/client/api/vector_operations_api.py @@ -1,746 +1,1233 @@ +# coding: utf-8 + """ - Pinecone vector operations API + Pinecone API - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) The version of the 
OpenAPI document: version not set Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.api_client import ApiClient, Endpoint as _Endpoint -from pinecone.core.client.model_utils import ( # noqa: F401 - check_allowed_values, - check_validations, - date, - datetime, - file_type, - none_type, - validate_and_convert_types, -) -from pinecone.core.client.model.delete_request import DeleteRequest -from pinecone.core.client.model.describe_index_stats_request import DescribeIndexStatsRequest -from pinecone.core.client.model.describe_index_stats_response import DescribeIndexStatsResponse -from pinecone.core.client.model.fetch_response import FetchResponse -from pinecone.core.client.model.query_request import QueryRequest -from pinecone.core.client.model.query_response import QueryResponse -from pinecone.core.client.model.rpc_status import RpcStatus -from pinecone.core.client.model.update_request import UpdateRequest -from pinecone.core.client.model.upsert_request import UpsertRequest -from pinecone.core.client.model.upsert_response import UpsertResponse - - -class VectorOperationsApi(object): +import io +import warnings + +from pydantic import validate_arguments, ValidationError + +from typing_extensions import Annotated +from pydantic import Field, StrictBool, StrictStr, conlist + +from typing import Any, Dict, Optional + +from pinecone.core.client.models.delete_request import DeleteRequest +from pinecone.core.client.models.describe_index_stats_request import DescribeIndexStatsRequest +from pinecone.core.client.models.describe_index_stats_response import DescribeIndexStatsResponse +from pinecone.core.client.models.fetch_response import FetchResponse +from pinecone.core.client.models.query_request import QueryRequest +from 
pinecone.core.client.models.query_response import QueryResponse +from pinecone.core.client.models.update_request import UpdateRequest +from pinecone.core.client.models.upsert_request import UpsertRequest +from pinecone.core.client.models.upsert_response import UpsertResponse + +from pinecone.core.client.api_client import ApiClient +from pinecone.core.client.api_response import ApiResponse +from pinecone.core.client.exceptions import ApiTypeError, ApiValueError # noqa: F401 + + +class VectorOperationsApi: """NOTE: This class is auto generated by OpenAPI Generator Ref: https://openapi-generator.tech Do not edit the class manually. """ - def __init__(self, api_client=None): + def __init__(self, api_client=None) -> None: if api_client is None: - api_client = ApiClient() + api_client = ApiClient.get_default() self.api_client = api_client - def __delete(self, delete_request, **kwargs): - """Delete # noqa: E501 - - The `Delete` operation deletes records by ID from a single namespace. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, pass async_req=True - - >>> thread = api.delete(delete_request, async_req=True) - >>> result = thread.get() - - Args: - delete_request (DeleteRequest): - - Keyword Args: - _return_http_data_only (bool): Return response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - is returned without reading or decoding response data. - Default is True. - _request_timeout (int/float/tuple): The timeout setting for this request. If - one number is provided, it is the total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): Specifies if type checking - should be done on the data sent to the server. - Default is True. - _check_return_type (bool): Specifies if type checking - should be done on the data received from the server. 
- Default is True. - _host_index (int/None): Specifies the index of the server - that we want to use. - Default is to read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - {str: (bool, date, datetime, dict, float, int, list, str, none_type)} - If the method is called asynchronously, returns the request - thread. - """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_host_index"] = kwargs.get("_host_index") - kwargs["delete_request"] = delete_request - return self.call_with_http_info(**kwargs) - - self.delete = _Endpoint( - settings={ - "response_type": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), - "auth": ["ApiKeyAuth"], - "endpoint_path": "/vectors/delete", - "operation_id": "delete", - "http_method": "POST", - "servers": None, - }, - params_map={ - "all": [ - "delete_request", - ], - "required": [ - "delete_request", - ], - "nullable": [], - "enum": [], - "validation": [], - }, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": { - "delete_request": (DeleteRequest,), - }, - "attribute_map": {}, - "location_map": { - "delete_request": "body", - }, - "collection_format_map": {}, - }, - headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, - api_client=api_client, - callable=__delete, + @validate_arguments + def delete(self, delete_request: DeleteRequest, **kwargs) -> object: # noqa: E501 + """Delete # noqa: E501 + + The `Delete` operation deletes vectors, by id, from a single namespace. You can delete items by their id, from a single namespace. 
# noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete(delete_request, async_req=True) + >>> result = thread.get() + + :param delete_request: (required) + :type delete_request: DeleteRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _request_timeout: timeout setting for this request. + If one number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: object + """ + kwargs["_return_http_data_only"] = True + if "_preload_content" in kwargs: + message = "Error! Please call the delete_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 + raise ValueError(message) + return self.delete_with_http_info(delete_request, **kwargs) # noqa: E501 + + @validate_arguments + def delete_with_http_info(self, delete_request: DeleteRequest, **kwargs) -> ApiResponse: # noqa: E501 + """Delete # noqa: E501 + + The `Delete` operation deletes vectors, by id, from a single namespace. You can delete items by their id, from a single namespace. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_with_http_info(delete_request, async_req=True) + >>> result = thread.get() + + :param delete_request: (required) + :type delete_request: DeleteRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the ApiResponse.data will + be set to none and raw_data will store the + HTTP response body without reading/decoding. + Default is True. 
+ :type _preload_content: bool, optional + :param _return_http_data_only: response data instead of ApiResponse + object with status code, headers, etc + :type _return_http_data_only: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. + :type _request_auth: dict, optional + :type _content_type: string, optional: force content-type for the request + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(object, status_code(int), headers(HTTPHeaderDict)) + """ + + _params = locals() + + _all_params = ["delete_request"] + _all_params.extend( + [ + "async_req", + "_return_http_data_only", + "_preload_content", + "_request_timeout", + "_request_auth", + "_content_type", + "_headers", + ] + ) + + # validate the arguments + for _key, _val in _params["kwargs"].items(): + if _key not in _all_params: + raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method delete" % _key) + _params[_key] = _val + del _params["kwargs"] + + _collection_formats = {} + + # process the path parameters + _path_params = {} + + # process the query parameters + _query_params = [] + # process the header parameters + _header_params = dict(_params.get("_headers", {})) + # process the form parameters + _form_params = [] + _files = {} + # process the body parameter + _body_params = None + if _params["delete_request"] is not None: + _body_params = _params["delete_request"] + + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # noqa: E501 + + # set the HTTP header `Content-Type` + _content_types_list = _params.get( + "_content_type", 
self.api_client.select_header_content_type(["application/json"]) + ) + if _content_types_list: + _header_params["Content-Type"] = _content_types_list + + # authentication setting + _auth_settings = ["ApiKeyAuth"] # noqa: E501 + + _response_types_map = { + "200": "object", + } + + return self.api_client.call_api( + "/vectors/delete", + "POST", + _path_params, + _query_params, + _header_params, + body=_body_params, + post_params=_form_params, + files=_files, + response_types_map=_response_types_map, + auth_settings=_auth_settings, + async_req=_params.get("async_req"), + _return_http_data_only=_params.get("_return_http_data_only"), # noqa: E501 + _preload_content=_params.get("_preload_content", True), + _request_timeout=_params.get("_request_timeout"), + collection_formats=_collection_formats, + _request_auth=_params.get("_request_auth"), + ) + + @validate_arguments + def delete1( + self, + ids: Annotated[Optional[conlist(StrictStr)], Field(description="Vectors to delete.")] = None, + delete_all: Annotated[ + Optional[StrictBool], + Field(description="This indicates that all vectors in the index namespace should be deleted."), + ] = None, + namespace: Annotated[ + Optional[StrictStr], Field(description="The namespace to delete vectors from, if applicable.") + ] = None, + **kwargs, + ) -> object: # noqa: E501 + """Delete # noqa: E501 + + The `Delete` operation deletes vectors, by id, from a single namespace. You can delete items by their id, from a single namespace. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete1(ids, delete_all, namespace, async_req=True) + >>> result = thread.get() + + :param ids: Vectors to delete. + :type ids: List[str] + :param delete_all: This indicates that all vectors in the index namespace should be deleted. + :type delete_all: bool + :param namespace: The namespace to delete vectors from, if applicable. 
+ :type namespace: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _request_timeout: timeout setting for this request. + If one number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: object + """ + kwargs["_return_http_data_only"] = True + if "_preload_content" in kwargs: + message = "Error! Please call the delete1_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 + raise ValueError(message) + return self.delete1_with_http_info(ids, delete_all, namespace, **kwargs) # noqa: E501 + + @validate_arguments + def delete1_with_http_info( + self, + ids: Annotated[Optional[conlist(StrictStr)], Field(description="Vectors to delete.")] = None, + delete_all: Annotated[ + Optional[StrictBool], + Field(description="This indicates that all vectors in the index namespace should be deleted."), + ] = None, + namespace: Annotated[ + Optional[StrictStr], Field(description="The namespace to delete vectors from, if applicable.") + ] = None, + **kwargs, + ) -> ApiResponse: # noqa: E501 + """Delete # noqa: E501 + + The `Delete` operation deletes vectors, by id, from a single namespace. You can delete items by their id, from a single namespace. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete1_with_http_info(ids, delete_all, namespace, async_req=True) + >>> result = thread.get() + + :param ids: Vectors to delete. + :type ids: List[str] + :param delete_all: This indicates that all vectors in the index namespace should be deleted. + :type delete_all: bool + :param namespace: The namespace to delete vectors from, if applicable. 
+ :type namespace: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the ApiResponse.data will + be set to none and raw_data will store the + HTTP response body without reading/decoding. + Default is True. + :type _preload_content: bool, optional + :param _return_http_data_only: response data instead of ApiResponse + object with status code, headers, etc + :type _return_http_data_only: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. + :type _request_auth: dict, optional + :type _content_type: string, optional: force content-type for the request + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(object, status_code(int), headers(HTTPHeaderDict)) + """ + + _params = locals() + + _all_params = ["ids", "delete_all", "namespace"] + _all_params.extend( + [ + "async_req", + "_return_http_data_only", + "_preload_content", + "_request_timeout", + "_request_auth", + "_content_type", + "_headers", + ] ) - def __delete1(self, **kwargs): - """Delete # noqa: E501 - - The `Delete` operation deletes records by ID from a single namespace. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, pass async_req=True - - >>> thread = api.delete1(async_req=True) - >>> result = thread.get() - - - Keyword Args: - ids ([str]): Vectors to delete. [optional] - delete_all (bool): This indicates that all records in the index namespace should be deleted. [optional] Default is False. - namespace (str): The namespace to delete records from, if applicable. 
[optional] - _return_http_data_only (bool): Response data without head status - code and headers. Default is True. - _preload_content (bool): If False, the urllib3.HTTPResponse object - is returned without reading or decoding response data. - Default is True. - _request_timeout (int/float/tuple): Timeout setting for this request. If - one number is provided, this number is the total request timeout. This argument can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): Specifies if type checking - should be done after the data is sent to the server. - Default is True. - _check_return_type (bool): Specifies if type checking - should be done after the data is received from the server. - Default is True. - _host_index (int/None): Specifies the index of the server - to use. - Default is to read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - {str: (bool, date, datetime, dict, float, int, list, str, none_type)} - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_host_index"] = kwargs.get("_host_index") - return self.call_with_http_info(**kwargs) - - self.delete1 = _Endpoint( - settings={ - "response_type": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), - "auth": ["ApiKeyAuth"], - "endpoint_path": "/vectors/delete", - "operation_id": "delete1", - "http_method": "DELETE", - "servers": None, - }, - params_map={ - "all": [ - "ids", - "delete_all", - "namespace", - ], - "required": [], - "nullable": [], - "enum": [], - "validation": [], - }, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": { - "ids": ([str],), - "delete_all": (bool,), - "namespace": (str,), - }, - "attribute_map": { - "ids": "ids", - "delete_all": "deleteAll", - "namespace": "namespace", - }, - "location_map": { - "ids": "query", - "delete_all": "query", - "namespace": "query", - }, - "collection_format_map": { - "ids": "multi", - }, - }, - headers_map={ - "accept": ["application/json"], - "content_type": [], - }, - api_client=api_client, - callable=__delete1, + # validate the arguments + for _key, _val in _params["kwargs"].items(): + if _key not in _all_params: + raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method delete1" % _key) + _params[_key] = _val + del _params["kwargs"] + + _collection_formats = {} + + # process the path parameters + _path_params = {} + + # process the query parameters + _query_params = [] + if _params.get("ids") is not None: # noqa: E501 + _query_params.append(("ids", _params["ids"])) + _collection_formats["ids"] = "multi" + + if 
_params.get("delete_all") is not None: # noqa: E501 + _query_params.append(("deleteAll", _params["delete_all"])) + + if _params.get("namespace") is not None: # noqa: E501 + _query_params.append(("namespace", _params["namespace"])) + + # process the header parameters + _header_params = dict(_params.get("_headers", {})) + # process the form parameters + _form_params = [] + _files = {} + # process the body parameter + _body_params = None + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # noqa: E501 + + # authentication setting + _auth_settings = ["ApiKeyAuth"] # noqa: E501 + + _response_types_map = { + "200": "object", + } + + return self.api_client.call_api( + "/vectors/delete", + "DELETE", + _path_params, + _query_params, + _header_params, + body=_body_params, + post_params=_form_params, + files=_files, + response_types_map=_response_types_map, + auth_settings=_auth_settings, + async_req=_params.get("async_req"), + _return_http_data_only=_params.get("_return_http_data_only"), # noqa: E501 + _preload_content=_params.get("_preload_content", True), + _request_timeout=_params.get("_request_timeout"), + collection_formats=_collection_formats, + _request_auth=_params.get("_request_auth"), ) - def __describe_index_stats(self, describe_index_stats_request, **kwargs): - """DescribeIndexStats # noqa: E501 - - The `DescribeIndexStats` operation returns statistics about the index's contents, including the vector count per namespace, the number of dimensions, and the index fullness. The index fullness result may be inaccurate during pod resizing; to get the status of a pod resizing process, use [`describe_index`](https://www.pinecone.io/docs/api/operation/describe_index/). # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, pass async_req=True - - >>> thread = api.describe_index_stats(describe_index_stats_request, async_req=True) - >>> result = thread.get() - - Args: - describe_index_stats_request (DescribeIndexStatsRequest): - - Keyword Args: - _return_http_data_only (bool): Return head status - code or headers. Default is True. - _preload_content (bool): If False, the urllib3.HTTPResponse object - is returned without reading or decoding response data. - Default is True. - _request_timeout (int/float/tuple): The timeout setting for this request. If - one number is provided, it is the total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): Specifies if type checking - should be done on the data sent to the server. - Default is True. - _check_return_type (bool): Specifies if type checking - should be done on the data received from the server. - Default is True. - _host_index (int/None): Specifies the index of the server - that we want to use. - Default is to read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - DescribeIndexStatsResponse - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_host_index"] = kwargs.get("_host_index") - kwargs["describe_index_stats_request"] = describe_index_stats_request - return self.call_with_http_info(**kwargs) - - self.describe_index_stats = _Endpoint( - settings={ - "response_type": (DescribeIndexStatsResponse,), - "auth": ["ApiKeyAuth"], - "endpoint_path": "/describe_index_stats", - "operation_id": "describe_index_stats", - "http_method": "POST", - "servers": None, - }, - params_map={ - "all": [ - "describe_index_stats_request", - ], - "required": [ - "describe_index_stats_request", - ], - "nullable": [], - "enum": [], - "validation": [], - }, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": { - "describe_index_stats_request": (DescribeIndexStatsRequest,), - }, - "attribute_map": {}, - "location_map": { - "describe_index_stats_request": "body", - }, - "collection_format_map": {}, - }, - headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, - api_client=api_client, - callable=__describe_index_stats, + @validate_arguments + def describe_index_stats( + self, describe_index_stats_request: DescribeIndexStatsRequest, **kwargs + ) -> DescribeIndexStatsResponse: # noqa: E501 + """DescribeIndexStats # noqa: E501 + + The `DescribeIndexStats` operation returns statistics about the index's contents, including the vector count per namespace, the number of dimensions, and the index fullness. 
The index fullness result may be inaccurate during pod resizing; to get the status of a pod resizing process, use [`describe_index`](https://www.pinecone.io/docs/api/operation/describe_index/). # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.describe_index_stats(describe_index_stats_request, async_req=True) + >>> result = thread.get() + + :param describe_index_stats_request: (required) + :type describe_index_stats_request: DescribeIndexStatsRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _request_timeout: timeout setting for this request. + If one number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: DescribeIndexStatsResponse + """ + kwargs["_return_http_data_only"] = True + if "_preload_content" in kwargs: + message = "Error! Please call the describe_index_stats_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 + raise ValueError(message) + return self.describe_index_stats_with_http_info(describe_index_stats_request, **kwargs) # noqa: E501 + + @validate_arguments + def describe_index_stats_with_http_info( + self, describe_index_stats_request: DescribeIndexStatsRequest, **kwargs + ) -> ApiResponse: # noqa: E501 + """DescribeIndexStats # noqa: E501 + + The `DescribeIndexStats` operation returns statistics about the index's contents, including the vector count per namespace, the number of dimensions, and the index fullness. The index fullness result may be inaccurate during pod resizing; to get the status of a pod resizing process, use [`describe_index`](https://www.pinecone.io/docs/api/operation/describe_index/). 
# noqa: E501
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async_req=True
+
+ >>> thread = api.describe_index_stats_with_http_info(describe_index_stats_request, async_req=True)
+ >>> result = thread.get()
+
+ :param describe_index_stats_request: (required)
+ :type describe_index_stats_request: DescribeIndexStatsRequest
+ :param async_req: Whether to execute the request asynchronously.
+ :type async_req: bool, optional
+ :param _preload_content: if False, the ApiResponse.data will
+ be set to none and raw_data will store the
+ HTTP response body without reading/decoding.
+ Default is True.
+ :type _preload_content: bool, optional
+ :param _return_http_data_only: response data instead of ApiResponse
+ object with status code, headers, etc
+ :type _return_http_data_only: bool, optional
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication
+ in the spec for a single request.
+ :type _request_auth: dict, optional
+ :type _content_type: string, optional: force content-type for the request
+ :return: Returns the result object.
+ If the method is called asynchronously,
+ returns the request thread.
+ :rtype: tuple(DescribeIndexStatsResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + _params = locals() + + _all_params = ["describe_index_stats_request"] + _all_params.extend( + [ + "async_req", + "_return_http_data_only", + "_preload_content", + "_request_timeout", + "_request_auth", + "_content_type", + "_headers", + ] ) - def __describe_index_stats1(self, **kwargs): - """DescribeIndexStats # noqa: E501 - - The `DescribeIndexStats` operation returns statistics about the index's contents, including the vector count per namespace, the number of dimensions, and the index fullness. The index fullness result may be inaccurate during pod resizing; to get the status of a pod resizing process, use [`describe_index`](https://www.pinecone.io/docs/api/operation/describe_index/). # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, pass async_req=True - - >>> thread = api.describe_index_stats1(async_req=True) - >>> result = thread.get() - - - Keyword Args: - _return_http_data_only (bool): Response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - is returned without reading or decoding response data. - Default is True. - _request_timeout (int/float/tuple): The timeout setting for this request. If - one number is provided, it is the total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): Specifies if type checking - should be done on the data sent to the server. - Default is True. - _check_return_type (bool): Specifies if type checking - should be done on the data received from the server. - Default is True. - _host_index (int/None): Specifies the index of the server - that we want to use. - Default is to read from the configuration. 
- async_req (bool): execute request asynchronously - - Returns: - DescribeIndexStatsResponse - If the method is called asynchronously, returns the request - thread. - """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_host_index"] = kwargs.get("_host_index") - return self.call_with_http_info(**kwargs) - - self.describe_index_stats1 = _Endpoint( - settings={ - "response_type": (DescribeIndexStatsResponse,), - "auth": ["ApiKeyAuth"], - "endpoint_path": "/describe_index_stats", - "operation_id": "describe_index_stats1", - "http_method": "GET", - "servers": None, - }, - params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": {}, - "attribute_map": {}, - "location_map": {}, - "collection_format_map": {}, - }, - headers_map={ - "accept": ["application/json"], - "content_type": [], - }, - api_client=api_client, - callable=__describe_index_stats1, + # validate the arguments + for _key, _val in _params["kwargs"].items(): + if _key not in _all_params: + raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method describe_index_stats" % _key) + _params[_key] = _val + del _params["kwargs"] + + _collection_formats = {} + + # process the path parameters + _path_params = {} + + # process the query parameters + _query_params = [] + # process the header parameters + _header_params = dict(_params.get("_headers", {})) + # process the form parameters + _form_params = [] + _files = {} + # process the body parameter + _body_params = None + if _params["describe_index_stats_request"] is not None: + 
_body_params = _params["describe_index_stats_request"] + + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # noqa: E501 + + # set the HTTP header `Content-Type` + _content_types_list = _params.get( + "_content_type", self.api_client.select_header_content_type(["application/json"]) + ) + if _content_types_list: + _header_params["Content-Type"] = _content_types_list + + # authentication setting + _auth_settings = ["ApiKeyAuth"] # noqa: E501 + + _response_types_map = { + "200": "DescribeIndexStatsResponse", + } + + return self.api_client.call_api( + "/describe_index_stats", + "POST", + _path_params, + _query_params, + _header_params, + body=_body_params, + post_params=_form_params, + files=_files, + response_types_map=_response_types_map, + auth_settings=_auth_settings, + async_req=_params.get("async_req"), + _return_http_data_only=_params.get("_return_http_data_only"), # noqa: E501 + _preload_content=_params.get("_preload_content", True), + _request_timeout=_params.get("_request_timeout"), + collection_formats=_collection_formats, + _request_auth=_params.get("_request_auth"), + ) + + @validate_arguments + def describe_index_stats1(self, **kwargs) -> DescribeIndexStatsResponse: # noqa: E501 + """DescribeIndexStats # noqa: E501 + + The `DescribeIndexStats` operation returns statistics about the index's contents, including the vector count per namespace, the number of dimensions, and the index fullness. The index fullness result may be inaccurate during pod resizing; to get the status of a pod resizing process, use [`describe_index`](https://www.pinecone.io/docs/api/operation/describe_index/). # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.describe_index_stats1(async_req=True) + >>> result = thread.get() + + :param async_req: Whether to execute the request asynchronously. 
+ :type async_req: bool, optional + :param _request_timeout: timeout setting for this request. + If one number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: DescribeIndexStatsResponse + """ + kwargs["_return_http_data_only"] = True + if "_preload_content" in kwargs: + message = "Error! Please call the describe_index_stats1_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 + raise ValueError(message) + return self.describe_index_stats1_with_http_info(**kwargs) # noqa: E501 + + @validate_arguments + def describe_index_stats1_with_http_info(self, **kwargs) -> ApiResponse: # noqa: E501 + """DescribeIndexStats # noqa: E501 + + The `DescribeIndexStats` operation returns statistics about the index's contents, including the vector count per namespace, the number of dimensions, and the index fullness. The index fullness result may be inaccurate during pod resizing; to get the status of a pod resizing process, use [`describe_index`](https://www.pinecone.io/docs/api/operation/describe_index/). # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.describe_index_stats1_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the ApiResponse.data will + be set to none and raw_data will store the + HTTP response body without reading/decoding. + Default is True. 
+ :type _preload_content: bool, optional + :param _return_http_data_only: response data instead of ApiResponse + object with status code, headers, etc + :type _return_http_data_only: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. + :type _request_auth: dict, optional + :type _content_type: string, optional: force content-type for the request + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(DescribeIndexStatsResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + _params = locals() + + _all_params = [] + _all_params.extend( + [ + "async_req", + "_return_http_data_only", + "_preload_content", + "_request_timeout", + "_request_auth", + "_content_type", + "_headers", + ] + ) + + # validate the arguments + for _key, _val in _params["kwargs"].items(): + if _key not in _all_params: + raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method describe_index_stats1" % _key) + _params[_key] = _val + del _params["kwargs"] + + _collection_formats = {} + + # process the path parameters + _path_params = {} + + # process the query parameters + _query_params = [] + # process the header parameters + _header_params = dict(_params.get("_headers", {})) + # process the form parameters + _form_params = [] + _files = {} + # process the body parameter + _body_params = None + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # noqa: E501 + + # authentication setting + _auth_settings = ["ApiKeyAuth"] # noqa: E501 + + _response_types_map = { + "200": "DescribeIndexStatsResponse", + } + + return 
self.api_client.call_api( + "/describe_index_stats", + "GET", + _path_params, + _query_params, + _header_params, + body=_body_params, + post_params=_form_params, + files=_files, + response_types_map=_response_types_map, + auth_settings=_auth_settings, + async_req=_params.get("async_req"), + _return_http_data_only=_params.get("_return_http_data_only"), # noqa: E501 + _preload_content=_params.get("_preload_content", True), + _request_timeout=_params.get("_request_timeout"), + collection_formats=_collection_formats, + _request_auth=_params.get("_request_auth"), + ) + + @validate_arguments + def fetch( + self, + ids: Annotated[ + conlist(StrictStr), + Field(..., description="The vector IDs to fetch. Does not accept values containing spaces."), + ], + namespace: Optional[StrictStr] = None, + **kwargs, + ) -> FetchResponse: # noqa: E501 + """Fetch # noqa: E501 + + The `Fetch` operation looks up and returns vectors, by ID, from a single namespace. The returned vectors include the vector data and/or metadata. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.fetch(ids, namespace, async_req=True) + >>> result = thread.get() + + :param ids: The vector IDs to fetch. Does not accept values containing spaces. (required) + :type ids: List[str] + :param namespace: + :type namespace: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _request_timeout: timeout setting for this request. + If one number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: FetchResponse + """ + kwargs["_return_http_data_only"] = True + if "_preload_content" in kwargs: + message = "Error! 
Please call the fetch_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501
+ raise ValueError(message)
+ return self.fetch_with_http_info(ids, namespace, **kwargs) # noqa: E501
+
+ @validate_arguments
+ def fetch_with_http_info(
+ self,
+ ids: Annotated[
+ conlist(StrictStr),
+ Field(..., description="The vector IDs to fetch. Does not accept values containing spaces."),
+ ],
+ namespace: Optional[StrictStr] = None,
+ **kwargs,
+ ) -> ApiResponse: # noqa: E501
+ """Fetch # noqa: E501
+
+ The `Fetch` operation looks up and returns vectors, by ID, from a single namespace. The returned vectors include the vector data and/or metadata. # noqa: E501
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async_req=True
+
+ >>> thread = api.fetch_with_http_info(ids, namespace, async_req=True)
+ >>> result = thread.get()
+
+ :param ids: The vector IDs to fetch. Does not accept values containing spaces. (required)
+ :type ids: List[str]
+ :param namespace:
+ :type namespace: str
+ :param async_req: Whether to execute the request asynchronously.
+ :type async_req: bool, optional
+ :param _preload_content: if False, the ApiResponse.data will
+ be set to none and raw_data will store the
+ HTTP response body without reading/decoding.
+ Default is True.
+ :type _preload_content: bool, optional
+ :param _return_http_data_only: response data instead of ApiResponse
+ object with status code, headers, etc
+ :type _return_http_data_only: bool, optional
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication
+ in the spec for a single request.
+ :type _request_auth: dict, optional + :type _content_type: string, optional: force content-type for the request + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(FetchResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + _params = locals() + + _all_params = ["ids", "namespace"] + _all_params.extend( + [ + "async_req", + "_return_http_data_only", + "_preload_content", + "_request_timeout", + "_request_auth", + "_content_type", + "_headers", + ] + ) + + # validate the arguments + for _key, _val in _params["kwargs"].items(): + if _key not in _all_params: + raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method fetch" % _key) + _params[_key] = _val + del _params["kwargs"] + + _collection_formats = {} + + # process the path parameters + _path_params = {} + + # process the query parameters + _query_params = [] + if _params.get("ids") is not None: # noqa: E501 + _query_params.append(("ids", _params["ids"])) + _collection_formats["ids"] = "multi" + + if _params.get("namespace") is not None: # noqa: E501 + _query_params.append(("namespace", _params["namespace"])) + + # process the header parameters + _header_params = dict(_params.get("_headers", {})) + # process the form parameters + _form_params = [] + _files = {} + # process the body parameter + _body_params = None + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # noqa: E501 + + # authentication setting + _auth_settings = ["ApiKeyAuth"] # noqa: E501 + + _response_types_map = { + "200": "FetchResponse", + } + + return self.api_client.call_api( + "/vectors/fetch", + "GET", + _path_params, + _query_params, + _header_params, + body=_body_params, + post_params=_form_params, + files=_files, + response_types_map=_response_types_map, + auth_settings=_auth_settings, + async_req=_params.get("async_req"), + 
_return_http_data_only=_params.get("_return_http_data_only"), # noqa: E501 + _preload_content=_params.get("_preload_content", True), + _request_timeout=_params.get("_request_timeout"), + collection_formats=_collection_formats, + _request_auth=_params.get("_request_auth"), + ) + + @validate_arguments + def query(self, query_request: QueryRequest, **kwargs) -> QueryResponse: # noqa: E501 + """Query # noqa: E501 + + The `Query` operation searches a namespace, using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.query(query_request, async_req=True) + >>> result = thread.get() + + :param query_request: (required) + :type query_request: QueryRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _request_timeout: timeout setting for this request. + If one number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: QueryResponse + """ + kwargs["_return_http_data_only"] = True + if "_preload_content" in kwargs: + message = "Error! Please call the query_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 + raise ValueError(message) + return self.query_with_http_info(query_request, **kwargs) # noqa: E501 + + @validate_arguments + def query_with_http_info(self, query_request: QueryRequest, **kwargs) -> ApiResponse: # noqa: E501 + """Query # noqa: E501 + + The `Query` operation searches a namespace, using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. 
# noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.query_with_http_info(query_request, async_req=True) + >>> result = thread.get() + + :param query_request: (required) + :type query_request: QueryRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the ApiResponse.data will + be set to none and raw_data will store the + HTTP response body without reading/decoding. + Default is True. + :type _preload_content: bool, optional + :param _return_http_data_only: response data instead of ApiResponse + object with status code, headers, etc + :type _return_http_data_only: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param _request_auth: set to override the auth_settings for a single + request; this effectively ignores the authentication + in the spec for a single request. + :type _request_auth: dict, optional + :type _content_type: string, optional: force content-type for the request + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
+ :rtype: tuple(QueryResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + _params = locals() + + _all_params = ["query_request"] + _all_params.extend( + [ + "async_req", + "_return_http_data_only", + "_preload_content", + "_request_timeout", + "_request_auth", + "_content_type", + "_headers", + ] + ) + + # validate the arguments + for _key, _val in _params["kwargs"].items(): + if _key not in _all_params: + raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method query" % _key) + _params[_key] = _val + del _params["kwargs"] + + _collection_formats = {} + + # process the path parameters + _path_params = {} + + # process the query parameters + _query_params = [] + # process the header parameters + _header_params = dict(_params.get("_headers", {})) + # process the form parameters + _form_params = [] + _files = {} + # process the body parameter + _body_params = None + if _params["query_request"] is not None: + _body_params = _params["query_request"] + + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # noqa: E501 + + # set the HTTP header `Content-Type` + _content_types_list = _params.get( + "_content_type", self.api_client.select_header_content_type(["application/json"]) + ) + if _content_types_list: + _header_params["Content-Type"] = _content_types_list + + # authentication setting + _auth_settings = ["ApiKeyAuth"] # noqa: E501 + + _response_types_map = { + "200": "QueryResponse", + } + + return self.api_client.call_api( + "/query", + "POST", + _path_params, + _query_params, + _header_params, + body=_body_params, + post_params=_form_params, + files=_files, + response_types_map=_response_types_map, + auth_settings=_auth_settings, + async_req=_params.get("async_req"), + _return_http_data_only=_params.get("_return_http_data_only"), # noqa: E501 + _preload_content=_params.get("_preload_content", True), + _request_timeout=_params.get("_request_timeout"), + 
collection_formats=_collection_formats, + _request_auth=_params.get("_request_auth"), ) - def __fetch(self, ids, **kwargs): - """Fetch # noqa: E501 - - The `Fetch` operation looks up and returns vectors by ID from a single namespace. The returned vectors include the vector data and metadata. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, pass async_req=True - - >>> thread = api.fetch(ids, async_req=True) - >>> result = thread.get() - - Args: - ids ([str]): The vector IDs to fetch. Does not accept values containing spaces. - - Keyword Args: - namespace (str): [optional] - _return_http_data_only (bool): Response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - is returned without reading or decoding response data. - Default is True. - _request_timeout (int/float/tuple): The timeout setting for this request. If - one number is provided, it is the total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): Specifies if type checking - should be done on the data sent to the server. - Default is True. - _check_return_type (bool): Specifies if type checking - should be done on the data received from the server. - Default is True. - _host_index (int/None): Specifies the index of the server - that we want to use. - Default is to read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - FetchResponse - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_host_index"] = kwargs.get("_host_index") - kwargs["ids"] = ids - return self.call_with_http_info(**kwargs) - - self.fetch = _Endpoint( - settings={ - "response_type": (FetchResponse,), - "auth": ["ApiKeyAuth"], - "endpoint_path": "/vectors/fetch", - "operation_id": "fetch", - "http_method": "GET", - "servers": None, - }, - params_map={ - "all": [ - "ids", - "namespace", - ], - "required": [ - "ids", - ], - "nullable": [], - "enum": [], - "validation": [], - }, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": { - "ids": ([str],), - "namespace": (str,), - }, - "attribute_map": { - "ids": "ids", - "namespace": "namespace", - }, - "location_map": { - "ids": "query", - "namespace": "query", - }, - "collection_format_map": { - "ids": "multi", - }, - }, - headers_map={ - "accept": ["application/json"], - "content_type": [], - }, - api_client=api_client, - callable=__fetch, + @validate_arguments + def update(self, update_request: UpdateRequest, **kwargs) -> object: # noqa: E501 + """Update # noqa: E501 + + The `Update` operation updates vector in a namespace. If a value is included, it will overwrite the previous value. If a set_metadata is included, the values of the fields specified in it will be added or overwrite the previous value. # noqa: E501 + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.update(update_request, async_req=True) + >>> result = thread.get() + + :param update_request: (required) + :type update_request: UpdateRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _request_timeout: timeout setting for this request. + If one number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: object + """ + kwargs["_return_http_data_only"] = True + if "_preload_content" in kwargs: + message = "Error! Please call the update_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 + raise ValueError(message) + return self.update_with_http_info(update_request, **kwargs) # noqa: E501 + + @validate_arguments + def update_with_http_info(self, update_request: UpdateRequest, **kwargs) -> ApiResponse: # noqa: E501 + """Update # noqa: E501 + + The `Update` operation updates vector in a namespace. If a value is included, it will overwrite the previous value. If a set_metadata is included, the values of the fields specified in it will be added or overwrite the previous value. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.update_with_http_info(update_request, async_req=True) + >>> result = thread.get() + + :param update_request: (required) + :type update_request: UpdateRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the ApiResponse.data will + be set to none and raw_data will store the + HTTP response body without reading/decoding. + Default is True. 
+ :type _preload_content: bool, optional + :param _return_http_data_only: response data instead of ApiResponse + object with status code, headers, etc + :type _return_http_data_only: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param _request_auth: set to override the auth_settings for a single + request; this effectively ignores the authentication + in the spec for a single request. + :type _request_auth: dict, optional + :type _content_type: string, optional: force content-type for the request + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(object, status_code(int), headers(HTTPHeaderDict)) + """ + + _params = locals() + + _all_params = ["update_request"] + _all_params.extend( + [ + "async_req", + "_return_http_data_only", + "_preload_content", + "_request_timeout", + "_request_auth", + "_content_type", + "_headers", + ] ) - def __query(self, query_request, **kwargs): - """Query # noqa: E501 - - The `Query` operation searches a namespace using a query vector. It retrieves the IDs of the most similar records in a namespace along with their similarity scores. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, pass async_req=True - - >>> thread = api.query(query_request, async_req=True) - >>> result = thread.get() - - Args: - query_request (QueryRequest): - - Keyword Args: - _return_http_data_only (bool): Return response data without head status - code and headers. Default is True. - _preload_content (bool): If False, the urllib3.HTTPResponse object - is returned without reading or decoding response data. - Default is True. - _request_timeout (int/float/tuple): The timeout setting for this request. If - one number is provided, it is the total request timeout. 
It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): Specifies if type checking - should be done on the data sent to the server. - Default is True. - _check_return_type (bool): Specifies if type checking - should be done on the data received from the server. - Default is True. - _host_index (int/None): Specifies the index of the server - that we want to use. - Default is to read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - QueryResponse - If the method is called asynchronously, returns the request - thread. - """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_host_index"] = kwargs.get("_host_index") - kwargs["query_request"] = query_request - return self.call_with_http_info(**kwargs) - - self.query = _Endpoint( - settings={ - "response_type": (QueryResponse,), - "auth": ["ApiKeyAuth"], - "endpoint_path": "/query", - "operation_id": "query", - "http_method": "POST", - "servers": None, - }, - params_map={ - "all": [ - "query_request", - ], - "required": [ - "query_request", - ], - "nullable": [], - "enum": [], - "validation": [], - }, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": { - "query_request": (QueryRequest,), - }, - "attribute_map": {}, - "location_map": { - "query_request": "body", - }, - "collection_format_map": {}, - }, - headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, - api_client=api_client, - callable=__query, + # validate the arguments + for _key, _val in _params["kwargs"].items(): + if _key not in _all_params: + 
raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method update" % _key) + _params[_key] = _val + del _params["kwargs"] + + _collection_formats = {} + + # process the path parameters + _path_params = {} + + # process the query parameters + _query_params = [] + # process the header parameters + _header_params = dict(_params.get("_headers", {})) + # process the form parameters + _form_params = [] + _files = {} + # process the body parameter + _body_params = None + if _params["update_request"] is not None: + _body_params = _params["update_request"] + + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # noqa: E501 + + # set the HTTP header `Content-Type` + _content_types_list = _params.get( + "_content_type", self.api_client.select_header_content_type(["application/json"]) + ) + if _content_types_list: + _header_params["Content-Type"] = _content_types_list + + # authentication setting + _auth_settings = ["ApiKeyAuth"] # noqa: E501 + + _response_types_map = { + "200": "object", + } + + return self.api_client.call_api( + "/vectors/update", + "POST", + _path_params, + _query_params, + _header_params, + body=_body_params, + post_params=_form_params, + files=_files, + response_types_map=_response_types_map, + auth_settings=_auth_settings, + async_req=_params.get("async_req"), + _return_http_data_only=_params.get("_return_http_data_only"), # noqa: E501 + _preload_content=_params.get("_preload_content", True), + _request_timeout=_params.get("_request_timeout"), + collection_formats=_collection_formats, + _request_auth=_params.get("_request_auth"), ) - def __update(self, update_request, **kwargs): - """Update # noqa: E501 - - The `Update` operation updates a vector in a namespace. If a value is included, it overwrites the previous value. If set_metadata is included, the values of the fields specified in it are added to or overwrite the previous values. 
# noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, pass async_req=True - - >>> thread = api.update(update_request, async_req=True) - >>> result = thread.get() - - Args: - update_request (UpdateRequest): - - Keyword Args: - _return_http_data_only (bool): Response head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - is returned without reading or decoding response data. - Default is True. - _request_timeout (int/float/tuple): The timeout setting for this request. If - one number is provided, it is the total request timeout. It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): Specifies if type checking - should be done on the data sent to the server. - Default is True. - _check_return_type (bool): Specifies if type checking - should be done on the data received from the server. - Default is True. - _host_index (int/None): Specifies the index of the server - that we want to use. - Default is to read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - {str: (bool, date, datetime, dict, float, int, list, str, none_type)} - If the method is called asynchronously, returns the request - thread. 
- """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_host_index"] = kwargs.get("_host_index") - kwargs["update_request"] = update_request - return self.call_with_http_info(**kwargs) - - self.update = _Endpoint( - settings={ - "response_type": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), - "auth": ["ApiKeyAuth"], - "endpoint_path": "/vectors/update", - "operation_id": "update", - "http_method": "POST", - "servers": None, - }, - params_map={ - "all": [ - "update_request", - ], - "required": [ - "update_request", - ], - "nullable": [], - "enum": [], - "validation": [], - }, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": { - "update_request": (UpdateRequest,), - }, - "attribute_map": {}, - "location_map": { - "update_request": "body", - }, - "collection_format_map": {}, - }, - headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, - api_client=api_client, - callable=__update, + @validate_arguments + def upsert(self, upsert_request: UpsertRequest, **kwargs) -> UpsertResponse: # noqa: E501 + """Upsert # noqa: E501 + + The `Upsert` operation writes vectors into a namespace. If a new value is upserted for an existing vector id, it will overwrite the previous value. # noqa: E501 + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.upsert(upsert_request, async_req=True) + >>> result = thread.get() + + :param upsert_request: (required) + :type upsert_request: UpsertRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _request_timeout: timeout setting for this request. + If one number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: UpsertResponse + """ + kwargs["_return_http_data_only"] = True + if "_preload_content" in kwargs: + message = "Error! Please call the upsert_with_http_info method with `_preload_content` instead and obtain raw data from ApiResponse.raw_data" # noqa: E501 + raise ValueError(message) + return self.upsert_with_http_info(upsert_request, **kwargs) # noqa: E501 + + @validate_arguments + def upsert_with_http_info(self, upsert_request: UpsertRequest, **kwargs) -> ApiResponse: # noqa: E501 + """Upsert # noqa: E501 + + The `Upsert` operation writes vectors into a namespace. If a new value is upserted for an existing vector id, it will overwrite the previous value. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.upsert_with_http_info(upsert_request, async_req=True) + >>> result = thread.get() + + :param upsert_request: (required) + :type upsert_request: UpsertRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the ApiResponse.data will + be set to none and raw_data will store the + HTTP response body without reading/decoding. + Default is True. 
+ :type _preload_content: bool, optional + :param _return_http_data_only: response data instead of ApiResponse + object with status code, headers, etc + :type _return_http_data_only: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param _request_auth: set to override the auth_settings for a single + request; this effectively ignores the authentication + in the spec for a single request. + :type _request_auth: dict, optional + :type _content_type: string, optional: force content-type for the request + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(UpsertResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + _params = locals() + + _all_params = ["upsert_request"] + _all_params.extend( + [ + "async_req", + "_return_http_data_only", + "_preload_content", + "_request_timeout", + "_request_auth", + "_content_type", + "_headers", + ] ) - def __upsert(self, upsert_request, **kwargs): - """Upsert # noqa: E501 - - The `Upsert` operation writes vectors into a namespace. If a new value is upserted for an existing vector ID, it overwrites the previous value. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, pass async_req=True - - >>> thread = api.upsert(upsert_request, async_req=True) - >>> result = thread.get() - - Args: - upsert_request (UpsertRequest): - - Keyword Args: - _return_http_data_only (bool): Return response data without head status - code and headers. Default is True. - _preload_content (bool): if False, the urllib3.HTTPResponse object - is returned without reading or decoding response data. - Default is True. - _request_timeout (int/float/tuple): The timeout setting for this request. If - one number is provided, it is the total request timeout. 
It can also - be a pair (tuple) of (connection, read) timeouts. - Default is None. - _check_input_type (bool): Specifies if type checking - should be done on the data sent to the server. - Default is True. - _check_return_type (bool): Specifies if type checking - should be done on the data received from the server. - Default is True. - _host_index (int/None): Specifies the index of the server - that we want to use. - Default is to read from the configuration. - async_req (bool): execute request asynchronously - - Returns: - UpsertResponse - If the method is called asynchronously, returns the request - thread. - """ - kwargs["async_req"] = kwargs.get("async_req", False) - kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) - kwargs["_preload_content"] = kwargs.get("_preload_content", True) - kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) - kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) - kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) - kwargs["_host_index"] = kwargs.get("_host_index") - kwargs["upsert_request"] = upsert_request - return self.call_with_http_info(**kwargs) - - self.upsert = _Endpoint( - settings={ - "response_type": (UpsertResponse,), - "auth": ["ApiKeyAuth"], - "endpoint_path": "/vectors/upsert", - "operation_id": "upsert", - "http_method": "POST", - "servers": None, - }, - params_map={ - "all": [ - "upsert_request", - ], - "required": [ - "upsert_request", - ], - "nullable": [], - "enum": [], - "validation": [], - }, - root_map={ - "validations": {}, - "allowed_values": {}, - "openapi_types": { - "upsert_request": (UpsertRequest,), - }, - "attribute_map": {}, - "location_map": { - "upsert_request": "body", - }, - "collection_format_map": {}, - }, - headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, - api_client=api_client, - callable=__upsert, + # validate the arguments + for _key, _val in _params["kwargs"].items(): + if _key not 
in _all_params: + raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method upsert" % _key) + _params[_key] = _val + del _params["kwargs"] + + _collection_formats = {} + + # process the path parameters + _path_params = {} + + # process the query parameters + _query_params = [] + # process the header parameters + _header_params = dict(_params.get("_headers", {})) + # process the form parameters + _form_params = [] + _files = {} + # process the body parameter + _body_params = None + if _params["upsert_request"] is not None: + _body_params = _params["upsert_request"] + + # set the HTTP header `Accept` + _header_params["Accept"] = self.api_client.select_header_accept(["application/json"]) # noqa: E501 + + # set the HTTP header `Content-Type` + _content_types_list = _params.get( + "_content_type", self.api_client.select_header_content_type(["application/json"]) + ) + if _content_types_list: + _header_params["Content-Type"] = _content_types_list + + # authentication setting + _auth_settings = ["ApiKeyAuth"] # noqa: E501 + + _response_types_map = { + "200": "UpsertResponse", + } + + return self.api_client.call_api( + "/vectors/upsert", + "POST", + _path_params, + _query_params, + _header_params, + body=_body_params, + post_params=_form_params, + files=_files, + response_types_map=_response_types_map, + auth_settings=_auth_settings, + async_req=_params.get("async_req"), + _return_http_data_only=_params.get("_return_http_data_only"), # noqa: E501 + _preload_content=_params.get("_preload_content", True), + _request_timeout=_params.get("_request_timeout"), + collection_formats=_collection_formats, + _request_auth=_params.get("_request_auth"), ) diff --git a/pinecone/core/client/api_client.py b/pinecone/core/client/api_client.py index 8084c532..3c5bf896 100644 --- a/pinecone/core/client/api_client.py +++ b/pinecone/core/client/api_client.py @@ -1,46 +1,38 @@ +# coding: utf-8 + """ Pinecone API - No description provided (generated by Openapi Generator 
https://github.com/openapitools/openapi-generator) # noqa: E501 + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) The version of the OpenAPI document: version not set Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 -import json import atexit +import datetime +from dateutil.parser import parse +import json import mimetypes from multiprocessing.pool import ThreadPool -import io import os import re -import typing -from urllib.parse import quote -from urllib3.fields import RequestField +import tempfile +from urllib.parse import quote -from pinecone.core.client import rest from pinecone.core.client.configuration import Configuration -from pinecone.core.client.exceptions import ApiTypeError, ApiValueError, ApiException -from pinecone.core.client.model_utils import ( - ModelNormal, - ModelSimple, - ModelComposed, - check_allowed_values, - check_validations, - date, - datetime, - deserialize_file, - file_type, - model_to_dict, - none_type, - validate_and_convert_types, -) - - -class ApiClient(object): +from pinecone.core.client.api_response import ApiResponse +import pinecone.core.client.models +from pinecone.core.client import rest +from pinecone.core.client.exceptions import ApiValueError, ApiException + + +class ApiClient: """Generic API client for OpenAPI client library builds. OpenAPI generic API client. This client handles the client- @@ -48,10 +40,6 @@ class ApiClient(object): the methods and models for each application are generated from the OpenAPI templates. - NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - Do not edit the class manually. - :param configuration: .Configuration object for this client :param header_name: a header to pass when making calls to the API. 
:param header_value: a header value to pass when making calls to @@ -62,11 +50,23 @@ class ApiClient(object): to the API. More threads means more concurrent API requests. """ + PRIMITIVE_TYPES = (float, bool, bytes, str, int) + NATIVE_TYPES_MAPPING = { + "int": int, + "long": int, # TODO remove as only py3 is supported? + "float": float, + "str": str, + "bool": bool, + "date": datetime.date, + "datetime": datetime.datetime, + "object": object, + } _pool = None - def __init__(self, configuration=None, header_name=None, header_value=None, cookie=None, pool_threads=1): + def __init__(self, configuration=None, header_name=None, header_value=None, cookie=None, pool_threads=1) -> None: + # use default configuration if none is provided if configuration is None: - configuration = Configuration.get_default_copy() + configuration = Configuration.get_default() self.configuration = configuration self.pool_threads = pool_threads @@ -77,6 +77,7 @@ def __init__(self, configuration=None, header_name=None, header_value=None, cook self.cookie = cookie # Set default User-Agent. self.user_agent = "OpenAPI-Generator/1.0.0/python" + self.client_side_validation = configuration.client_side_validation def __enter__(self): return self @@ -114,24 +115,50 @@ def user_agent(self, value): def set_default_header(self, header_name, header_value): self.default_headers[header_name] = header_value + _default = None + + @classmethod + def get_default(cls): + """Return new instance of ApiClient. + + This method returns newly created, based on default constructor, + object of ApiClient class or returns a copy of default + ApiClient. + + :return: The ApiClient object. + """ + if cls._default is None: + cls._default = ApiClient() + return cls._default + + @classmethod + def set_default(cls, default): + """Set default instance of ApiClient. + + It stores default ApiClient. + + :param default: object of ApiClient. 
+ """ + cls._default = default + def __call_api( self, - resource_path: str, - method: str, - path_params: typing.Optional[typing.Dict[str, typing.Any]] = None, - query_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None, - header_params: typing.Optional[typing.Dict[str, typing.Any]] = None, - body: typing.Optional[typing.Any] = None, - post_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None, - files: typing.Optional[typing.Dict[str, typing.List[io.IOBase]]] = None, - response_type: typing.Optional[typing.Tuple[typing.Any]] = None, - auth_settings: typing.Optional[typing.List[str]] = None, - _return_http_data_only: typing.Optional[bool] = None, - collection_formats: typing.Optional[typing.Dict[str, str]] = None, - _preload_content: bool = True, - _request_timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None, - _host: typing.Optional[str] = None, - _check_type: typing.Optional[bool] = None, + resource_path, + method, + path_params=None, + query_params=None, + header_params=None, + body=None, + post_params=None, + files=None, + response_types_map=None, + auth_settings=None, + _return_http_data_only=None, + collection_formats=None, + _preload_content=True, + _request_timeout=None, + _host=None, + _request_auth=None, ): config = self.configuration @@ -152,27 +179,22 @@ def __call_api( # specified safe chars, encode everything resource_path = resource_path.replace("{%s}" % k, quote(str(v), safe=config.safe_chars_for_path_param)) - # query parameters - if query_params: - query_params = self.sanitize_for_serialization(query_params) - query_params = self.parameters_to_tuples(query_params, collection_formats) - # post parameters if post_params or files: post_params = post_params if post_params else [] post_params = self.sanitize_for_serialization(post_params) post_params = self.parameters_to_tuples(post_params, collection_formats) post_params.extend(self.files_parameters(files)) - if 
header_params["Content-Type"].startswith("multipart"): - post_params = self.parameters_to_multipart(post_params, (dict)) + + # auth setting + self.update_params_for_auth( + header_params, query_params, auth_settings, resource_path, method, body, request_auth=_request_auth + ) # body if body: body = self.sanitize_for_serialization(body) - # auth setting - self.update_params_for_auth(header_params, query_params, auth_settings, resource_path, method, body) - # request url if _host is None: url = self.configuration.host + resource_path @@ -180,6 +202,12 @@ def __call_api( # use server/host defined in path or operation instead url = _host + resource_path + # query parameters + if query_params: + query_params = self.sanitize_for_serialization(query_params) + url_query = self.parameters_to_url_query(query_params, collection_formats) + url += "?" + url_query + try: # perform request and return response response_data = self.request( @@ -193,60 +221,51 @@ def __call_api( _request_timeout=_request_timeout, ) except ApiException as e: - e.body = e.body.decode("utf-8") + if e.body: + e.body = e.body.decode("utf-8") raise e self.last_response = response_data - return_data = response_data - - if not _preload_content: - return return_data - return return_data + return_data = None # assuming derialization is not needed + # data needs deserialization or returns HTTP data (deserialized) only + if _preload_content or _return_http_data_only: + response_type = response_types_map.get(str(response_data.status), None) + if not response_type and isinstance(response_data.status, int) and 100 <= response_data.status <= 599: + # if not found, look for '1XX', '2XX', etc. 
+ response_type = response_types_map.get(str(response_data.status)[0] + "XX", None) - # deserialize response data - if response_type: - if response_type != (file_type,): - encoding = "utf-8" + if response_type == "bytearray": + response_data.data = response_data.data + else: + match = None content_type = response_data.getheader("content-type") if content_type is not None: - match = re.search(r"charset=([a-zA-Z\-\d]+)[\s\;]?", content_type) - if match: - encoding = match.group(1) + match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type) + encoding = match.group(1) if match else "utf-8" response_data.data = response_data.data.decode(encoding) - return_data = self.deserialize(response_data, response_type, _check_type) - else: - return_data = None + # deserialize response data + if response_type == "bytearray": + return_data = response_data.data + elif response_type: + return_data = self.deserialize(response_data, response_type) + else: + return_data = None if _return_http_data_only: return return_data else: - return (return_data, response_data.status, response_data.getheaders()) + return ApiResponse( + status_code=response_data.status, + data=return_data, + headers=response_data.getheaders(), + raw_data=response_data.data, + ) - def parameters_to_multipart(self, params, collection_types): - """Get parameters as list of tuples, formatting as json if value is collection_types + def sanitize_for_serialization(self, obj): + """Builds a JSON POST object. 
- :param params: Parameters as list of two-tuples - :param dict collection_types: Parameter collection types - :return: Parameters as list of tuple or urllib3.fields.RequestField - """ - new_params = [] - if collection_types is None: - collection_types = dict - for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501 - if isinstance(v, collection_types): # v is instance of collection_type, formatting as application/json - v = json.dumps(v, ensure_ascii=False).encode("utf-8") - field = RequestField(k, v) - field.make_multipart(content_type="application/json; charset=utf-8") - new_params.append(field) - else: - new_params.append((k, v)) - return new_params - - @classmethod - def sanitize_for_serialization(cls, obj): - """Prepares data for transmission before it is sent with the rest client If obj is None, return None. If obj is str, int, long, float, bool, return directly. If obj is datetime.datetime, datetime.date @@ -254,84 +273,111 @@ def sanitize_for_serialization(cls, obj): If obj is list, sanitize each element in the list. If obj is dict, return the dict. If obj is OpenAPI model, return the properties dict. - If obj is io.IOBase, return the bytes + :param obj: The data to serialize. :return: The serialized form of data. 
""" - if isinstance(obj, (ModelNormal, ModelComposed)): - return {key: cls.sanitize_for_serialization(val) for key, val in model_to_dict(obj, serialize=True).items()} - elif isinstance(obj, io.IOBase): - return cls.get_file_data_and_close_file(obj) - elif isinstance(obj, (str, int, float, none_type, bool)): + if obj is None: + return None + elif isinstance(obj, self.PRIMITIVE_TYPES): return obj - elif isinstance(obj, (datetime, date)): + elif isinstance(obj, list): + return [self.sanitize_for_serialization(sub_obj) for sub_obj in obj] + elif isinstance(obj, tuple): + return tuple(self.sanitize_for_serialization(sub_obj) for sub_obj in obj) + elif isinstance(obj, (datetime.datetime, datetime.date)): return obj.isoformat() - elif isinstance(obj, ModelSimple): - return cls.sanitize_for_serialization(obj.value) - elif isinstance(obj, (list, tuple)): - return [cls.sanitize_for_serialization(item) for item in obj] + if isinstance(obj, dict): - return {key: cls.sanitize_for_serialization(val) for key, val in obj.items()} - raise ApiValueError("Unable to prepare type {} for serialization".format(obj.__class__.__name__)) + obj_dict = obj + else: + # Convert model obj to dict except + # attributes `openapi_types`, `attribute_map` + # and attributes which value is not None. + # Convert attribute name to json key in + # model definition for request. + obj_dict = obj.to_dict() + + return {key: self.sanitize_for_serialization(val) for key, val in obj_dict.items()} - def deserialize(self, response, response_type, _check_type): + def deserialize(self, response, response_type): """Deserializes response into an object. :param response: RESTResponse object to be deserialized. 
- :param response_type: For the response, a tuple containing: - valid classes - a list containing valid classes (for list schemas) - a dict containing a tuple of valid classes as the value - Example values: - (str,) - (Pet,) - (float, none_type) - ([int, none_type],) - ({str: (bool, str, int, float, date, datetime, str, none_type)},) - :param _check_type: boolean, whether to check the types of the data - received from the server - :type _check_type: bool + :param response_type: class literal for + deserialized object, or string of class name. :return: deserialized object. """ # handle file downloading # save response body into a tmp file and return the instance - if response_type == (file_type,): - content_disposition = response.getheader("Content-Disposition") - return deserialize_file(response.data, self.configuration, content_disposition=content_disposition) + if response_type == "file": + return self.__deserialize_file(response) # fetch data from response object try: - received_data = json.loads(response.data) + data = json.loads(response.data) except ValueError: - received_data = response.data + data = response.data - # store our data under the key of 'received_data' so users have some - # context if they are deserializing a string and the data type is wrong - deserialized_data = validate_and_convert_types( - received_data, response_type, ["received_data"], True, _check_type, configuration=self.configuration - ) - return deserialized_data + return self.__deserialize(data, response_type) + + def __deserialize(self, data, klass): + """Deserializes dict, list, str into an object. + + :param data: dict, list or str. + :param klass: class literal, or string of class name. + + :return: object. 
+ """ + if data is None: + return None + + if isinstance(klass, str): + if klass.startswith("List["): + sub_kls = re.match(r"List\[(.*)]", klass).group(1) + return [self.__deserialize(sub_data, sub_kls) for sub_data in data] + + if klass.startswith("Dict["): + sub_kls = re.match(r"Dict\[([^,]*), (.*)]", klass).group(2) + return {k: self.__deserialize(v, sub_kls) for k, v in data.items()} + + # convert str to class + if klass in self.NATIVE_TYPES_MAPPING: + klass = self.NATIVE_TYPES_MAPPING[klass] + else: + klass = getattr(pinecone.core.client.models, klass) + + if klass in self.PRIMITIVE_TYPES: + return self.__deserialize_primitive(data, klass) + elif klass == object: + return self.__deserialize_object(data) + elif klass == datetime.date: + return self.__deserialize_date(data) + elif klass == datetime.datetime: + return self.__deserialize_datetime(data) + else: + return self.__deserialize_model(data, klass) def call_api( self, - resource_path: str, - method: str, - path_params: typing.Optional[typing.Dict[str, typing.Any]] = None, - query_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None, - header_params: typing.Optional[typing.Dict[str, typing.Any]] = None, - body: typing.Optional[typing.Any] = None, - post_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None, - files: typing.Optional[typing.Dict[str, typing.List[io.IOBase]]] = None, - response_type: typing.Optional[typing.Tuple[typing.Any]] = None, - auth_settings: typing.Optional[typing.List[str]] = None, - async_req: typing.Optional[bool] = None, - _return_http_data_only: typing.Optional[bool] = None, - collection_formats: typing.Optional[typing.Dict[str, str]] = None, - _preload_content: bool = True, - _request_timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None, - _host: typing.Optional[str] = None, - _check_type: typing.Optional[bool] = None, + resource_path, + method, + path_params=None, + query_params=None, + header_params=None, + 
body=None, + post_params=None, + files=None, + response_types_map=None, + auth_settings=None, + async_req=None, + _return_http_data_only=None, + collection_formats=None, + _preload_content=True, + _request_timeout=None, + _host=None, + _request_auth=None, ): """Makes the HTTP request (synchronous) and returns deserialized data. @@ -347,38 +393,26 @@ def call_api( :param post_params dict: Request post form parameters, for `application/x-www-form-urlencoded`, `multipart/form-data`. :param auth_settings list: Auth Settings names for the request. - :param response_type: For the response, a tuple containing: - valid classes - a list containing valid classes (for list schemas) - a dict containing a tuple of valid classes as the value - Example values: - (str,) - (Pet,) - (float, none_type) - ([int, none_type],) - ({str: (bool, str, int, float, date, datetime, str, none_type)},) - :param files: key -> field name, value -> a list of open file - objects for `multipart/form-data`. - :type files: dict + :param response: Response data type. + :param files dict: key -> filename, value -> filepath, + for `multipart/form-data`. :param async_req bool: execute request asynchronously - :type async_req: bool, optional - :param _return_http_data_only: response data without head status code - and headers - :type _return_http_data_only: bool, optional + :param _return_http_data_only: response data instead of ApiResponse + object with status code, headers, etc + :param _preload_content: if False, the ApiResponse.data will + be set to none and raw_data will store the + HTTP response body without reading/decoding. + Default is True. :param collection_formats: dict of collection formats for path, query, header, and post parameters. - :type collection_formats: dict, optional - :param _preload_content: if False, the urllib3.HTTPResponse object will - be returned without reading/decoding response - data. Default is True. 
- :type _preload_content: bool, optional :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. - :param _check_type: boolean describing if the data back from the server - should have its type checked. - :type _check_type: bool, optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. + :type _request_token: dict, optional :return: If async_req parameter is True, the request will be called asynchronously. @@ -396,14 +430,14 @@ def call_api( body, post_params, files, - response_type, + response_types_map, auth_settings, _return_http_data_only, collection_formats, _preload_content, _request_timeout, _host, - _check_type, + _request_auth, ) return self.pool.apply_async( @@ -417,14 +451,14 @@ def call_api( body, post_params, files, - response_type, + response_types_map, auth_settings, _return_http_data_only, collection_formats, _preload_content, _request_timeout, _host, - _check_type, + _request_auth, ), ) @@ -441,7 +475,7 @@ def request( ): """Makes the HTTP request using RESTClient.""" if method == "GET": - return self.rest_client.GET( + return self.rest_client.get_request( url, query_params=query_params, _preload_content=_preload_content, @@ -449,7 +483,7 @@ def request( headers=headers, ) elif method == "HEAD": - return self.rest_client.HEAD( + return self.rest_client.head_request( url, query_params=query_params, _preload_content=_preload_content, @@ -457,17 +491,15 @@ def request( headers=headers, ) elif method == "OPTIONS": - return self.rest_client.OPTIONS( + return self.rest_client.options_request( url, query_params=query_params, headers=headers, - post_params=post_params, _preload_content=_preload_content, _request_timeout=_request_timeout, - body=body, ) elif method == "POST": - return self.rest_client.POST( + return 
self.rest_client.post_request( url, query_params=query_params, headers=headers, @@ -477,7 +509,7 @@ def request( body=body, ) elif method == "PUT": - return self.rest_client.PUT( + return self.rest_client.put_request( url, query_params=query_params, headers=headers, @@ -487,7 +519,7 @@ def request( body=body, ) elif method == "PATCH": - return self.rest_client.PATCH( + return self.rest_client.patch_request( url, query_params=query_params, headers=headers, @@ -497,7 +529,7 @@ def request( body=body, ) elif method == "DELETE": - return self.rest_client.DELETE( + return self.rest_client.delete_request( url, query_params=query_params, headers=headers, @@ -537,39 +569,62 @@ def parameters_to_tuples(self, params, collection_formats): new_params.append((k, v)) return new_params - @staticmethod - def get_file_data_and_close_file(file_instance: io.IOBase) -> bytes: - file_data = file_instance.read() - file_instance.close() - return file_data + def parameters_to_url_query(self, params, collection_formats): + """Get parameters as list of tuples, formatting collections. - def files_parameters(self, files: typing.Optional[typing.Dict[str, typing.List[io.IOBase]]] = None): + :param params: Parameters as dict or list of two-tuples + :param dict collection_formats: Parameter collection formats + :return: URL query string (e.g. 
a=Hello%20World&b=123) + """ + new_params = [] + if collection_formats is None: + collection_formats = {} + for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501 + if isinstance(v, (int, float)): + v = str(v) + if isinstance(v, bool): + v = str(v).lower() + if isinstance(v, dict): + v = json.dumps(v) + + if k in collection_formats: + collection_format = collection_formats[k] + if collection_format == "multi": + new_params.extend((k, value) for value in v) + else: + if collection_format == "ssv": + delimiter = " " + elif collection_format == "tsv": + delimiter = "\t" + elif collection_format == "pipes": + delimiter = "|" + else: # csv is the default + delimiter = "," + new_params.append((k, delimiter.join(quote(str(value)) for value in v))) + else: + new_params.append((k, quote(str(v)))) + + return "&".join(["=".join(item) for item in new_params]) + + def files_parameters(self, files=None): """Builds form parameters. - :param files: None or a dict with key=param_name and - value is a list of open file objects - :return: List of tuples of form parameters with file data + :param files: File parameters. + :return: Form parameters with files. """ - if files is None: - return [] - params = [] - for param_name, file_instances in files.items(): - if file_instances is None: - # if the file field is nullable, skip None values - continue - for file_instance in file_instances: - if file_instance is None: - # if the file field is nullable, skip None values + + if files: + for k, v in files.items(): + if not v: continue - if file_instance.closed is True: - raise ApiValueError( - "Cannot read a closed file. The passed in file_type " "for %s must be open." 
% param_name - ) - filename = os.path.basename(file_instance.name) - filedata = self.get_file_data_and_close_file(file_instance) - mimetype = mimetypes.guess_type(filename)[0] or "application/octet-stream" - params.append(tuple([param_name, tuple([filename, filedata, mimetype])])) + file_names = v if type(v) is list else [v] + for n in file_names: + with open(n, "rb") as f: + filename = os.path.basename(f.name) + filedata = f.read() + mimetype = mimetypes.guess_type(filename)[0] or "application/octet-stream" + params.append(tuple([k, tuple([filename, filedata, mimetype])])) return params @@ -582,12 +637,11 @@ def select_header_accept(self, accepts): if not accepts: return - accepts = [x.lower() for x in accepts] + for accept in accepts: + if re.search("json", accept, re.IGNORECASE): + return accept - if "application/json" in accepts: - return "application/json" - else: - return ", ".join(accepts) + return accepts[0] def select_header_content_type(self, content_types): """Returns `Content-Type` based on an array of content_types provided. @@ -596,247 +650,139 @@ def select_header_content_type(self, content_types): :return: Content-Type (e.g. application/json). """ if not content_types: - return "application/json" + return None - content_types = [x.lower() for x in content_types] + for content_type in content_types: + if re.search("json", content_type, re.IGNORECASE): + return content_type - if "application/json" in content_types or "*/*" in content_types: - return "application/json" - else: - return content_types[0] + return content_types[0] - def update_params_for_auth(self, headers, querys, auth_settings, resource_path, method, body): + def update_params_for_auth(self, headers, queries, auth_settings, resource_path, method, body, request_auth=None): """Updates header and query params based on authentication setting. :param headers: Header parameters dict to be updated. - :param querys: Query parameters tuple list to be updated. 
+ :param queries: Query parameters tuple list to be updated. :param auth_settings: Authentication setting identifiers list. - :param resource_path: A string representation of the HTTP request resource path. - :param method: A string representation of the HTTP request method. - :param body: A object representing the body of the HTTP request. - The object type is the return value of _encoder.default(). + :resource_path: A string representation of the HTTP request resource path. + :method: A string representation of the HTTP request method. + :body: A object representing the body of the HTTP request. + The object type is the return value of sanitize_for_serialization(). + :param request_auth: if set, the provided settings will + override the token in the configuration. """ if not auth_settings: return + if request_auth: + self._apply_auth_params(headers, queries, resource_path, method, body, request_auth) + return + for auth in auth_settings: auth_setting = self.configuration.auth_settings().get(auth) if auth_setting: - if auth_setting["in"] == "cookie": - headers["Cookie"] = auth_setting["value"] - elif auth_setting["in"] == "header": - if auth_setting["type"] != "http-signature": - headers[auth_setting["key"]] = auth_setting["value"] - elif auth_setting["in"] == "query": - querys.append((auth_setting["key"], auth_setting["value"])) - else: - raise ApiValueError("Authentication token must be in `query` or `header`") - - -class Endpoint(object): - def __init__(self, settings=None, params_map=None, root_map=None, headers_map=None, api_client=None, callable=None): - """Creates an endpoint - - Args: - settings (dict): see below key value pairs - 'response_type' (tuple/None): response type - 'auth' (list): a list of auth type keys - 'endpoint_path' (str): the endpoint path - 'operation_id' (str): endpoint string identifier - 'http_method' (str): POST/PUT/PATCH/GET etc - 'servers' (list): list of str servers that this endpoint is at - params_map (dict): see below key value 
pairs - 'all' (list): list of str endpoint parameter names - 'required' (list): list of required parameter names - 'nullable' (list): list of nullable parameter names - 'enum' (list): list of parameters with enum values - 'validation' (list): list of parameters with validations - root_map - 'validations' (dict): the dict mapping endpoint parameter tuple - paths to their validation dictionaries - 'allowed_values' (dict): the dict mapping endpoint parameter - tuple paths to their allowed_values (enum) dictionaries - 'openapi_types' (dict): param_name to openapi type - 'attribute_map' (dict): param_name to camelCase name - 'location_map' (dict): param_name to 'body', 'file', 'form', - 'header', 'path', 'query' - collection_format_map (dict): param_name to `csv` etc. - headers_map (dict): see below key value pairs - 'accept' (list): list of Accept header strings - 'content_type' (list): list of Content-Type header strings - api_client (ApiClient) api client instance - callable (function): the function which is invoked when the - Endpoint is called + self._apply_auth_params(headers, queries, resource_path, method, body, auth_setting) + + def _apply_auth_params(self, headers, queries, resource_path, method, body, auth_setting): + """Updates the request parameters based on a single auth_setting + + :param headers: Header parameters dict to be updated. + :param queries: Query parameters tuple list to be updated. + :resource_path: A string representation of the HTTP request resource path. + :method: A string representation of the HTTP request method. + :body: A object representing the body of the HTTP request. + The object type is the return value of sanitize_for_serialization(). 
+ :param auth_setting: auth settings for the endpoint """ - self.settings = settings - self.params_map = params_map - self.params_map["all"].extend( - [ - "async_req", - "_host_index", - "_preload_content", - "_request_timeout", - "_return_http_data_only", - "_check_input_type", - "_check_return_type", - ] - ) - self.params_map["nullable"].extend(["_request_timeout"]) - self.validations = root_map["validations"] - self.allowed_values = root_map["allowed_values"] - self.openapi_types = root_map["openapi_types"] - extra_types = { - "async_req": (bool,), - "_host_index": (none_type, int), - "_preload_content": (bool,), - "_request_timeout": (none_type, float, (float,), [float], int, (int,), [int]), - "_return_http_data_only": (bool,), - "_check_input_type": (bool,), - "_check_return_type": (bool,), - } - self.openapi_types.update(extra_types) - self.attribute_map = root_map["attribute_map"] - self.location_map = root_map["location_map"] - self.collection_format_map = root_map["collection_format_map"] - self.headers_map = headers_map - self.api_client = api_client - self.callable = callable - - def __validate_inputs(self, kwargs): - for param in self.params_map["enum"]: - if param in kwargs: - check_allowed_values(self.allowed_values, (param,), kwargs[param]) - - for param in self.params_map["validation"]: - if param in kwargs: - check_validations( - self.validations, (param,), kwargs[param], configuration=self.api_client.configuration - ) - - if kwargs["_check_input_type"] is False: - return + if auth_setting["in"] == "cookie": + headers["Cookie"] = auth_setting["value"] + elif auth_setting["in"] == "header": + if auth_setting["type"] != "http-signature": + headers[auth_setting["key"]] = auth_setting["value"] + elif auth_setting["in"] == "query": + queries.append((auth_setting["key"], auth_setting["value"])) + else: + raise ApiValueError("Authentication token must be in `query` or `header`") - for key, value in kwargs.items(): - fixed_val = validate_and_convert_types( 
- value, - self.openapi_types[key], - [key], - False, - kwargs["_check_input_type"], - configuration=self.api_client.configuration, - ) - kwargs[key] = fixed_val - - def __gather_params(self, kwargs): - params = {"body": None, "collection_format": {}, "file": {}, "form": [], "header": {}, "path": {}, "query": []} - - for param_name, param_value in kwargs.items(): - param_location = self.location_map.get(param_name) - if param_location is None: - continue - if param_location: - if param_location == "body": - params["body"] = param_value - continue - base_name = self.attribute_map[param_name] - if param_location == "form" and self.openapi_types[param_name] == (file_type,): - params["file"][param_name] = [param_value] - elif param_location == "form" and self.openapi_types[param_name] == ([file_type],): - # param_value is already a list - params["file"][param_name] = param_value - elif param_location in {"form", "query"}: - param_value_full = (base_name, param_value) - params[param_location].append(param_value_full) - if param_location not in {"form", "query"}: - params[param_location][base_name] = param_value - collection_format = self.collection_format_map.get(param_name) - if collection_format: - params["collection_format"][base_name] = collection_format + def __deserialize_file(self, response): + """Deserializes body to file - return params + Saves response body into a file in a temporary folder, + using the filename from the `Content-Disposition` header if provided. + + :param response: RESTResponse. + :return: file path. 
+ """ + fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path) + os.close(fd) + os.remove(path) + + content_disposition = response.getheader("Content-Disposition") + if content_disposition: + filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', content_disposition).group(1) + path = os.path.join(os.path.dirname(path), filename) + + with open(path, "wb") as f: + f.write(response.data) - def __call__(self, *args, **kwargs): - """This method is invoked when endpoints are called - Example: + return path - api_instance = IndexOperationsApi() - api_instance.configure_index # this is an instance of the class Endpoint - api_instance.configure_index() # this invokes api_instance.configure_index.__call__() - which then invokes the callable functions stored in that endpoint at - api_instance.configure_index.callable or self.callable in this class + def __deserialize_primitive(self, data, klass): + """Deserializes string to primitive type. + :param data: str. + :param klass: class literal. + + :return: int, long, float, str, bool. """ - return self.callable(self, *args, **kwargs) + try: + return klass(data) + except UnicodeEncodeError: + return str(data) + except TypeError: + return data - def call_with_http_info(self, **kwargs): + def __deserialize_object(self, value): + """Return an original value. + + :return: object. + """ + return value + + def __deserialize_date(self, string): + """Deserializes string to date. + + :param string: str. + :return: date. 
+ """ try: - index = ( - self.api_client.configuration.server_operation_index.get( - self.settings["operation_id"], self.api_client.configuration.server_index - ) - if kwargs["_host_index"] is None - else kwargs["_host_index"] - ) - server_variables = self.api_client.configuration.server_operation_variables.get( - self.settings["operation_id"], self.api_client.configuration.server_variables - ) - _host = self.api_client.configuration.get_host_from_settings( - index, variables=server_variables, servers=self.settings["servers"] - ) - except IndexError: - if self.settings["servers"]: - raise ApiValueError("Invalid host index. Must be 0 <= index < %s" % len(self.settings["servers"])) - _host = None - - for key, value in kwargs.items(): - if key not in self.params_map["all"]: - raise ApiTypeError( - "Got an unexpected parameter '%s'" " to method `%s`" % (key, self.settings["operation_id"]) - ) - # only throw this nullable ApiValueError if _check_input_type - # is False, if _check_input_type==True we catch this case - # in self.__validate_inputs - if key not in self.params_map["nullable"] and value is None and kwargs["_check_input_type"] is False: - raise ApiValueError( - "Value may not be None for non-nullable parameter `%s`" - " when calling `%s`" % (key, self.settings["operation_id"]) - ) - - for key in self.params_map["required"]: - if key not in kwargs.keys(): - raise ApiValueError( - "Missing the required parameter `%s` when calling " "`%s`" % (key, self.settings["operation_id"]) - ) - - self.__validate_inputs(kwargs) - - params = self.__gather_params(kwargs) - - accept_headers_list = self.headers_map["accept"] - if accept_headers_list: - params["header"]["Accept"] = self.api_client.select_header_accept(accept_headers_list) - - content_type_headers_list = self.headers_map["content_type"] - if content_type_headers_list: - header_list = self.api_client.select_header_content_type(content_type_headers_list) - params["header"]["Content-Type"] = header_list - - return 
self.api_client.call_api( - self.settings["endpoint_path"], - self.settings["http_method"], - params["path"], - params["query"], - params["header"], - body=params["body"], - post_params=params["form"], - files=params["file"], - response_type=self.settings["response_type"], - auth_settings=self.settings["auth"], - async_req=kwargs["async_req"], - _check_type=kwargs["_check_return_type"], - _return_http_data_only=kwargs["_return_http_data_only"], - _preload_content=kwargs["_preload_content"], - _request_timeout=kwargs["_request_timeout"], - _host=_host, - collection_formats=params["collection_format"], - ) + return parse(string).date() + except ImportError: + return string + except ValueError: + raise rest.ApiException(status=0, reason="Failed to parse `{0}` as date object".format(string)) + + def __deserialize_datetime(self, string): + """Deserializes string to datetime. + + The string should be in iso8601 datetime format. + + :param string: str. + :return: datetime. + """ + try: + return parse(string) + except ImportError: + return string + except ValueError: + raise rest.ApiException(status=0, reason=("Failed to parse `{0}` as datetime object".format(string))) + + def __deserialize_model(self, data, klass): + """Deserializes list or dict to model. + + :param data: dict, list. + :param klass: class literal. + :return: model object. 
+ """ + + return klass.from_dict(data) diff --git a/pinecone/core/client/api_response.py b/pinecone/core/client/api_response.py new file mode 100644 index 00000000..7bc03cb6 --- /dev/null +++ b/pinecone/core/client/api_response.py @@ -0,0 +1,22 @@ +"""API response object.""" + +from __future__ import annotations +from typing import Any, Dict, Optional +from pydantic import Field, StrictInt, StrictStr + + +class ApiResponse: + """ + API response object + """ + + status_code: Optional[StrictInt] = Field(None, description="HTTP status code") + headers: Optional[Dict[StrictStr, StrictStr]] = Field(None, description="HTTP headers") + data: Optional[Any] = Field(None, description="Deserialized data given the data type") + raw_data: Optional[Any] = Field(None, description="Raw data (HTTP response body)") + + def __init__(self, status_code=None, headers=None, data=None, raw_data=None) -> None: + self.status_code = status_code + self.headers = headers + self.data = data + self.raw_data = raw_data diff --git a/pinecone/core/client/apis/__init__.py b/pinecone/core/client/apis/__init__.py deleted file mode 100644 index 88516b08..00000000 --- a/pinecone/core/client/apis/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# flake8: noqa - -# Import all APIs into this package. -# If you have many APIs here with many many models used in each API this may -# raise a `RecursionError`. 
-# In order to avoid this, import only the API that you directly need like: -# -# from .api.index_operations_api import IndexOperationsApi -# -# or import this package, but before doing it, use: -# -# import sys -# sys.setrecursionlimit(n) - -# Import APIs into API package: -from pinecone.core.client.api.index_operations_api import IndexOperationsApi -from pinecone.core.client.api.vector_operations_api import VectorOperationsApi diff --git a/pinecone/core/client/configuration.py b/pinecone/core/client/configuration.py index 2c6baf43..7ebb57e5 100644 --- a/pinecone/core/client/configuration.py +++ b/pinecone/core/client/configuration.py @@ -1,12 +1,16 @@ +# coding: utf-8 + """ Pinecone API - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) The version of the OpenAPI document: version not set Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 import copy @@ -15,9 +19,7 @@ import sys import urllib3 -from http import client as http_client -from pinecone.core.client.exceptions import ApiValueError - +import http.client as httplib JSON_SCHEMA_VALIDATION_KEYWORDS = { "multipleOf", @@ -33,46 +35,20 @@ } -class Configuration(object): - """NOTE: This class is auto generated by OpenAPI Generator - - Ref: https://openapi-generator.tech - Do not edit the class manually. +class Configuration: + """This class contains various settings of the API client. - :param host: Base url + :param host: Base url. :param api_key: Dict to store API key(s). Each entry in the dict specifies an API key. The dict key is the name of the security scheme in the OAS specification. The dict value is the API key secret. 
- :param api_key_prefix: Dict to store API prefix (e.g. Bearer) + :param api_key_prefix: Dict to store API prefix (e.g. Bearer). The dict key is the name of the security scheme in the OAS specification. The dict value is an API key prefix when generating the auth data. - :param username: Username for HTTP basic authentication - :param password: Password for HTTP basic authentication - :param discard_unknown_keys: Boolean value indicating whether to discard - unknown properties. A server may send a response that includes additional - properties that are not known by the client in the following scenarios: - 1. The OpenAPI document is incomplete, i.e. it does not match the server - implementation. - 2. The client was generated using an older version of the OpenAPI document - and the server has been upgraded since then. - If a schema in the OpenAPI document defines the additionalProperties attribute, - then all undeclared properties received by the server are injected into the - additional properties map. In that case, there are undeclared properties, and - nothing to discard. - :param disabled_client_side_validations (string): Comma-separated list of - JSON schema validation keywords to disable JSON schema structural validation - rules. The following keywords may be specified: multipleOf, maximum, - exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern, - maxItems, minItems. - By default, the validation is performed for data generated locally by the client - and data received from the server, independent of any validation performed by - the server side. If the input data does not satisfy the JSON schema validation - rules specified in the OpenAPI document, an exception is raised. - If disabled_client_side_validations is set, structural validation is - disabled. This can be useful to troubleshoot data validation problem, such as - when the OpenAPI document validation rules do not match the actual API data - received by the server. 
+ :param username: Username for HTTP basic authentication. + :param password: Password for HTTP basic authentication. + :param access_token: Access token. :param server_index: Index to servers configuration. :param server_variables: Mapping with string values to replace variables in templated server configuration. The validation of enums is performed for @@ -81,9 +57,10 @@ class Configuration(object): configuration. :param server_operation_variables: Mapping from operation ID to a mapping with string values to replace variables in templated server configuration. - The validation of enums is performed for variables with defined enum values before. + The validation of enums is performed for variables with defined enum + values before. :param ssl_ca_cert: str - the path to a file of concatenated CA certificates - in PEM format + in PEM format. :Example: @@ -114,17 +91,15 @@ def __init__( host=None, api_key=None, api_key_prefix=None, - access_token=None, username=None, password=None, - discard_unknown_keys=False, - disabled_client_side_validations="", + access_token=None, server_index=None, server_variables=None, server_operation_index=None, server_operation_variables=None, ssl_ca_cert=None, - ): + ) -> None: """Constructor""" self._base_path = "https://unknown-unknown.svc.unknown.pinecone.io" if host is None else host """Default Base url @@ -141,7 +116,6 @@ def __init__( """Temp file folder for downloading files """ # Authentication Settings - self.access_token = access_token self.api_key = {} if api_key: self.api_key = api_key @@ -161,8 +135,9 @@ def __init__( self.password = password """Password for HTTP basic authentication """ - self.discard_unknown_keys = discard_unknown_keys - self.disabled_client_side_validations = disabled_client_side_validations + self.access_token = access_token + """Access token + """ self.logger = {} """Logging Settings """ @@ -201,6 +176,10 @@ def __init__( self.assert_hostname = None """Set this to True/False to enable/disable SSL 
hostname verification. """ + self.tls_server_name = None + """SSL/TLS Server Name Indication (SNI) + Set this to the SNI value expected by the server. + """ self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 """urllib3 connection pool's maximum number of connections saved @@ -225,8 +204,17 @@ def __init__( # Enable client side validation self.client_side_validation = True - # Options to pass down to the underlying urllib3 socket self.socket_options = None + """Options to pass down to the underlying urllib3 socket + """ + + self.datetime_format = "%Y-%m-%dT%H:%M:%S.%f%z" + """datetime format + """ + + self.date_format = "%Y-%m-%d" + """date format + """ def __deepcopy__(self, memo): cls = self.__class__ @@ -244,12 +232,6 @@ def __deepcopy__(self, memo): def __setattr__(self, name, value): object.__setattr__(self, name, value) - if name == "disabled_client_side_validations": - s = set(filter(None, value.split(","))) - for v in s: - if v not in JSON_SCHEMA_VALIDATION_KEYWORDS: - raise ApiValueError("Invalid keyword: '{0}''".format(v)) - self._disabled_client_side_validations = s @classmethod def set_default(cls, default): @@ -260,21 +242,31 @@ def set_default(cls, default): :param default: object of Configuration """ - cls._default = copy.deepcopy(default) + cls._default = default @classmethod def get_default_copy(cls): - """Return new instance of configuration. + """Deprecated. Please use `get_default` instead. + + Deprecated. Please use `get_default` instead. + + :return: The configuration object. + """ + return cls.get_default() + + @classmethod + def get_default(cls): + """Return the default configuration. This method returns newly created, based on default constructor, object of Configuration class or returns a copy of default - configuration passed by the set_default method. + configuration. :return: The configuration object. 
""" - if cls._default is not None: - return copy.deepcopy(cls._default) - return Configuration() + if cls._default is None: + cls._default = Configuration() + return cls._default @property def logger_file(self): @@ -328,15 +320,15 @@ def debug(self, value): # if debug status is True, turn on debug logging for _, logger in self.logger.items(): logger.setLevel(logging.DEBUG) - # turn on http_client debug - http_client.HTTPConnection.debuglevel = 1 + # turn on httplib debug + httplib.HTTPConnection.debuglevel = 1 else: # if debug status is False, turn off debug logging, # setting log level to default `logging.WARNING` for _, logger in self.logger.items(): logger.setLevel(logging.WARNING) - # turn off http_client debug - http_client.HTTPConnection.debuglevel = 0 + # turn off httplib debug + httplib.HTTPConnection.debuglevel = 0 @property def logger_format(self): diff --git a/pinecone/core/client/exceptions.py b/pinecone/core/client/exceptions.py index 5cb822bc..7a26ac07 100644 --- a/pinecone/core/client/exceptions.py +++ b/pinecone/core/client/exceptions.py @@ -1,23 +1,24 @@ +# coding: utf-8 + """ Pinecone API - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) The version of the OpenAPI document: version not set Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - + Generated by OpenAPI Generator (https://openapi-generator.tech) -from pinecone.core.exceptions import PineconeException + Do not edit the class manually. 
+""" # noqa: E501 -class OpenApiException(PineconeException): +class OpenApiException(Exception): """The base exception class for all OpenAPIExceptions""" class ApiTypeError(OpenApiException, TypeError): - def __init__(self, msg, path_to_item=None, valid_classes=None, key_type=None): + def __init__(self, msg, path_to_item=None, valid_classes=None, key_type=None) -> None: """Raises an exception for TypeErrors Args: @@ -45,7 +46,7 @@ def __init__(self, msg, path_to_item=None, valid_classes=None, key_type=None): class ApiValueError(OpenApiException, ValueError): - def __init__(self, msg, path_to_item=None): + def __init__(self, msg, path_to_item=None) -> None: """ Args: msg (str): the exception message @@ -63,7 +64,7 @@ def __init__(self, msg, path_to_item=None): class ApiAttributeError(OpenApiException, AttributeError): - def __init__(self, msg, path_to_item=None): + def __init__(self, msg, path_to_item=None) -> None: """ Raised when an attribute reference or assignment fails. @@ -82,7 +83,7 @@ def __init__(self, msg, path_to_item=None): class ApiKeyError(OpenApiException, KeyError): - def __init__(self, msg, path_to_item=None): + def __init__(self, msg, path_to_item=None) -> None: """ Args: msg (str): the exception message @@ -99,7 +100,7 @@ def __init__(self, msg, path_to_item=None): class ApiException(OpenApiException): - def __init__(self, status=None, reason=None, http_resp=None): + def __init__(self, status=None, reason=None, http_resp=None) -> None: if http_resp: self.status = http_resp.status self.reason = http_resp.reason @@ -123,23 +124,28 @@ def __str__(self): return error_message +class BadRequestException(ApiException): + def __init__(self, status=None, reason=None, http_resp=None) -> None: + super(BadRequestException, self).__init__(status, reason, http_resp) + + class NotFoundException(ApiException): - def __init__(self, status=None, reason=None, http_resp=None): + def __init__(self, status=None, reason=None, http_resp=None) -> None: 
super(NotFoundException, self).__init__(status, reason, http_resp) class UnauthorizedException(ApiException): - def __init__(self, status=None, reason=None, http_resp=None): + def __init__(self, status=None, reason=None, http_resp=None) -> None: super(UnauthorizedException, self).__init__(status, reason, http_resp) class ForbiddenException(ApiException): - def __init__(self, status=None, reason=None, http_resp=None): + def __init__(self, status=None, reason=None, http_resp=None) -> None: super(ForbiddenException, self).__init__(status, reason, http_resp) class ServiceException(ApiException): - def __init__(self, status=None, reason=None, http_resp=None): + def __init__(self, status=None, reason=None, http_resp=None) -> None: super(ServiceException, self).__init__(status, reason, http_resp) diff --git a/pinecone/core/client/model/__init__.py b/pinecone/core/client/model/__init__.py deleted file mode 100644 index cfe32b78..00000000 --- a/pinecone/core/client/model/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# we can not import model classes here because that would create a circular -# reference which would not work in python2 -# do not import all models into this module because that uses a lot of memory and stack frames -# if you need the ability to import all models from one package, import them with -# from {{packageName}.models import ModelA, ModelB diff --git a/pinecone/core/client/model/approximated_config.py b/pinecone/core/client/model/approximated_config.py deleted file mode 100644 index 4c237d89..00000000 --- a/pinecone/core/client/model/approximated_config.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from 
pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -class ApproximatedConfig(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - "k_bits": (int,), # noqa: E501 - "hybrid": (bool,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "k_bits": "k_bits", # noqa: E501 - "hybrid": "hybrid", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """ApproximatedConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - k_bits (int): [optional] if omitted the server will use the default value of 512 # noqa: E501 - hybrid (bool): [optional] if omitted the server will use the default value of False # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """ApproximatedConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - k_bits (int): [optional] if omitted the server will use the default value of 512 # noqa: E501 - hybrid (bool): [optional] if omitted the server will use the default value of False # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/collection_meta.py b/pinecone/core/client/model/collection_meta.py deleted file mode 100644 index de2f5b6b..00000000 --- a/pinecone/core/client/model/collection_meta.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -class CollectionMeta(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - "name": (str,), # noqa: E501 - "size": (int,), # noqa: E501 - "status": (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "name": "name", # noqa: E501 - "size": "size", # noqa: E501 - "status": "status", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """CollectionMeta - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - size (int): The size of the collection in bytes.. [optional] # noqa: E501 - status (str): The status of the collection.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """CollectionMeta - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - size (int): The size of the collection in bytes.. [optional] # noqa: E501 - status (str): The status of the collection.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/create_collection_request.py b/pinecone/core/client/model/create_collection_request.py deleted file mode 100644 index 47379c0e..00000000 --- a/pinecone/core/client/model/create_collection_request.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -class CreateCollectionRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - "name": (str,), # noqa: E501 - "source": (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "name": "name", # noqa: E501 - "source": "source", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, name, source, *args, **kwargs): # noqa: E501 - """CreateCollectionRequest - a model defined in OpenAPI - - Args: - name (str): The name of the collection to be created. - source (str): The name of the source index to be used as the source for the collection. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - self.source = source - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, name, source, *args, **kwargs): # noqa: E501 - """CreateCollectionRequest - a model defined in OpenAPI - - Args: - name (str): The name of the collection to be created. - source (str): The name of the source index to be used as the source for the collection. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - self.source = source - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes." - ) diff --git a/pinecone/core/client/model/create_request.py b/pinecone/core/client/model/create_request.py deleted file mode 100644 index 1aa615c0..00000000 --- a/pinecone/core/client/model/create_request.py +++ /dev/null @@ -1,322 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -class CreateRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - "name": (str,), # noqa: E501 - "dimension": (int,), # noqa: E501 - "index_type": (str,), # noqa: E501 - "metric": (str,), # noqa: E501 - "pods": (int,), # noqa: E501 - "replicas": (int,), # noqa: E501 - "shards": (int,), # noqa: E501 - "pod_type": (str,), # noqa: E501 - "index_config": (dict,), # noqa: E501 - "metadata_config": ( - {str: (bool, date, datetime, dict, float, int, list, str, none_type)}, - none_type, - ), # noqa: E501 - "source_collection": (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "name": "name", # noqa: E501 - "dimension": "dimension", # noqa: E501 - "index_type": "index_type", # noqa: E501 - "metric": "metric", # noqa: E501 - "pods": "pods", # noqa: E501 - "replicas": "replicas", # noqa: E501 - "shards": "shards", # noqa: E501 - "pod_type": "pod_type", # noqa: E501 - "index_config": "index_config", # noqa: E501 - "metadata_config": "metadata_config", # noqa: E501 - "source_collection": "source_collection", # noqa: E501 - } - - read_only_vars = {} - 
- _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, name, dimension, *args, **kwargs): # noqa: E501 - """CreateRequest - a model defined in OpenAPI - - Args: - name (str): The name of the index to be created. The maximum length is 45 characters. - dimension (int): The dimensions of the vectors to be inserted in the index - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - index_type (str): The type of vector index. Pinecone supports 'approximated'.. 
[optional] if omitted the server will use the default value of "approximated" # noqa: E501 - metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'.. [optional] if omitted the server will use the default value of "cosine" # noqa: E501 - pods (int): The number of pods for the index to use,including replicas.. [optional] if omitted the server will use the default value of 1 # noqa: E501 - replicas (int): The number of replicas. Replicas duplicate your index. They provide higher availability and throughput.. [optional] if omitted the server will use the default value of 1 # noqa: E501 - shards (int): The number of shards to be used in the index.. [optional] if omitted the server will use the default value of 1 # noqa: E501 - pod_type (str): The type of pod to use. One of `s1`, `p1`, or `p2` appended with `.` and one of `x1`, `x2`, `x4`, or `x8`.. [optional] if omitted the server will use the default value of "p1.x1" # noqa: E501 - index_config (dict): [optional] # noqa: E501 - metadata_config ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}, none_type): Configuration for the behavior of Pinecone's internal metadata index. By default, all metadata is indexed; when `metadata_config` is present, only specified metadata fields are indexed. To specify metadata fields to index, provide a JSON object of the following form: ``` {\"indexed\": [\"example_metadata_field\"]} ``` . [optional] # noqa: E501 - source_collection (str): The name of the collection to create an index from. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - self.dimension = dimension - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, name, dimension, *args, **kwargs): # noqa: E501 - """CreateRequest - a model defined in OpenAPI - - Args: - name (str): The name of the index to be created. The maximum length is 45 characters. - dimension (int): The dimensions of the vectors to be inserted in the index - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - index_type (str): The type of vector index. Pinecone supports 'approximated'.. [optional] if omitted the server will use the default value of "approximated" # noqa: E501 - metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'.. [optional] if omitted the server will use the default value of "cosine" # noqa: E501 - pods (int): The number of pods for the index to use,including replicas.. [optional] if omitted the server will use the default value of 1 # noqa: E501 - replicas (int): The number of replicas. Replicas duplicate your index. They provide higher availability and throughput.. 
[optional] if omitted the server will use the default value of 1 # noqa: E501 - shards (int): The number of shards to be used in the index.. [optional] if omitted the server will use the default value of 1 # noqa: E501 - pod_type (str): The type of pod to use. One of `s1`, `p1`, or `p2` appended with `.` and one of `x1`, `x2`, `x4`, or `x8`.. [optional] if omitted the server will use the default value of "p1.x1" # noqa: E501 - index_config (dict): [optional] # noqa: E501 - metadata_config ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}, none_type): Configuration for the behavior of Pinecone's internal metadata index. By default, all metadata is indexed; when `metadata_config` is present, only specified metadata fields are indexed. To specify metadata fields to index, provide a JSON object of the following form: ``` {\"indexed\": [\"example_metadata_field\"]} ``` . [optional] # noqa: E501 - source_collection (str): The name of the collection to create an index from. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.name = name - self.dimension = dimension - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." - ) diff --git a/pinecone/core/client/model/delete_request.py b/pinecone/core/client/model/delete_request.py deleted file mode 100644 index d6028904..00000000 --- a/pinecone/core/client/model/delete_request.py +++ /dev/null @@ -1,285 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -class DeleteRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. 
- Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = { - ("ids",): {}, - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - return { - "ids": ([str],), # noqa: E501 - "delete_all": (bool,), # noqa: E501 - "namespace": (str,), # noqa: E501 - "filter": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "ids": "ids", # noqa: E501 - "delete_all": "deleteAll", # noqa: E501 - "namespace": "namespace", # noqa: E501 - "filter": "filter", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """DeleteRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - ids ([str]): Vectors to delete.. [optional] # noqa: E501 - delete_all (bool): This indicates that all vectors in the index namespace should be deleted.. [optional] if omitted the server will use the default value of False # noqa: E501 - namespace (str): The namespace to delete vectors from, if applicable.. [optional] # noqa: E501 - filter ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See https://www.pinecone.io/docs/metadata-filtering/.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """DeleteRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - ids ([str]): Vectors to delete.. [optional] # noqa: E501 - delete_all (bool): This indicates that all vectors in the index namespace should be deleted.. 
[optional] if omitted the server will use the default value of False # noqa: E501 - namespace (str): The namespace to delete vectors from, if applicable.. [optional] # noqa: E501 - filter ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See https://www.pinecone.io/docs/metadata-filtering/.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/describe_index_stats_request.py b/pinecone/core/client/model/describe_index_stats_request.py deleted file mode 100644 index 05c73506..00000000 --- a/pinecone/core/client/model/describe_index_stats_request.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -class DescribeIndexStatsRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - "filter": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "filter": "filter", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """DescribeIndexStatsRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - filter ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See https://www.pinecone.io/docs/metadata-filtering/.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """DescribeIndexStatsRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - filter ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See https://www.pinecone.io/docs/metadata-filtering/.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes." - ) diff --git a/pinecone/core/client/model/describe_index_stats_response.py b/pinecone/core/client/model/describe_index_stats_response.py deleted file mode 100644 index 6e965ed9..00000000 --- a/pinecone/core/client/model/describe_index_stats_response.py +++ /dev/null @@ -1,291 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -def lazy_import(): - from pinecone.core.client.model.namespace_summary import NamespaceSummary - - globals()["NamespaceSummary"] = NamespaceSummary - - -class DescribeIndexStatsResponse(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - "namespaces": ({str: (NamespaceSummary,)},), # noqa: E501 - "dimension": (int,), # noqa: E501 - "index_fullness": (float,), # noqa: E501 - "total_vector_count": (int,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "namespaces": "namespaces", # noqa: E501 - "dimension": "dimension", # noqa: E501 - "index_fullness": "indexFullness", # noqa: E501 - "total_vector_count": "totalVectorCount", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """DescribeIndexStatsResponse - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - namespaces ({str: (NamespaceSummary,)}): A mapping for each namespace in the index from the namespace name to a summary of its contents. If a metadata filter expression is present, the summary will reflect only vectors matching that expression.. [optional] # noqa: E501 - dimension (int): The dimension of the indexed vectors.. [optional] # noqa: E501 - index_fullness (float): The fullness of the index, regardless of whether a metadata filter expression was passed. The granularity of this metric is 10%.. 
[optional] # noqa: E501 - total_vector_count (int): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """DescribeIndexStatsResponse - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - namespaces ({str: (NamespaceSummary,)}): A mapping for each namespace in the index from the namespace name to a summary of its contents. If a metadata filter expression is present, the summary will reflect only vectors matching that expression.. [optional] # noqa: E501 - dimension (int): The dimension of the indexed vectors.. [optional] # noqa: E501 - index_fullness (float): The fullness of the index, regardless of whether a metadata filter expression was passed. The granularity of this metric is 10%.. 
[optional] # noqa: E501 - total_vector_count (int): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/fetch_response.py b/pinecone/core/client/model/fetch_response.py deleted file mode 100644 index 37ed4b17..00000000 --- a/pinecone/core/client/model/fetch_response.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -def lazy_import(): - from pinecone.core.client.model.vector import Vector - - globals()["Vector"] = Vector - - -class FetchResponse(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - "vectors": ({str: (Vector,)},), # noqa: E501 - "namespace": (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "vectors": "vectors", # noqa: E501 - "namespace": "namespace", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """FetchResponse - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - vectors ({str: (Vector,)}): [optional] # noqa: E501 - namespace (str): The namespace of the vectors.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """FetchResponse - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - vectors ({str: (Vector,)}): [optional] # noqa: E501 - namespace (str): The namespace of the vectors.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/hnsw_config.py b/pinecone/core/client/model/hnsw_config.py deleted file mode 100644 index ecf41422..00000000 --- a/pinecone/core/client/model/hnsw_config.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -class HnswConfig(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - "ef_construction": (int,), # noqa: E501 - "ef": (int,), # noqa: E501 - "m": (int,), # noqa: E501 - "max_elements": (int,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "ef_construction": "ef_construction", # noqa: E501 - "ef": "ef", # noqa: E501 - "m": "M", # noqa: E501 - "max_elements": "max_elements", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """HnswConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - ef_construction (int): [optional] if omitted the server will use the default value of 500 # noqa: E501 - ef (int): [optional] if omitted the server will use the default value of 250 # noqa: E501 - m (int): [optional] if omitted the server will use the default value of 12 # noqa: E501 - max_elements (int): [optional] if omitted the server will use the default value of 50000000 # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """HnswConfig - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - ef_construction (int): [optional] if omitted the server will use the default value of 500 # noqa: E501 - ef (int): [optional] if omitted the server will use the default value of 250 # noqa: E501 - m (int): [optional] if omitted the server will use the default value of 12 # noqa: E501 - max_elements (int): [optional] if omitted the server will use the default value of 50000000 # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." - ) diff --git a/pinecone/core/client/model/index_meta.py b/pinecone/core/client/model/index_meta.py deleted file mode 100644 index e2e9bd94..00000000 --- a/pinecone/core/client/model/index_meta.py +++ /dev/null @@ -1,279 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -def lazy_import(): - from pinecone.core.client.model.index_meta_database import IndexMetaDatabase - - globals()["IndexMetaDatabase"] = IndexMetaDatabase - - -class IndexMeta(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - "database": (IndexMetaDatabase,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "database": "database", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IndexMeta - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - database (IndexMetaDatabase): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IndexMeta - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - database (IndexMetaDatabase): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/index_meta_database.py b/pinecone/core/client/model/index_meta_database.py deleted file mode 100644 index a969abd8..00000000 --- a/pinecone/core/client/model/index_meta_database.py +++ /dev/null @@ -1,319 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -def lazy_import(): - from pinecone.core.client.model.index_meta_database_status import IndexMetaDatabaseStatus - - globals()["IndexMetaDatabaseStatus"] = IndexMetaDatabaseStatus - - -class IndexMetaDatabase(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - "name": (str,), # noqa: E501 - "dimensions": (str,), # noqa: E501 - "index_type": (str,), # noqa: E501 - "metric": (str,), # noqa: E501 - "pods": (int,), # noqa: E501 - "replicas": (int,), # noqa: E501 - "shards": (int,), # noqa: E501 - "pod_type": (str,), # noqa: E501 - "index_config": (dict,), # noqa: E501 - "metadata_config": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 - "status": (IndexMetaDatabaseStatus,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "name": "name", # noqa: E501 - "dimensions": "dimensions", # noqa: E501 - "index_type": "index_type", # noqa: E501 - "metric": "metric", # noqa: E501 - "pods": "pods", # noqa: E501 - "replicas": "replicas", # noqa: E501 - "shards": "shards", # noqa: E501 - "pod_type": "pod_type", # noqa: E501 - "index_config": "index_config", # noqa: E501 - "metadata_config": "metadata_config", # noqa: E501 - "status": "status", # noqa: E501 - } - - read_only_vars = {} 
- - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IndexMetaDatabase - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - dimensions (str): [optional] # noqa: E501 - index_type (str): [optional] # noqa: E501 - metric (str): [optional] # noqa: E501 - pods (int): [optional] # noqa: E501 - replicas (int): [optional] # noqa: E501 - shards (int): [optional] # noqa: E501 - pod_type (str): [optional] # noqa: E501 - index_config (dict): [optional] # noqa: E501 - metadata_config ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] # noqa: E501 - status (IndexMetaDatabaseStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IndexMetaDatabase - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - dimensions (str): [optional] # noqa: E501 - index_type (str): [optional] # noqa: E501 - metric (str): [optional] # noqa: E501 - pods (int): [optional] # noqa: E501 - replicas (int): [optional] # noqa: E501 - shards (int): [optional] # noqa: E501 - pod_type (str): [optional] # noqa: E501 - index_config (dict): [optional] # noqa: E501 - metadata_config ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] # noqa: E501 - status (IndexMetaDatabaseStatus): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes." - ) diff --git a/pinecone/core/client/model/index_meta_database_status.py b/pinecone/core/client/model/index_meta_database_status.py deleted file mode 100644 index 8430174f..00000000 --- a/pinecone/core/client/model/index_meta_database_status.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -class IndexMetaDatabaseStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = { - ("state",): { - "INITIALIZING": "Initializing", - "SCALINGUP": "ScalingUp", - "SCALINGDOWN": "ScalingDown", - "TERMINATING": "Terminating", - "READY": "Ready", - }, - } - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - "ready": (bool,), # noqa: E501 - "state": (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "ready": "ready", # noqa: E501 - "state": "state", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IndexMetaDatabaseStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - ready (bool): [optional] # noqa: E501 - state (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IndexMetaDatabaseStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - ready (bool): [optional] # noqa: E501 - state (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/index_meta_status.py b/pinecone/core/client/model/index_meta_status.py deleted file mode 100644 index c9da0214..00000000 --- a/pinecone/core/client/model/index_meta_status.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Pinecone JSON API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -class IndexMetaStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - "crashed": ([str],), # noqa: E501 - "ready": (bool,), # noqa: E501 - "port": (int,), # noqa: E501 - "waiting": ([str],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "crashed": "crashed", # noqa: E501 - "ready": "ready", # noqa: E501 - "port": "port", # noqa: E501 - "waiting": "waiting", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IndexMetaStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - crashed ([str]): [optional] # noqa: E501 - ready (bool): [optional] # noqa: E501 - port (int): [optional] # noqa: E501 - waiting ([str]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """IndexMetaStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - crashed ([str]): [optional] # noqa: E501 - ready (bool): [optional] # noqa: E501 - port (int): [optional] # noqa: E501 - waiting ([str]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/namespace_summary.py b/pinecone/core/client/model/namespace_summary.py deleted file mode 100644 index 869672d0..00000000 --- a/pinecone/core/client/model/namespace_summary.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -class NamespaceSummary(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - "vector_count": (int,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "vector_count": "vectorCount", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """NamespaceSummary - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - vector_count (int): The number of vectors stored in this namespace. Note that updates to this field may lag behind updates to the underlying index and corresponding query results, etc.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """NamespaceSummary - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - vector_count (int): The number of vectors stored in this namespace. Note that updates to this field may lag behind updates to the underlying index and corresponding query results, etc.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/patch_request.py b/pinecone/core/client/model/patch_request.py deleted file mode 100644 index ac67785d..00000000 --- a/pinecone/core/client/model/patch_request.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -class PatchRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - "replicas": (int,), # noqa: E501 - "pod_type": (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "replicas": "replicas", # noqa: E501 - "pod_type": "pod_type", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """PatchRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - replicas (int): The desired number of replicas for the index.. [optional] # noqa: E501 - pod_type (str): The new pod type for the index. One of `s1`, `p1`, or `p2` appended with `.` and one of `x1`, `x2`, `x4`, or `x8`.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """PatchRequest - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - replicas (int): The desired number of replicas for the index.. [optional] # noqa: E501 - pod_type (str): The new pod type for the index. One of `s1`, `p1`, or `p2` appended with `.` and one of `x1`, `x2`, `x4`, or `x8`.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes." - ) diff --git a/pinecone/core/client/model/protobuf_any.py b/pinecone/core/client/model/protobuf_any.py deleted file mode 100644 index 220d4fc1..00000000 --- a/pinecone/core/client/model/protobuf_any.py +++ /dev/null @@ -1,275 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -class ProtobufAny(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - "type_url": (str,), # noqa: E501 - "value": (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "type_url": "typeUrl", # noqa: E501 - "value": "value", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """ProtobufAny - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - type_url (str): [optional] # noqa: E501 - value (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """ProtobufAny - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - type_url (str): [optional] # noqa: E501 - value (str): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/protobuf_null_value.py b/pinecone/core/client/model/protobuf_null_value.py deleted file mode 100644 index 9cc94e88..00000000 --- a/pinecone/core/client/model/protobuf_null_value.py +++ /dev/null @@ -1,293 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -class ProtobufNullValue(ModelSimple): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. 
- """ - - allowed_values = { - ("value",): { - "NULL_VALUE": "NULL_VALUE", - }, - } - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - "value": (str,), - } - - @cached_property - def discriminator(): - return None - - attribute_map = {} - - read_only_vars = set() - - _composed_schemas = None - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): - """ProtobufNullValue - a model defined in OpenAPI - - Note that value can be passed either in args or in kwargs, but not in both. - - Args: - args[0] (str): `NullValue` is a singleton enumeration to represent the null value for the `Value` type union. The JSON representation for `NullValue` is JSON `null`. - NULL_VALUE: Null value.. if omitted defaults to "NULL_VALUE", must be one of ["NULL_VALUE", ] # noqa: E501 - - Keyword Args: - value (str): `NullValue` is a singleton enumeration to represent the null value for the `Value` type union. The JSON representation for `NullValue` is JSON `null`. - NULL_VALUE: Null value.. if omitted defaults to "NULL_VALUE", must be one of ["NULL_VALUE", ] # noqa: E501 - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - # required up here when default value is not given - _path_to_item = kwargs.pop("_path_to_item", ()) - - if "value" in kwargs: - value = kwargs.pop("value") - elif args: - args = list(args) - value = args.pop(0) - else: - value = "NULL_VALUE" - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - self.value = value - if kwargs: - raise ApiTypeError( - "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." - % ( - kwargs, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): - """ProtobufNullValue - a model defined in OpenAPI - - Note that value can be passed either in args or in kwargs, but not in both. - - Args: - args[0] (str): `NullValue` is a singleton enumeration to represent the null value for the `Value` type union. The JSON representation for `NullValue` is JSON `null`. - NULL_VALUE: Null value.. if omitted defaults to "NULL_VALUE", must be one of ["NULL_VALUE", ] # noqa: E501 - - Keyword Args: - value (str): `NullValue` is a singleton enumeration to represent the null value for the `Value` type union. The JSON representation for `NullValue` is JSON `null`. - NULL_VALUE: Null value.. if omitted defaults to "NULL_VALUE", must be one of ["NULL_VALUE", ] # noqa: E501 - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - # required up here when default value is not given - _path_to_item = kwargs.pop("_path_to_item", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if "value" in kwargs: - value = kwargs.pop("value") - elif args: - args = list(args) - value = args.pop(0) - else: - value = "NULL_VALUE" - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - self.value = value - if kwargs: - raise ApiTypeError( - "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." - % ( - kwargs, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - return self diff --git a/pinecone/core/client/model/query_request.py b/pinecone/core/client/model/query_request.py deleted file mode 100644 index ec88c383..00000000 --- a/pinecone/core/client/model/query_request.py +++ /dev/null @@ -1,329 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -def lazy_import(): - from pinecone.core.client.model.query_vector import QueryVector - from pinecone.core.client.model.sparse_values import SparseValues - - globals()["QueryVector"] = QueryVector - globals()["SparseValues"] = SparseValues - - -class QueryRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. 
- Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = { - ("top_k",): { - "inclusive_maximum": 10000, - "inclusive_minimum": 1, - }, - ("queries",): {}, - ("vector",): {}, - ("id",): { - "max_length": 512, - }, - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - "top_k": (int,), # noqa: E501 - "namespace": (str,), # noqa: E501 - "filter": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 - "include_values": (bool,), # noqa: E501 - "include_metadata": (bool,), # noqa: E501 - "queries": ([QueryVector],), # noqa: E501 - "vector": ([float],), # noqa: E501 - "sparse_vector": (SparseValues,), # noqa: E501 - "id": (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "top_k": "topK", # noqa: E501 - "namespace": "namespace", # noqa: E501 - "filter": "filter", # noqa: E501 - "include_values": "includeValues", # noqa: E501 - "include_metadata": "includeMetadata", # noqa: E501 - "queries": "queries", # noqa: E501 - "vector": "vector", # noqa: E501 - "sparse_vector": "sparseVector", # noqa: E501 - "id": "id", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, top_k, *args, **kwargs): # noqa: E501 - """QueryRequest - a model defined in OpenAPI - - Args: - top_k (int): The number of results to return for each query. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - namespace (str): The namespace to query.. [optional] # noqa: E501 - filter ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See https://www.pinecone.io/docs/metadata-filtering/.. [optional] # noqa: E501 - include_values (bool): Indicates whether vector values are included in the response.. [optional] if omitted the server will use the default value of False # noqa: E501 - include_metadata (bool): Indicates whether metadata is included in the response as well as the ids.. [optional] if omitted the server will use the default value of False # noqa: E501 - queries ([QueryVector]): DEPRECATED. The query vectors. Each `query()` request can contain only one of the parameters `queries`, `vector`, or `id`.. [optional] # noqa: E501 - vector ([float]): The query vector. This should be the same length as the dimension of the index being queried. Each `query()` request can contain only one of the parameters `id` or `vector`.. [optional] # noqa: E501 - sparse_vector (SparseValues): [optional] # noqa: E501 - id (str): The unique ID of the vector to be used as a query vector. Each `query()` request can contain only one of the parameters `queries`, `vector`, or `id`.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.top_k = top_k - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, top_k, *args, **kwargs): # noqa: E501 - """QueryRequest - a model defined in OpenAPI - - Args: - top_k (int): The number of results to return for each query. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - namespace (str): The namespace to query.. [optional] # noqa: E501 - filter ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See https://www.pinecone.io/docs/metadata-filtering/.. [optional] # noqa: E501 - include_values (bool): Indicates whether vector values are included in the response.. [optional] if omitted the server will use the default value of False # noqa: E501 - include_metadata (bool): Indicates whether metadata is included in the response as well as the ids.. [optional] if omitted the server will use the default value of False # noqa: E501 - queries ([QueryVector]): DEPRECATED. The query vectors. 
Each `query()` request can contain only one of the parameters `queries`, `vector`, or `id`.. [optional] # noqa: E501 - vector ([float]): The query vector. This should be the same length as the dimension of the index being queried. Each `query()` request can contain only one of the parameters `id` or `vector`.. [optional] # noqa: E501 - sparse_vector (SparseValues): The sparse values of the query vector [optional] # noqa: E501 - id (str): The unique ID of the vector to be used as a query vector. Each `query()` request can contain only one of the parameters `queries`, `vector`, or `id`.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.top_k = top_k - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/query_response.py b/pinecone/core/client/model/query_response.py deleted file mode 100644 index 85533762..00000000 --- a/pinecone/core/client/model/query_response.py +++ /dev/null @@ -1,289 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -def lazy_import(): - from pinecone.core.client.model.scored_vector import ScoredVector - from pinecone.core.client.model.single_query_results import SingleQueryResults - - globals()["ScoredVector"] = ScoredVector - globals()["SingleQueryResults"] = SingleQueryResults - - -class QueryResponse(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. 
- validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - "results": ([SingleQueryResults],), # noqa: E501 - "matches": ([ScoredVector],), # noqa: E501 - "namespace": (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "results": "results", # noqa: E501 - "matches": "matches", # noqa: E501 - "namespace": "namespace", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """QueryResponse - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - results ([SingleQueryResults]): DEPRECATED. The results of each query. The order is the same as `QueryRequest.queries`.. [optional] # noqa: E501 - matches ([ScoredVector]): The matches for the vectors.. [optional] # noqa: E501 - namespace (str): The namespace for the vectors.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """QueryResponse - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - results ([SingleQueryResults]): DEPRECATED. The results of each query. The order is the same as `QueryRequest.queries`.. [optional] # noqa: E501 - matches ([ScoredVector]): The matches for the vectors.. [optional] # noqa: E501 - namespace (str): The namespace for the vectors.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." - ) diff --git a/pinecone/core/client/model/query_vector.py b/pinecone/core/client/model/query_vector.py deleted file mode 100644 index 5d52522e..00000000 --- a/pinecone/core/client/model/query_vector.py +++ /dev/null @@ -1,307 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -def lazy_import(): - from pinecone.core.client.model.sparse_values import SparseValues - - globals()["SparseValues"] = SparseValues - - -class QueryVector(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = { - ("values",): {}, - ("top_k",): { - "inclusive_maximum": 10000, - "inclusive_minimum": 1, - }, - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. 
- """ - lazy_import() - return { - "values": ([float],), # noqa: E501 - "sparse_values": (SparseValues,), # noqa: E501 - "top_k": (int,), # noqa: E501 - "namespace": (str,), # noqa: E501 - "filter": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "values": "values", # noqa: E501 - "sparse_values": "sparseValues", # noqa: E501 - "top_k": "topK", # noqa: E501 - "namespace": "namespace", # noqa: E501 - "filter": "filter", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, values, *args, **kwargs): # noqa: E501 - """QueryVector - a model defined in OpenAPI - - Args: - values ([float]): The query vector values. This should be the same length as the dimension of the index being queried. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. 
- For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - sparse_values (SparseValues): The sparse data of the query vector [optional] # noqa: E501 - top_k (int): An override for the number of results to return for this query vector.. [optional] # noqa: E501 - namespace (str): An override the namespace to search.. [optional] # noqa: E501 - filter ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): An override for the metadata filter to apply. This replaces the request-level filter.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.values = values - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, values, *args, **kwargs): # noqa: E501 - """QueryVector - a model defined in OpenAPI - - Args: - values ([float]): The query vector values. This should be the same length as the dimension of the index being queried. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - sparse_values (SparseValues): This is the sparse data of the vector [optional] # noqa: E501 - top_k (int): An override for the number of results to return for this query vector.. [optional] # noqa: E501 - namespace (str): An override the namespace to search.. [optional] # noqa: E501 - filter ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): An override for the metadata filter to apply. This replaces the request-level filter.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.values = values - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/rpc_status.py b/pinecone/core/client/model/rpc_status.py deleted file mode 100644 index 6d0c7605..00000000 --- a/pinecone/core/client/model/rpc_status.py +++ /dev/null @@ -1,287 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -def lazy_import(): - from pinecone.core.client.model.protobuf_any import ProtobufAny - - globals()["ProtobufAny"] = ProtobufAny - - -class RpcStatus(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - "code": (int,), # noqa: E501 - "message": (str,), # noqa: E501 - "details": ([ProtobufAny],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "code": "code", # noqa: E501 - "message": "message", # noqa: E501 - "details": "details", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """RpcStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. 
- False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - code (int): [optional] # noqa: E501 - message (str): [optional] # noqa: E501 - details ([ProtobufAny]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """RpcStatus - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - code (int): [optional] # noqa: E501 - message (str): [optional] # noqa: E501 - details ([ProtobufAny]): [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/scored_vector.py b/pinecone/core/client/model/scored_vector.py deleted file mode 100644 index 98e7f958..00000000 --- a/pinecone/core/client/model/scored_vector.py +++ /dev/null @@ -1,306 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -def lazy_import(): - from pinecone.core.client.model.sparse_values import SparseValues - - globals()["SparseValues"] = SparseValues - - -class ScoredVector(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = { - ("id",): { - "max_length": 512, - "min_length": 1, - }, - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - "id": (str,), # noqa: E501 - "score": (float,), # noqa: E501 - "values": ([float],), # noqa: E501 - "sparse_values": (SparseValues,), # noqa: E501 - "metadata": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "id": "id", # noqa: E501 - "score": "score", # noqa: E501 - "values": "values", # noqa: E501 - "sparse_values": "sparseValues", # noqa: E501 - "metadata": "metadata", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, id, *args, **kwargs): # noqa: E501 - """ScoredVector - a model defined in OpenAPI - - Args: - id (str): This is the vector's unique id. 
- - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - score (float): This is a measure of similarity between this vector and the query vector. The higher the score, the more they are similar.. [optional] # noqa: E501 - values ([float]): This is the vector data, if it is requested.. [optional] # noqa: E501 - sparse_values (SparseValues): the sparse data of the vector [optional] # noqa: E501 - metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): This is the metadata, if it is requested.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.id = id - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, id, *args, **kwargs): # noqa: E501 - """ScoredVector - a model defined in OpenAPI - - Args: - id (str): This is the vector's unique id. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - score (float): This is a measure of similarity between this vector and the query vector. The higher the score, the more they are similar.. [optional] # noqa: E501 - values ([float]): This is the vector data, if it is requested.. [optional] # noqa: E501 - sparse_values (SparseValues): This is the sparse data of the vector [optional] # noqa: E501 - metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): This is the metadata, if it is requested.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.id = id - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/single_query_results.py b/pinecone/core/client/model/single_query_results.py deleted file mode 100644 index 9ff57289..00000000 --- a/pinecone/core/client/model/single_query_results.py +++ /dev/null @@ -1,283 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -def lazy_import(): - from pinecone.core.client.model.scored_vector import ScoredVector - - globals()["ScoredVector"] = ScoredVector - - -class SingleQueryResults(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - "matches": ([ScoredVector],), # noqa: E501 - "namespace": (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "matches": "matches", # noqa: E501 - "namespace": "namespace", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """SingleQueryResults - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - matches ([ScoredVector]): The matches for the vectors.. [optional] # noqa: E501 - namespace (str): The namespace for the vectors.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """SingleQueryResults - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - matches ([ScoredVector]): The matches for the vectors.. [optional] # noqa: E501 - namespace (str): The namespace for the vectors.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/sparse_values.py b/pinecone/core/client/model/sparse_values.py deleted file mode 100644 index 08bc7133..00000000 --- a/pinecone/core/client/model/sparse_values.py +++ /dev/null @@ -1,286 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -class SparseValues(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. 
- additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = { - ("indices",): {}, - ("values",): {}, - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - "indices": ([int],), # noqa: E501 - "values": ([float],), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "indices": "indices", # noqa: E501 - "values": "values", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, indices, values, *args, **kwargs): # noqa: E501 - """SparseValues - a model defined in OpenAPI - - Args: - indices ([int]): - values ([float]): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. 
- If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.indices = indices - self.values = values - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. 
- continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, indices, values, *args, **kwargs): # noqa: E501 - """SparseValues - a model defined in OpenAPI - - Args: - indices ([int]): - values ([float]): - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. 
- Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.indices = indices - self.values = values - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/update_request.py b/pinecone/core/client/model/update_request.py deleted file mode 100644 index 404ec38d..00000000 --- a/pinecone/core/client/model/update_request.py +++ /dev/null @@ -1,307 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -def lazy_import(): - from pinecone.core.client.model.sparse_values import SparseValues - - globals()["SparseValues"] = SparseValues - - -class UpdateRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = { - ("id",): { - "max_length": 512, - "min_length": 1, - }, - ("values",): {}, - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - "id": (str,), # noqa: E501 - "values": ([float],), # noqa: E501 - "sparse_values": (SparseValues,), # noqa: E501 - "set_metadata": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 - "namespace": (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "id": "id", # noqa: E501 - "values": "values", # noqa: E501 - "sparse_values": "sparseValues", # noqa: E501 - "set_metadata": "setMetadata", # noqa: E501 - "namespace": "namespace", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, id, *args, **kwargs): # noqa: E501 - """UpdateRequest - a model defined in OpenAPI - - Args: - id (str): Vector's unique id. 
- - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - values ([float]): Vector data.. [optional] # noqa: E501 - sparse_values (SparseValues): This is the sparse data of the vector to update [optional] # noqa: E501 - set_metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): Metadata to *set* for the vector.. [optional] # noqa: E501 - namespace (str): Namespace name where to update the vector.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.id = id - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, id, *args, **kwargs): # noqa: E501 - """UpdateRequest - a model defined in OpenAPI - - Args: - id (str): Vector's unique id. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - values ([float]): Vector data.. [optional] # noqa: E501 - sparse_values (SparseValues): This is the sparse data of the vector to update [optional] # noqa: E501 - set_metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): Metadata to *set* for the vector.. [optional] # noqa: E501 - namespace (str): Namespace name where to update the vector.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.id = id - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/upsert_request.py b/pinecone/core/client/model/upsert_request.py deleted file mode 100644 index 31990021..00000000 --- a/pinecone/core/client/model/upsert_request.py +++ /dev/null @@ -1,291 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -def lazy_import(): - from pinecone.core.client.model.vector import Vector - - globals()["Vector"] = Vector - - -class UpsertRequest(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = { - ("vectors",): {}, - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - "vectors": ([Vector],), # noqa: E501 - "namespace": (str,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "vectors": "vectors", # noqa: E501 - "namespace": "namespace", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, vectors, *args, **kwargs): # noqa: E501 - """UpsertRequest - a model defined in OpenAPI - - Args: - vectors ([Vector]): An array containing the vectors to upsert. Recommended batch limit is 100 vectors. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - namespace (str): This is the namespace name where you upsert vectors.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.vectors = vectors - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, vectors, *args, **kwargs): # noqa: E501 - """UpsertRequest - a model defined in OpenAPI - - Args: - vectors ([Vector]): An array containing the vectors to upsert. Recommended batch limit is 100 vectors. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. 
- _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - namespace (str): This is the namespace name where you upsert vectors.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.vectors = vectors - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. 
- continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." - ) diff --git a/pinecone/core/client/model/upsert_response.py b/pinecone/core/client/model/upsert_response.py deleted file mode 100644 index b766ec43..00000000 --- a/pinecone/core/client/model/upsert_response.py +++ /dev/null @@ -1,271 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -class UpsertResponse(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = {} - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - return { - "upserted_count": (int,), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "upserted_count": "upsertedCount", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """UpsertResponse - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. 
snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - upserted_count (int): The number of vectors upserted.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 - """UpsertResponse - a model defined in OpenAPI - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. 
- When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - upserted_count (int): The number of vectors upserted.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." 
- ) diff --git a/pinecone/core/client/model/vector.py b/pinecone/core/client/model/vector.py deleted file mode 100644 index 58a4dbea..00000000 --- a/pinecone/core/client/model/vector.py +++ /dev/null @@ -1,305 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -import re # noqa: F401 -import sys # noqa: F401 - -from pinecone.core.client.model_utils import ( # noqa: F401 - ApiTypeError, - ModelComposed, - ModelNormal, - ModelSimple, - cached_property, - change_keys_js_to_python, - convert_js_args_to_python_args, - date, - datetime, - file_type, - none_type, - validate_get_composed_info, -) -from ..model_utils import OpenApiModel -from pinecone.core.client.exceptions import ApiAttributeError - - -def lazy_import(): - from pinecone.core.client.model.sparse_values import SparseValues - - globals()["SparseValues"] = SparseValues - - -class Vector(ModelNormal): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - - Attributes: - allowed_values (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). The value is a dict - with a capitalized key describing the allowed value and an allowed - value. These dicts store the allowed enum values. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - discriminator_value_class_map (dict): A dict to go from the discriminator - variable value to the discriminator class name. - validations (dict): The key is the tuple path to the attribute - and the for var_name this is (var_name,). 
The value is a dict - that stores validations for max_length, min_length, max_items, - min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, - inclusive_minimum, and regex. - additional_properties_type (tuple): A tuple of classes accepted - as additional properties values. - """ - - allowed_values = {} - - validations = { - ("id",): { - "max_length": 512, - "min_length": 1, - }, - ("values",): {}, - } - - @cached_property - def additional_properties_type(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - """ - lazy_import() - return ( - bool, - date, - datetime, - dict, - float, - int, - list, - str, - none_type, - ) # noqa: E501 - - _nullable = False - - @cached_property - def openapi_types(): - """ - This must be a method because a model may have properties that are - of type self, this must run after the class is loaded - - Returns - openapi_types (dict): The key is attribute name - and the value is attribute type. - """ - lazy_import() - return { - "id": (str,), # noqa: E501 - "values": ([float],), # noqa: E501 - "sparse_values": (SparseValues,), # noqa: E501 - "metadata": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 - } - - @cached_property - def discriminator(): - return None - - attribute_map = { - "id": "id", # noqa: E501 - "values": "values", # noqa: E501 - "sparse_values": "sparseValues", # noqa: E501 - "metadata": "metadata", # noqa: E501 - } - - read_only_vars = {} - - _composed_schemas = {} - - @classmethod - @convert_js_args_to_python_args - def _from_openapi_data(cls, id, values, *args, **kwargs): # noqa: E501 - """Vector - a model defined in OpenAPI - - Args: - id (str): This is the vector's unique id. - values ([float]): This is the vector data included in the request. 
- - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. - Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - sparse_values (SparseValues): the sparse data of the returned vector [optional] # noqa: E501 - metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): This is the metadata included in the request.. 
[optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - self = super(OpenApiModel, cls).__new__(cls) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." - % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.id = id - self.values = values - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - return self - - required_properties = set( - [ - "_data_store", - "_check_type", - "_spec_property_naming", - "_path_to_item", - "_configuration", - "_visited_composed_classes", - ] - ) - - @convert_js_args_to_python_args - def __init__(self, id, values, *args, **kwargs): # noqa: E501 - """Vector - a model defined in OpenAPI - - Args: - id (str): This is the vector's unique id. - values ([float]): This is the vector data included in the request. - - Keyword Args: - _check_type (bool): if True, values for parameters in openapi_types - will be type checked and a TypeError will be - raised if the wrong type is input. 
- Defaults to True - _path_to_item (tuple/list): This is a list of keys or values to - drill down to the model in received_data - when deserializing a response - _spec_property_naming (bool): True if the variable names in the input data - are serialized names, as specified in the OpenAPI document. - False if the variable names in the input data - are pythonic names, e.g. snake case (default) - _configuration (Configuration): the instance to use when - deserializing a file_type parameter. - If passed, type conversion is attempted - If omitted no type conversion is done. - _visited_composed_classes (tuple): This stores a tuple of - classes that we have traveled through so that - if we see that class again we will not use its - discriminator again. - When traveling through a discriminator, the - composed schema that is - is traveled through is added to this set. - For example if Animal has a discriminator - petType and we pass in "Dog", and the class Dog - allOf includes Animal, we move through Animal - once using the discriminator, and pick Dog. - Then in Dog, we will make an instance of the - Animal class but this time we won't travel - through its discriminator because we passed in - _visited_composed_classes = (Animal,) - sparse_values (SparseValues): This is the sparse data of the vector to update [optional] # noqa: E501 - metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): This is the metadata included in the request.. [optional] # noqa: E501 - """ - - _check_type = kwargs.pop("_check_type", True) - _spec_property_naming = kwargs.pop("_spec_property_naming", False) - _path_to_item = kwargs.pop("_path_to_item", ()) - _configuration = kwargs.pop("_configuration", None) - _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) - - if args: - raise ApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
- % ( - args, - self.__class__.__name__, - ), - path_to_item=_path_to_item, - valid_classes=(self.__class__,), - ) - - self._data_store = {} - self._check_type = _check_type - self._spec_property_naming = _spec_property_naming - self._path_to_item = _path_to_item - self._configuration = _configuration - self._visited_composed_classes = _visited_composed_classes + (self.__class__,) - - self.id = id - self.values = values - for var_name, var_value in kwargs.items(): - if ( - var_name not in self.attribute_map - and self._configuration is not None - and self._configuration.discard_unknown_keys - and self.additional_properties_type is None - ): - # discard variable. - continue - setattr(self, var_name, var_value) - if var_name in self.read_only_vars: - raise ApiAttributeError( - f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes." - ) diff --git a/pinecone/core/client/model_utils.py b/pinecone/core/client/model_utils.py deleted file mode 100644 index 73dc385d..00000000 --- a/pinecone/core/client/model_utils.py +++ /dev/null @@ -1,1884 +0,0 @@ -""" - Pinecone API - - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 - - The version of the OpenAPI document: version not set - Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" - - -from datetime import date, datetime # noqa: F401 -import inspect -import io -import os -import pprint -import re -import tempfile - -from dateutil.parser import parse - -from pinecone.core.client.exceptions import ( - ApiKeyError, - ApiAttributeError, - ApiTypeError, - ApiValueError, -) - -none_type = type(None) -file_type = io.IOBase - - -def convert_js_args_to_python_args(fn): - from functools import wraps - - @wraps(fn) - def wrapped_init(_self, *args, **kwargs): - """ - An attribute named `self` received from the api will conflicts with the reserved `self` - parameter of 
a class method. During generation, `self` attributes are mapped - to `_self` in models. Here, we name `_self` instead of `self` to avoid conflicts. - """ - spec_property_naming = kwargs.get("_spec_property_naming", False) - if spec_property_naming: - kwargs = change_keys_js_to_python(kwargs, _self if isinstance(_self, type) else _self.__class__) - return fn(_self, *args, **kwargs) - - return wrapped_init - - -class cached_property(object): - # this caches the result of the function call for fn with no inputs - # use this as a decorator on function methods that you want converted - # into cached properties - result_key = "_results" - - def __init__(self, fn): - self._fn = fn - - def __get__(self, instance, cls=None): - if self.result_key in vars(self): - return vars(self)[self.result_key] - else: - result = self._fn() - setattr(self, self.result_key, result) - return result - - -PRIMITIVE_TYPES = (list, float, int, bool, datetime, date, str, file_type) - - -def allows_single_value_input(cls): - """ - This function returns True if the input composed schema model or any - descendant model allows a value only input - This is true for cases where oneOf contains items like: - oneOf: - - float - - NumberWithValidation - - StringEnum - - ArrayModel - - null - TODO: lru_cache this - """ - if issubclass(cls, ModelSimple) or cls in PRIMITIVE_TYPES: - return True - elif issubclass(cls, ModelComposed): - if not cls._composed_schemas["oneOf"]: - return False - return any(allows_single_value_input(c) for c in cls._composed_schemas["oneOf"]) - return False - - -def composed_model_input_classes(cls): - """ - This function returns a list of the possible models that can be accepted as - inputs. 
- TODO: lru_cache this - """ - if issubclass(cls, ModelSimple) or cls in PRIMITIVE_TYPES: - return [cls] - elif issubclass(cls, ModelNormal): - if cls.discriminator is None: - return [cls] - else: - return get_discriminated_classes(cls) - elif issubclass(cls, ModelComposed): - if not cls._composed_schemas["oneOf"]: - return [] - if cls.discriminator is None: - input_classes = [] - for c in cls._composed_schemas["oneOf"]: - input_classes.extend(composed_model_input_classes(c)) - return input_classes - else: - return get_discriminated_classes(cls) - return [] - - -class OpenApiModel(object): - """The base class for all OpenAPIModels""" - - def set_attribute(self, name, value): - # this is only used to set properties on self - - path_to_item = [] - if self._path_to_item: - path_to_item.extend(self._path_to_item) - path_to_item.append(name) - - if name in self.openapi_types: - required_types_mixed = self.openapi_types[name] - elif self.additional_properties_type is None: - raise ApiAttributeError("{0} has no attribute '{1}'".format(type(self).__name__, name), path_to_item) - elif self.additional_properties_type is not None: - required_types_mixed = self.additional_properties_type - - if get_simple_class(name) != str: - error_msg = type_error_message(var_name=name, var_value=name, valid_classes=(str,), key_type=True) - raise ApiTypeError(error_msg, path_to_item=path_to_item, valid_classes=(str,), key_type=True) - - if self._check_type: - value = validate_and_convert_types( - value, - required_types_mixed, - path_to_item, - self._spec_property_naming, - self._check_type, - configuration=self._configuration, - ) - if (name,) in self.allowed_values: - check_allowed_values(self.allowed_values, (name,), value) - if (name,) in self.validations: - check_validations(self.validations, (name,), value, self._configuration) - self.__dict__["_data_store"][name] = value - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __ne__(self, other): - 
"""Returns true if both objects are not equal""" - return not self == other - - def __setattr__(self, attr, value): - """set the value of an attribute using dot notation: `instance.attr = val`""" - self[attr] = value - - def __getattr__(self, attr): - """get the value of an attribute using dot notation: `instance.attr`""" - return self.get(attr) - - def __new__(cls, *args, **kwargs): - # this function uses the discriminator to - # pick a new schema/class to instantiate because a discriminator - # propertyName value was passed in - - if len(args) == 1: - arg = args[0] - if arg is None and is_type_nullable(cls): - # The input data is the 'null' value and the type is nullable. - return None - - if issubclass(cls, ModelComposed) and allows_single_value_input(cls): - model_kwargs = {} - oneof_instance = get_oneof_instance(cls, model_kwargs, kwargs, model_arg=arg) - return oneof_instance - - visited_composed_classes = kwargs.get("_visited_composed_classes", ()) - if cls.discriminator is None or cls in visited_composed_classes: - # Use case 1: this openapi schema (cls) does not have a discriminator - # Use case 2: we have already visited this class before and are sure that we - # want to instantiate it this time. We have visited this class deserializing - # a payload with a discriminator. During that process we traveled through - # this class but did not make an instance of it. Now we are making an - # instance of a composed class which contains cls in it, so this time make an instance of cls. - # - # Here's an example of use case 2: If Animal has a discriminator - # petType and we pass in "Dog", and the class Dog - # allOf includes Animal, we move through Animal - # once using the discriminator, and pick Dog. 
- # Then in the composed schema dog Dog, we will make an instance of the - # Animal class (because Dal has allOf: Animal) but this time we won't travel - # through Animal's discriminator because we passed in - # _visited_composed_classes = (Animal,) - - return super(OpenApiModel, cls).__new__(cls) - - # Get the name and value of the discriminator property. - # The discriminator name is obtained from the discriminator meta-data - # and the discriminator value is obtained from the input data. - discr_propertyname_py = list(cls.discriminator.keys())[0] - discr_propertyname_js = cls.attribute_map[discr_propertyname_py] - if discr_propertyname_js in kwargs: - discr_value = kwargs[discr_propertyname_js] - elif discr_propertyname_py in kwargs: - discr_value = kwargs[discr_propertyname_py] - else: - # The input data does not contain the discriminator property. - path_to_item = kwargs.get("_path_to_item", ()) - raise ApiValueError( - "Cannot deserialize input data due to missing discriminator. " - "The discriminator property '%s' is missing at path: %s" % (discr_propertyname_js, path_to_item) - ) - - # Implementation note: the last argument to get_discriminator_class - # is a list of visited classes. get_discriminator_class may recursively - # call itself and update the list of visited classes, and the initial - # value must be an empty list. Hence not using 'visited_composed_classes' - new_cls = get_discriminator_class(cls, discr_propertyname_py, discr_value, []) - if new_cls is None: - path_to_item = kwargs.get("_path_to_item", ()) - disc_prop_value = kwargs.get(discr_propertyname_js, kwargs.get(discr_propertyname_py)) - raise ApiValueError( - "Cannot deserialize input data due to invalid discriminator " - "value. 
The OpenAPI document has no mapping for discriminator " - "property '%s'='%s' at path: %s" % (discr_propertyname_js, disc_prop_value, path_to_item) - ) - - if new_cls in visited_composed_classes: - # if we are making an instance of a composed schema Descendent - # which allOf includes Ancestor, then Ancestor contains - # a discriminator that includes Descendent. - # So if we make an instance of Descendent, we have to make an - # instance of Ancestor to hold the allOf properties. - # This code detects that use case and makes the instance of Ancestor - # For example: - # When making an instance of Dog, _visited_composed_classes = (Dog,) - # then we make an instance of Animal to include in dog._composed_instances - # so when we are here, cls is Animal - # cls.discriminator != None - # cls not in _visited_composed_classes - # new_cls = Dog - # but we know we know that we already have Dog - # because it is in visited_composed_classes - # so make Animal here - return super(OpenApiModel, cls).__new__(cls) - - # Build a list containing all oneOf and anyOf descendants. 
- oneof_anyof_classes = None - if cls._composed_schemas is not None: - oneof_anyof_classes = cls._composed_schemas.get("oneOf", ()) + cls._composed_schemas.get("anyOf", ()) - oneof_anyof_child = new_cls in oneof_anyof_classes - kwargs["_visited_composed_classes"] = visited_composed_classes + (cls,) - - if cls._composed_schemas.get("allOf") and oneof_anyof_child: - # Validate that we can make self because when we make the - # new_cls it will not include the allOf validations in self - self_inst = super(OpenApiModel, cls).__new__(cls) - self_inst.__init__(*args, **kwargs) - - new_inst = new_cls.__new__(new_cls, *args, **kwargs) - new_inst.__init__(*args, **kwargs) - return new_inst - - @classmethod - @convert_js_args_to_python_args - def _new_from_openapi_data(cls, *args, **kwargs): - # this function uses the discriminator to - # pick a new schema/class to instantiate because a discriminator - # propertyName value was passed in - - if len(args) == 1: - arg = args[0] - if arg is None and is_type_nullable(cls): - # The input data is the 'null' value and the type is nullable. - return None - - if issubclass(cls, ModelComposed) and allows_single_value_input(cls): - model_kwargs = {} - oneof_instance = get_oneof_instance(cls, model_kwargs, kwargs, model_arg=arg) - return oneof_instance - - visited_composed_classes = kwargs.get("_visited_composed_classes", ()) - if cls.discriminator is None or cls in visited_composed_classes: - # Use case 1: this openapi schema (cls) does not have a discriminator - # Use case 2: we have already visited this class before and are sure that we - # want to instantiate it this time. We have visited this class deserializing - # a payload with a discriminator. During that process we traveled through - # this class but did not make an instance of it. Now we are making an - # instance of a composed class which contains cls in it, so this time make an instance of cls. 
- # - # Here's an example of use case 2: If Animal has a discriminator - # petType and we pass in "Dog", and the class Dog - # allOf includes Animal, we move through Animal - # once using the discriminator, and pick Dog. - # Then in the composed schema dog Dog, we will make an instance of the - # Animal class (because Dal has allOf: Animal) but this time we won't travel - # through Animal's discriminator because we passed in - # _visited_composed_classes = (Animal,) - - return cls._from_openapi_data(*args, **kwargs) - - # Get the name and value of the discriminator property. - # The discriminator name is obtained from the discriminator meta-data - # and the discriminator value is obtained from the input data. - discr_propertyname_py = list(cls.discriminator.keys())[0] - discr_propertyname_js = cls.attribute_map[discr_propertyname_py] - if discr_propertyname_js in kwargs: - discr_value = kwargs[discr_propertyname_js] - elif discr_propertyname_py in kwargs: - discr_value = kwargs[discr_propertyname_py] - else: - # The input data does not contain the discriminator property. - path_to_item = kwargs.get("_path_to_item", ()) - raise ApiValueError( - "Cannot deserialize input data due to missing discriminator. " - "The discriminator property '%s' is missing at path: %s" % (discr_propertyname_js, path_to_item) - ) - - # Implementation note: the last argument to get_discriminator_class - # is a list of visited classes. get_discriminator_class may recursively - # call itself and update the list of visited classes, and the initial - # value must be an empty list. Hence not using 'visited_composed_classes' - new_cls = get_discriminator_class(cls, discr_propertyname_py, discr_value, []) - if new_cls is None: - path_to_item = kwargs.get("_path_to_item", ()) - disc_prop_value = kwargs.get(discr_propertyname_js, kwargs.get(discr_propertyname_py)) - raise ApiValueError( - "Cannot deserialize input data due to invalid discriminator " - "value. 
The OpenAPI document has no mapping for discriminator " - "property '%s'='%s' at path: %s" % (discr_propertyname_js, disc_prop_value, path_to_item) - ) - - if new_cls in visited_composed_classes: - # if we are making an instance of a composed schema Descendent - # which allOf includes Ancestor, then Ancestor contains - # a discriminator that includes Descendent. - # So if we make an instance of Descendent, we have to make an - # instance of Ancestor to hold the allOf properties. - # This code detects that use case and makes the instance of Ancestor - # For example: - # When making an instance of Dog, _visited_composed_classes = (Dog,) - # then we make an instance of Animal to include in dog._composed_instances - # so when we are here, cls is Animal - # cls.discriminator != None - # cls not in _visited_composed_classes - # new_cls = Dog - # but we know we know that we already have Dog - # because it is in visited_composed_classes - # so make Animal here - return cls._from_openapi_data(*args, **kwargs) - - # Build a list containing all oneOf and anyOf descendants. 
- oneof_anyof_classes = None - if cls._composed_schemas is not None: - oneof_anyof_classes = cls._composed_schemas.get("oneOf", ()) + cls._composed_schemas.get("anyOf", ()) - oneof_anyof_child = new_cls in oneof_anyof_classes - kwargs["_visited_composed_classes"] = visited_composed_classes + (cls,) - - if cls._composed_schemas.get("allOf") and oneof_anyof_child: - # Validate that we can make self because when we make the - # new_cls it will not include the allOf validations in self - self_inst = cls._from_openapi_data(*args, **kwargs) - - new_inst = new_cls._new_from_openapi_data(*args, **kwargs) - return new_inst - - -class ModelSimple(OpenApiModel): - """the parent class of models whose type != object in their - swagger/openapi""" - - def __setitem__(self, name, value): - """set the value of an attribute using square-bracket notation: `instance[attr] = val`""" - if name in self.required_properties: - self.__dict__[name] = value - return - - self.set_attribute(name, value) - - def get(self, name, default=None): - """returns the value of an attribute or some default value if the attribute was not set""" - if name in self.required_properties: - return self.__dict__[name] - - return self.__dict__["_data_store"].get(name, default) - - def __getitem__(self, name): - """get the value of an attribute using square-bracket notation: `instance[attr]`""" - if name in self: - return self.get(name) - - raise ApiAttributeError( - "{0} has no attribute '{1}'".format(type(self).__name__, name), [e for e in [self._path_to_item, name] if e] - ) - - def __contains__(self, name): - """used by `in` operator to check if an attrbute value was set in an instance: `'attr' in instance`""" - if name in self.required_properties: - return name in self.__dict__ - - return name in self.__dict__["_data_store"] - - def to_str(self): - """Returns the string representation of the model""" - return str(self.value) - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not 
isinstance(other, self.__class__): - return False - - this_val = self._data_store["value"] - that_val = other._data_store["value"] - types = set() - types.add(this_val.__class__) - types.add(that_val.__class__) - vals_equal = this_val == that_val - return vals_equal - - -class ModelNormal(OpenApiModel): - """the parent class of models whose type == object in their - swagger/openapi""" - - def __setitem__(self, name, value): - """set the value of an attribute using square-bracket notation: `instance[attr] = val`""" - if name in self.required_properties: - self.__dict__[name] = value - return - - self.set_attribute(name, value) - - def get(self, name, default=None): - """returns the value of an attribute or some default value if the attribute was not set""" - if name in self.required_properties: - return self.__dict__[name] - - return self.__dict__["_data_store"].get(name, default) - - def __getitem__(self, name): - """get the value of an attribute using square-bracket notation: `instance[attr]`""" - if name in self: - return self.get(name) - - raise ApiAttributeError( - "{0} has no attribute '{1}'".format(type(self).__name__, name), [e for e in [self._path_to_item, name] if e] - ) - - def __contains__(self, name): - """used by `in` operator to check if an attrbute value was set in an instance: `'attr' in instance`""" - if name in self.required_properties: - return name in self.__dict__ - - return name in self.__dict__["_data_store"] - - def to_dict(self): - """Returns the model properties as a dict""" - return model_to_dict(self, serialize=False) - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, self.__class__): - return False - - if not set(self._data_store.keys()) == set(other._data_store.keys()): - return False - for _var_name, this_val in self._data_store.items(): - that_val = 
other._data_store[_var_name] - types = set() - types.add(this_val.__class__) - types.add(that_val.__class__) - vals_equal = this_val == that_val - if not vals_equal: - return False - return True - - -class ModelComposed(OpenApiModel): - """the parent class of models whose type == object in their - swagger/openapi and have oneOf/allOf/anyOf - - When one sets a property we use var_name_to_model_instances to store the value in - the correct class instances + run any type checking + validation code. - When one gets a property we use var_name_to_model_instances to get the value - from the correct class instances. - This allows multiple composed schemas to contain the same property with additive - constraints on the value. - - _composed_schemas (dict) stores the anyOf/allOf/oneOf classes - key (str): allOf/oneOf/anyOf - value (list): the classes in the XOf definition. - Note: none_type can be included when the openapi document version >= 3.1.0 - _composed_instances (list): stores a list of instances of the composed schemas - defined in _composed_schemas. When properties are accessed in the self instance, - they are returned from the self._data_store or the data stores in the instances - in self._composed_schemas - _var_name_to_model_instances (dict): maps between a variable name on self and - the composed instances (self included) which contain that data - key (str): property name - value (list): list of class instances, self or instances in _composed_instances - which contain the value that the key is referring to. - """ - - def __setitem__(self, name, value): - """set the value of an attribute using square-bracket notation: `instance[attr] = val`""" - if name in self.required_properties: - self.__dict__[name] = value - return - - """ - Use cases: - 1. 
additional_properties_type is None (additionalProperties == False in spec) - Check for property presence in self.openapi_types - if not present then throw an error - if present set in self, set attribute - always set on composed schemas - 2. additional_properties_type exists - set attribute on self - always set on composed schemas - """ - if self.additional_properties_type is None: - """ - For an attribute to exist on a composed schema it must: - - fulfill schema_requirements in the self composed schema not considering oneOf/anyOf/allOf schemas AND - - fulfill schema_requirements in each oneOf/anyOf/allOf schemas - - schema_requirements: - For an attribute to exist on a schema it must: - - be present in properties at the schema OR - - have additionalProperties unset (defaults additionalProperties = any type) OR - - have additionalProperties set - """ - if name not in self.openapi_types: - raise ApiAttributeError( - "{0} has no attribute '{1}'".format(type(self).__name__, name), - [e for e in [self._path_to_item, name] if e], - ) - # attribute must be set on self and composed instances - self.set_attribute(name, value) - for model_instance in self._composed_instances: - setattr(model_instance, name, value) - if name not in self._var_name_to_model_instances: - # we assigned an additional property - self.__dict__["_var_name_to_model_instances"][name] = self._composed_instances + [self] - return None - - __unset_attribute_value__ = object() - - def get(self, name, default=None): - """returns the value of an attribute or some default value if the attribute was not set""" - if name in self.required_properties: - return self.__dict__[name] - - # get the attribute from the correct instance - model_instances = self._var_name_to_model_instances.get(name) - values = [] - # A composed model stores self and child (oneof/anyOf/allOf) models under - # self._var_name_to_model_instances. 
- # Any property must exist in self and all model instances - # The value stored in all model instances must be the same - if model_instances: - for model_instance in model_instances: - if name in model_instance._data_store: - v = model_instance._data_store[name] - if v not in values: - values.append(v) - len_values = len(values) - if len_values == 0: - return default - elif len_values == 1: - return values[0] - elif len_values > 1: - raise ApiValueError( - "Values stored for property {0} in {1} differ when looking " - "at self and self's composed instances. All values must be " - "the same".format(name, type(self).__name__), - [e for e in [self._path_to_item, name] if e], - ) - - def __getitem__(self, name): - """get the value of an attribute using square-bracket notation: `instance[attr]`""" - value = self.get(name, self.__unset_attribute_value__) - if value is self.__unset_attribute_value__: - raise ApiAttributeError( - "{0} has no attribute '{1}'".format(type(self).__name__, name), - [e for e in [self._path_to_item, name] if e], - ) - return value - - def __contains__(self, name): - """used by `in` operator to check if an attrbute value was set in an instance: `'attr' in instance`""" - - if name in self.required_properties: - return name in self.__dict__ - - model_instances = self._var_name_to_model_instances.get(name, self._additional_properties_model_instances) - - if model_instances: - for model_instance in model_instances: - if name in model_instance._data_store: - return True - - return False - - def to_dict(self): - """Returns the model properties as a dict""" - return model_to_dict(self, serialize=False) - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, self.__class__): - return False - - if not set(self._data_store.keys()) == set(other._data_store.keys()): - return False - for 
_var_name, this_val in self._data_store.items(): - that_val = other._data_store[_var_name] - types = set() - types.add(this_val.__class__) - types.add(that_val.__class__) - vals_equal = this_val == that_val - if not vals_equal: - return False - return True - - -COERCION_INDEX_BY_TYPE = { - ModelComposed: 0, - ModelNormal: 1, - ModelSimple: 2, - none_type: 3, # The type of 'None'. - list: 4, - dict: 5, - float: 6, - int: 7, - bool: 8, - datetime: 9, - date: 10, - str: 11, - file_type: 12, # 'file_type' is an alias for the built-in 'file' or 'io.IOBase' type. -} - -# these are used to limit what type conversions we try to do -# when we have a valid type already and we want to try converting -# to another type -UPCONVERSION_TYPE_PAIRS = ( - (str, datetime), - (str, date), - (int, float), # A float may be serialized as an integer, e.g. '3' is a valid serialized float. - (list, ModelComposed), - (dict, ModelComposed), - (str, ModelComposed), - (int, ModelComposed), - (float, ModelComposed), - (list, ModelComposed), - (list, ModelNormal), - (dict, ModelNormal), - (str, ModelSimple), - (int, ModelSimple), - (float, ModelSimple), - (list, ModelSimple), -) - -COERCIBLE_TYPE_PAIRS = { - False: ( # client instantiation of a model with client data - # (dict, ModelComposed), - # (list, ModelComposed), - # (dict, ModelNormal), - # (list, ModelNormal), - # (str, ModelSimple), - # (int, ModelSimple), - # (float, ModelSimple), - # (list, ModelSimple), - # (str, int), - # (str, float), - # (str, datetime), - # (str, date), - # (int, str), - # (float, str), - ), - True: ( # server -> client data - (dict, ModelComposed), - (list, ModelComposed), - (dict, ModelNormal), - (list, ModelNormal), - (str, ModelSimple), - (int, ModelSimple), - (float, ModelSimple), - (list, ModelSimple), - # (str, int), - # (str, float), - (str, datetime), - (str, date), - # (int, str), - # (float, str), - (str, file_type), - ), -} - - -def get_simple_class(input_value): - """Returns an input_value's simple 
class that we will use for type checking - Python2: - float and int will return int, where int is the python3 int backport - str and unicode will return str, where str is the python3 str backport - Note: float and int ARE both instances of int backport - Note: str_py2 and unicode_py2 are NOT both instances of str backport - - Args: - input_value (class/class_instance): the item for which we will return - the simple class - """ - if isinstance(input_value, type): - # input_value is a class - return input_value - elif isinstance(input_value, tuple): - return tuple - elif isinstance(input_value, list): - return list - elif isinstance(input_value, dict): - return dict - elif isinstance(input_value, none_type): - return none_type - elif isinstance(input_value, file_type): - return file_type - elif isinstance(input_value, bool): - # this must be higher than the int check because - # isinstance(True, int) == True - return bool - elif isinstance(input_value, int): - return int - elif isinstance(input_value, datetime): - # this must be higher than the date check because - # isinstance(datetime_instance, date) == True - return datetime - elif isinstance(input_value, date): - return date - elif isinstance(input_value, str): - return str - return type(input_value) - - -def check_allowed_values(allowed_values, input_variable_path, input_values): - """Raises an exception if the input_values are not allowed - - Args: - allowed_values (dict): the allowed_values dict - input_variable_path (tuple): the path to the input variable - input_values (list/str/int/float/date/datetime): the values that we - are checking to see if they are in allowed_values - """ - these_allowed_values = list(allowed_values[input_variable_path].values()) - if isinstance(input_values, list) and not set(input_values).issubset(set(these_allowed_values)): - invalid_values = (", ".join(map(str, set(input_values) - set(these_allowed_values))),) - raise ApiValueError( - "Invalid values for `%s` [%s], must be a 
subset of [%s]" - % (input_variable_path[0], invalid_values, ", ".join(map(str, these_allowed_values))) - ) - elif isinstance(input_values, dict) and not set(input_values.keys()).issubset(set(these_allowed_values)): - invalid_values = ", ".join(map(str, set(input_values.keys()) - set(these_allowed_values))) - raise ApiValueError( - "Invalid keys in `%s` [%s], must be a subset of [%s]" - % (input_variable_path[0], invalid_values, ", ".join(map(str, these_allowed_values))) - ) - elif not isinstance(input_values, (list, dict)) and input_values not in these_allowed_values: - raise ApiValueError( - "Invalid value for `%s` (%s), must be one of %s" - % (input_variable_path[0], input_values, these_allowed_values) - ) - - -def is_json_validation_enabled(schema_keyword, configuration=None): - """Returns true if JSON schema validation is enabled for the specified - validation keyword. This can be used to skip JSON schema structural validation - as requested in the configuration. - - Args: - schema_keyword (string): the name of a JSON schema validation keyword. - configuration (Configuration): the configuration class. - """ - - return ( - configuration is None - or not hasattr(configuration, "_disabled_client_side_validations") - or schema_keyword not in configuration._disabled_client_side_validations - ) - - -def check_validations(validations, input_variable_path, input_values, configuration=None): - """Raises an exception if the input_values are invalid - - Args: - validations (dict): the validation dictionary. - input_variable_path (tuple): the path to the input variable. - input_values (list/str/int/float/date/datetime): the values that we - are checking. - configuration (Configuration): the configuration class. 
- """ - - if input_values is None: - return - - current_validations = validations[input_variable_path] - if ( - is_json_validation_enabled("multipleOf", configuration) - and "multiple_of" in current_validations - and isinstance(input_values, (int, float)) - and not (float(input_values) / current_validations["multiple_of"]).is_integer() - ): - # Note 'multipleOf' will be as good as the floating point arithmetic. - raise ApiValueError( - "Invalid value for `%s`, value must be a multiple of " - "`%s`" % (input_variable_path[0], current_validations["multiple_of"]) - ) - - if ( - is_json_validation_enabled("maxLength", configuration) - and "max_length" in current_validations - and len(input_values) > current_validations["max_length"] - ): - raise ApiValueError( - "Invalid value for `%s`, length must be less than or equal to " - "`%s`" % (input_variable_path[0], current_validations["max_length"]) - ) - - if ( - is_json_validation_enabled("minLength", configuration) - and "min_length" in current_validations - and len(input_values) < current_validations["min_length"] - ): - raise ApiValueError( - "Invalid value for `%s`, length must be greater than or equal to " - "`%s`" % (input_variable_path[0], current_validations["min_length"]) - ) - - if ( - is_json_validation_enabled("maxItems", configuration) - and "max_items" in current_validations - and len(input_values) > current_validations["max_items"] - ): - raise ApiValueError( - "Invalid value for `%s`, number of items must be less than or " - "equal to `%s`" % (input_variable_path[0], current_validations["max_items"]) - ) - - if ( - is_json_validation_enabled("minItems", configuration) - and "min_items" in current_validations - and len(input_values) < current_validations["min_items"] - ): - raise ValueError( - "Invalid value for `%s`, number of items must be greater than or " - "equal to `%s`" % (input_variable_path[0], current_validations["min_items"]) - ) - - items = ("exclusive_maximum", "inclusive_maximum", 
"exclusive_minimum", "inclusive_minimum") - if any(item in current_validations for item in items): - if isinstance(input_values, list): - max_val = max(input_values) - min_val = min(input_values) - elif isinstance(input_values, dict): - max_val = max(input_values.values()) - min_val = min(input_values.values()) - else: - max_val = input_values - min_val = input_values - - if ( - is_json_validation_enabled("exclusiveMaximum", configuration) - and "exclusive_maximum" in current_validations - and max_val >= current_validations["exclusive_maximum"] - ): - raise ApiValueError( - "Invalid value for `%s`, must be a value less than `%s`" - % (input_variable_path[0], current_validations["exclusive_maximum"]) - ) - - if ( - is_json_validation_enabled("maximum", configuration) - and "inclusive_maximum" in current_validations - and max_val > current_validations["inclusive_maximum"] - ): - raise ApiValueError( - "Invalid value for `%s`, must be a value less than or equal to " - "`%s`" % (input_variable_path[0], current_validations["inclusive_maximum"]) - ) - - if ( - is_json_validation_enabled("exclusiveMinimum", configuration) - and "exclusive_minimum" in current_validations - and min_val <= current_validations["exclusive_minimum"] - ): - raise ApiValueError( - "Invalid value for `%s`, must be a value greater than `%s`" - % (input_variable_path[0], current_validations["exclusive_maximum"]) - ) - - if ( - is_json_validation_enabled("minimum", configuration) - and "inclusive_minimum" in current_validations - and min_val < current_validations["inclusive_minimum"] - ): - raise ApiValueError( - "Invalid value for `%s`, must be a value greater than or equal " - "to `%s`" % (input_variable_path[0], current_validations["inclusive_minimum"]) - ) - flags = current_validations.get("regex", {}).get("flags", 0) - if ( - is_json_validation_enabled("pattern", configuration) - and "regex" in current_validations - and not re.search(current_validations["regex"]["pattern"], input_values, 
flags=flags) - ): - err_msg = r"Invalid value for `%s`, must match regular expression `%s`" % ( - input_variable_path[0], - current_validations["regex"]["pattern"], - ) - if flags != 0: - # Don't print the regex flags if the flags are not - # specified in the OAS document. - err_msg = r"%s with flags=`%s`" % (err_msg, flags) - raise ApiValueError(err_msg) - - -def order_response_types(required_types): - """Returns the required types sorted in coercion order - - Args: - required_types (list/tuple): collection of classes or instance of - list or dict with class information inside it. - - Returns: - (list): coercion order sorted collection of classes or instance - of list or dict with class information inside it. - """ - - def index_getter(class_or_instance): - if isinstance(class_or_instance, list): - return COERCION_INDEX_BY_TYPE[list] - elif isinstance(class_or_instance, dict): - return COERCION_INDEX_BY_TYPE[dict] - elif inspect.isclass(class_or_instance) and issubclass(class_or_instance, ModelComposed): - return COERCION_INDEX_BY_TYPE[ModelComposed] - elif inspect.isclass(class_or_instance) and issubclass(class_or_instance, ModelNormal): - return COERCION_INDEX_BY_TYPE[ModelNormal] - elif inspect.isclass(class_or_instance) and issubclass(class_or_instance, ModelSimple): - return COERCION_INDEX_BY_TYPE[ModelSimple] - elif class_or_instance in COERCION_INDEX_BY_TYPE: - return COERCION_INDEX_BY_TYPE[class_or_instance] - raise ApiValueError("Unsupported type: %s" % class_or_instance) - - sorted_types = sorted(required_types, key=lambda class_or_instance: index_getter(class_or_instance)) - return sorted_types - - -def remove_uncoercible(required_types_classes, current_item, spec_property_naming, must_convert=True): - """Only keeps the type conversions that are possible - - Args: - required_types_classes (tuple): tuple of classes that are required - these should be ordered by COERCION_INDEX_BY_TYPE - spec_property_naming (bool): True if the variable names in the input 
- data are serialized names as specified in the OpenAPI document. - False if the variables names in the input data are python - variable names in PEP-8 snake case. - current_item (any): the current item (input data) to be converted - - Keyword Args: - must_convert (bool): if True the item to convert is of the wrong - type and we want a big list of coercibles - if False, we want a limited list of coercibles - - Returns: - (list): the remaining coercible required types, classes only - """ - current_type_simple = get_simple_class(current_item) - - results_classes = [] - for required_type_class in required_types_classes: - # convert our models to OpenApiModel - required_type_class_simplified = required_type_class - if isinstance(required_type_class_simplified, type): - if issubclass(required_type_class_simplified, ModelComposed): - required_type_class_simplified = ModelComposed - elif issubclass(required_type_class_simplified, ModelNormal): - required_type_class_simplified = ModelNormal - elif issubclass(required_type_class_simplified, ModelSimple): - required_type_class_simplified = ModelSimple - - if required_type_class_simplified == current_type_simple: - # don't consider converting to one's own class - continue - - class_pair = (current_type_simple, required_type_class_simplified) - if must_convert and class_pair in COERCIBLE_TYPE_PAIRS[spec_property_naming]: - results_classes.append(required_type_class) - elif class_pair in UPCONVERSION_TYPE_PAIRS: - results_classes.append(required_type_class) - return results_classes - - -def get_discriminated_classes(cls): - """ - Returns all the classes that a discriminator converts to - TODO: lru_cache this - """ - possible_classes = [] - key = list(cls.discriminator.keys())[0] - if is_type_nullable(cls): - possible_classes.append(cls) - for discr_cls in cls.discriminator[key].values(): - if hasattr(discr_cls, "discriminator") and discr_cls.discriminator is not None: - 
possible_classes.extend(get_discriminated_classes(discr_cls)) - else: - possible_classes.append(discr_cls) - return possible_classes - - -def get_possible_classes(cls, from_server_context): - # TODO: lru_cache this - possible_classes = [cls] - if from_server_context: - return possible_classes - if hasattr(cls, "discriminator") and cls.discriminator is not None: - possible_classes = [] - possible_classes.extend(get_discriminated_classes(cls)) - elif issubclass(cls, ModelComposed): - possible_classes.extend(composed_model_input_classes(cls)) - return possible_classes - - -def get_required_type_classes(required_types_mixed, spec_property_naming): - """Converts the tuple required_types into a tuple and a dict described - below - - Args: - required_types_mixed (tuple/list): will contain either classes or - instance of list or dict - spec_property_naming (bool): if True these values came from the - server, and we use the data types in our endpoints. - If False, we are client side and we need to include - oneOf and discriminator classes inside the data types in our endpoints - - Returns: - (valid_classes, dict_valid_class_to_child_types_mixed): - valid_classes (tuple): the valid classes that the current item - should be - dict_valid_class_to_child_types_mixed (dict): - valid_class (class): this is the key - child_types_mixed (list/dict/tuple): describes the valid child - types - """ - valid_classes = [] - child_req_types_by_current_type = {} - for required_type in required_types_mixed: - if isinstance(required_type, list): - valid_classes.append(list) - child_req_types_by_current_type[list] = required_type - elif isinstance(required_type, tuple): - valid_classes.append(tuple) - child_req_types_by_current_type[tuple] = required_type - elif isinstance(required_type, dict): - valid_classes.append(dict) - child_req_types_by_current_type[dict] = required_type[str] - else: - valid_classes.extend(get_possible_classes(required_type, spec_property_naming)) - return 
tuple(valid_classes), child_req_types_by_current_type - - -def change_keys_js_to_python(input_dict, model_class): - """ - Converts from javascript_key keys in the input_dict to python_keys in - the output dict using the mapping in model_class. - If the input_dict contains a key which does not declared in the model_class, - the key is added to the output dict as is. The assumption is the model_class - may have undeclared properties (additionalProperties attribute in the OAS - document). - """ - - if getattr(model_class, "attribute_map", None) is None: - return input_dict - output_dict = {} - reversed_attr_map = {value: key for key, value in model_class.attribute_map.items()} - for javascript_key, value in input_dict.items(): - python_key = reversed_attr_map.get(javascript_key) - if python_key is None: - # if the key is unknown, it is in error or it is an - # additionalProperties variable - python_key = javascript_key - output_dict[python_key] = value - return output_dict - - -def get_type_error(var_value, path_to_item, valid_classes, key_type=False): - error_msg = type_error_message( - var_name=path_to_item[-1], var_value=var_value, valid_classes=valid_classes, key_type=key_type - ) - return ApiTypeError(error_msg, path_to_item=path_to_item, valid_classes=valid_classes, key_type=key_type) - - -def deserialize_primitive(data, klass, path_to_item): - """Deserializes string to primitive type. - - :param data: str/int/float - :param klass: str/class the class to convert to - - :return: int, float, str, bool, date, datetime - """ - additional_message = "" - try: - if klass in {datetime, date}: - additional_message = ( - "If you need your parameter to have a fallback " - "string value, please set its type as `type: {}` in your " - "spec. That allows the value to be any type. " - ) - if klass == datetime: - if len(data) < 8: - raise ValueError("This is not a datetime") - # The string should be in iso8601 datetime format. 
- parsed_datetime = parse(data) - date_only = ( - parsed_datetime.hour == 0 - and parsed_datetime.minute == 0 - and parsed_datetime.second == 0 - and parsed_datetime.tzinfo is None - and 8 <= len(data) <= 10 - ) - if date_only: - raise ValueError("This is a date, not a datetime") - return parsed_datetime - elif klass == date: - if len(data) < 8: - raise ValueError("This is not a date") - return parse(data).date() - else: - converted_value = klass(data) - if isinstance(data, str) and klass == float: - if str(converted_value) != data: - # '7' -> 7.0 -> '7.0' != '7' - raise ValueError("This is not a float") - return converted_value - except (OverflowError, ValueError) as ex: - # parse can raise OverflowError - raise ApiValueError( - "{0}Failed to parse {1} as {2}".format(additional_message, repr(data), klass.__name__), - path_to_item=path_to_item, - ) from ex - - -def get_discriminator_class(model_class, discr_name, discr_value, cls_visited): - """Returns the child class specified by the discriminator. - - Args: - model_class (OpenApiModel): the model class. - discr_name (string): the name of the discriminator property. - discr_value (any): the discriminator value. - cls_visited (list): list of model classes that have been visited. - Used to determine the discriminator class without - visiting circular references indefinitely. - - Returns: - used_model_class (class/None): the chosen child class that will be used - to deserialize the data, for example dog.Dog. - If a class is not found, None is returned. - """ - - if model_class in cls_visited: - # The class has already been visited and no suitable class was found. - return None - cls_visited.append(model_class) - used_model_class = None - if discr_name in model_class.discriminator: - class_name_to_discr_class = model_class.discriminator[discr_name] - used_model_class = class_name_to_discr_class.get(discr_value) - if used_model_class is None: - # We didn't find a discriminated class in class_name_to_discr_class. 
- # So look in the ancestor or descendant discriminators - # The discriminator mapping may exist in a descendant (anyOf, oneOf) - # or ancestor (allOf). - # Ancestor example: in the GrandparentAnimal -> ParentPet -> ChildCat - # hierarchy, the discriminator mappings may be defined at any level - # in the hierarchy. - # Descendant example: mammal -> whale/zebra/Pig -> BasquePig/DanishPig - # if we try to make BasquePig from mammal, we need to travel through - # the oneOf descendant discriminators to find BasquePig - descendant_classes = model_class._composed_schemas.get("oneOf", ()) + model_class._composed_schemas.get( - "anyOf", () - ) - ancestor_classes = model_class._composed_schemas.get("allOf", ()) - possible_classes = descendant_classes + ancestor_classes - for cls in possible_classes: - # Check if the schema has inherited discriminators. - if hasattr(cls, "discriminator") and cls.discriminator is not None: - used_model_class = get_discriminator_class(cls, discr_name, discr_value, cls_visited) - if used_model_class is not None: - return used_model_class - return used_model_class - - -def deserialize_model(model_data, model_class, path_to_item, check_type, configuration, spec_property_naming): - """Deserializes model_data to model instance. - - Args: - model_data (int/str/float/bool/none_type/list/dict): data to instantiate the model - model_class (OpenApiModel): the model class - path_to_item (list): path to the model in the received data - check_type (bool): whether to check the data tupe for the values in - the model - configuration (Configuration): the instance to use to convert files - spec_property_naming (bool): True if the variable names in the input - data are serialized names as specified in the OpenAPI document. - False if the variables names in the input data are python - variable names in PEP-8 snake case. 
- - Returns: - model instance - - Raise: - ApiTypeError - ApiValueError - ApiKeyError - """ - - kw_args = dict( - _check_type=check_type, - _path_to_item=path_to_item, - _configuration=configuration, - _spec_property_naming=spec_property_naming, - ) - - if issubclass(model_class, ModelSimple): - return model_class._new_from_openapi_data(model_data, **kw_args) - elif isinstance(model_data, list): - return model_class._new_from_openapi_data(*model_data, **kw_args) - if isinstance(model_data, dict): - kw_args.update(model_data) - return model_class._new_from_openapi_data(**kw_args) - elif isinstance(model_data, PRIMITIVE_TYPES): - return model_class._new_from_openapi_data(model_data, **kw_args) - - -def deserialize_file(response_data, configuration, content_disposition=None): - """Deserializes body to file - - Saves response body into a file in a temporary folder, - using the filename from the `Content-Disposition` header if provided. - - Args: - param response_data (str): the file data to write - configuration (Configuration): the instance to use to convert files - - Keyword Args: - content_disposition (str): the value of the Content-Disposition - header - - Returns: - (file_type): the deserialized file which is open - The user is responsible for closing and reading the file - """ - fd, path = tempfile.mkstemp(dir=configuration.temp_folder_path) - os.close(fd) - os.remove(path) - - if content_disposition: - filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', content_disposition).group(1) - path = os.path.join(os.path.dirname(path), filename) - - with open(path, "wb") as f: - if isinstance(response_data, str): - # change str to bytes so we can write it - response_data = response_data.encode("utf-8") - f.write(response_data) - - f = open(path, "rb") - return f - - -def attempt_convert_item( - input_value, - valid_classes, - path_to_item, - configuration, - spec_property_naming, - key_type=False, - must_convert=False, - check_type=True, -): - """ - Args: - 
input_value (any): the data to convert - valid_classes (any): the classes that are valid - path_to_item (list): the path to the item to convert - configuration (Configuration): the instance to use to convert files - spec_property_naming (bool): True if the variable names in the input - data are serialized names as specified in the OpenAPI document. - False if the variables names in the input data are python - variable names in PEP-8 snake case. - key_type (bool): if True we need to convert a key type (not supported) - must_convert (bool): if True we must convert - check_type (bool): if True we check the type or the returned data in - ModelComposed/ModelNormal/ModelSimple instances - - Returns: - instance (any) the fixed item - - Raises: - ApiTypeError - ApiValueError - ApiKeyError - """ - valid_classes_ordered = order_response_types(valid_classes) - valid_classes_coercible = remove_uncoercible(valid_classes_ordered, input_value, spec_property_naming) - if not valid_classes_coercible or key_type: - # we do not handle keytype errors, json will take care - # of this for us - if configuration is None or not configuration.discard_unknown_keys: - raise get_type_error(input_value, path_to_item, valid_classes, key_type=key_type) - for valid_class in valid_classes_coercible: - try: - if issubclass(valid_class, OpenApiModel): - return deserialize_model( - input_value, valid_class, path_to_item, check_type, configuration, spec_property_naming - ) - elif valid_class == file_type: - return deserialize_file(input_value, configuration) - return deserialize_primitive(input_value, valid_class, path_to_item) - except (ApiTypeError, ApiValueError, ApiKeyError) as conversion_exc: - if must_convert: - raise conversion_exc - # if we have conversion errors when must_convert == False - # we ignore the exception and move on to the next class - continue - # we were unable to convert, must_convert == False - return input_value - - -def is_type_nullable(input_type): - """ - Returns true if 
None is an allowed value for the specified input_type. - - A type is nullable if at least one of the following conditions is true: - 1. The OAS 'nullable' attribute has been specified, - 1. The type is the 'null' type, - 1. The type is a anyOf/oneOf composed schema, and a child schema is - the 'null' type. - Args: - input_type (type): the class of the input_value that we are - checking - Returns: - bool - """ - if input_type is none_type: - return True - if issubclass(input_type, OpenApiModel) and input_type._nullable: - return True - if issubclass(input_type, ModelComposed): - # If oneOf/anyOf, check if the 'null' type is one of the allowed types. - for t in input_type._composed_schemas.get("oneOf", ()): - if is_type_nullable(t): - return True - for t in input_type._composed_schemas.get("anyOf", ()): - if is_type_nullable(t): - return True - return False - - -def is_valid_type(input_class_simple, valid_classes): - """ - Args: - input_class_simple (class): the class of the input_value that we are - checking - valid_classes (tuple): the valid classes that the current item - should be - Returns: - bool - """ - valid_type = input_class_simple in valid_classes - if not valid_type and (issubclass(input_class_simple, OpenApiModel) or input_class_simple is none_type): - for valid_class in valid_classes: - if input_class_simple is none_type and is_type_nullable(valid_class): - # Schema is oneOf/anyOf and the 'null' type is one of the allowed types. 
- return True - if not (issubclass(valid_class, OpenApiModel) and valid_class.discriminator): - continue - discr_propertyname_py = list(valid_class.discriminator.keys())[0] - discriminator_classes = valid_class.discriminator[discr_propertyname_py].values() - valid_type = is_valid_type(input_class_simple, discriminator_classes) - if valid_type: - return True - return valid_type - - -def validate_and_convert_types( - input_value, required_types_mixed, path_to_item, spec_property_naming, _check_type, configuration=None -): - """Raises a TypeError is there is a problem, otherwise returns value - - Args: - input_value (any): the data to validate/convert - required_types_mixed (list/dict/tuple): A list of - valid classes, or a list tuples of valid classes, or a dict where - the value is a tuple of value classes - path_to_item: (list) the path to the data being validated - this stores a list of keys or indices to get to the data being - validated - spec_property_naming (bool): True if the variable names in the input - data are serialized names as specified in the OpenAPI document. - False if the variables names in the input data are python - variable names in PEP-8 snake case. - _check_type: (boolean) if true, type will be checked and conversion - will be attempted. - configuration: (Configuration): the configuration class to use - when converting file_type items. 
- If passed, conversion will be attempted when possible - If not passed, no conversions will be attempted and - exceptions will be raised - - Returns: - the correctly typed value - - Raises: - ApiTypeError - """ - results = get_required_type_classes(required_types_mixed, spec_property_naming) - valid_classes, child_req_types_by_current_type = results - - input_class_simple = get_simple_class(input_value) - valid_type = is_valid_type(input_class_simple, valid_classes) - if not valid_type: - if configuration: - # if input_value is not valid_type try to convert it - converted_instance = attempt_convert_item( - input_value, - valid_classes, - path_to_item, - configuration, - spec_property_naming, - key_type=False, - must_convert=True, - check_type=_check_type, - ) - return converted_instance - else: - raise get_type_error(input_value, path_to_item, valid_classes, key_type=False) - - # input_value's type is in valid_classes - if len(valid_classes) > 1 and configuration: - # there are valid classes which are not the current class - valid_classes_coercible = remove_uncoercible( - valid_classes, input_value, spec_property_naming, must_convert=False - ) - if valid_classes_coercible: - converted_instance = attempt_convert_item( - input_value, - valid_classes_coercible, - path_to_item, - configuration, - spec_property_naming, - key_type=False, - must_convert=False, - check_type=_check_type, - ) - return converted_instance - - if child_req_types_by_current_type == {}: - # all types are of the required types and there are no more inner - # variables left to look at - return input_value - inner_required_types = child_req_types_by_current_type.get(type(input_value)) - if inner_required_types is None: - # for this type, there are not more inner variables left to look at - return input_value - if isinstance(input_value, list): - if input_value == []: - # allow an empty list - return input_value - for index, inner_value in enumerate(input_value): - inner_path = list(path_to_item) - 
inner_path.append(index) - input_value[index] = validate_and_convert_types( - inner_value, - inner_required_types, - inner_path, - spec_property_naming, - _check_type, - configuration=configuration, - ) - elif isinstance(input_value, dict): - if input_value == {}: - # allow an empty dict - return input_value - for inner_key, inner_val in input_value.items(): - inner_path = list(path_to_item) - inner_path.append(inner_key) - if get_simple_class(inner_key) != str: - raise get_type_error(inner_key, inner_path, valid_classes, key_type=True) - input_value[inner_key] = validate_and_convert_types( - inner_val, - inner_required_types, - inner_path, - spec_property_naming, - _check_type, - configuration=configuration, - ) - return input_value - - -def model_to_dict(model_instance, serialize=True): - """Returns the model properties as a dict - - Args: - model_instance (one of your model instances): the model instance that - will be converted to a dict. - - Keyword Args: - serialize (bool): if True, the keys in the dict will be values from - attribute_map - """ - result = {} - - model_instances = [model_instance] - if model_instance._composed_schemas: - model_instances.extend(model_instance._composed_instances) - seen_json_attribute_names = set() - used_fallback_python_attribute_names = set() - py_to_json_map = {} - for model_instance in model_instances: - for attr, value in model_instance._data_store.items(): - if serialize: - # we use get here because additional property key names do not - # exist in attribute_map - try: - attr = model_instance.attribute_map[attr] - py_to_json_map.update(model_instance.attribute_map) - seen_json_attribute_names.add(attr) - except KeyError: - used_fallback_python_attribute_names.add(attr) - if isinstance(value, list): - if not value: - # empty list or None - result[attr] = value - else: - res = [] - for v in value: - if isinstance(v, PRIMITIVE_TYPES) or v is None: - res.append(v) - elif isinstance(v, ModelSimple): - res.append(v.value) - 
else: - res.append(model_to_dict(v, serialize=serialize)) - result[attr] = res - elif isinstance(value, dict): - result[attr] = dict( - map( - lambda item: (item[0], model_to_dict(item[1], serialize=serialize)) - if hasattr(item[1], "_data_store") - else item, - value.items(), - ) - ) - elif isinstance(value, ModelSimple): - result[attr] = value.value - elif hasattr(value, "_data_store"): - result[attr] = model_to_dict(value, serialize=serialize) - else: - result[attr] = value - if serialize: - for python_key in used_fallback_python_attribute_names: - json_key = py_to_json_map.get(python_key) - if json_key is None: - continue - if python_key == json_key: - continue - json_key_assigned_no_need_for_python_key = json_key in seen_json_attribute_names - if json_key_assigned_no_need_for_python_key: - del result[python_key] - - return result - - -def type_error_message(var_value=None, var_name=None, valid_classes=None, key_type=None): - """ - Keyword Args: - var_value (any): the variable which has the type_error - var_name (str): the name of the variable which has the typ error - valid_classes (tuple): the accepted classes for current_item's - value - key_type (bool): False if our value is a value in a dict - True if it is a key in a dict - False if our item is an item in a list - """ - key_or_value = "value" - if key_type: - key_or_value = "key" - valid_classes_phrase = get_valid_classes_phrase(valid_classes) - msg = "Invalid type for variable '{0}'. 
Required {1} type {2} and " "passed type was {3}".format( - var_name, - key_or_value, - valid_classes_phrase, - type(var_value).__name__, - ) - return msg - - -def get_valid_classes_phrase(input_classes): - """Returns a string phrase describing what types are allowed""" - all_classes = list(input_classes) - all_classes = sorted(all_classes, key=lambda cls: cls.__name__) - all_class_names = [cls.__name__ for cls in all_classes] - if len(all_class_names) == 1: - return "is {0}".format(all_class_names[0]) - return "is one of [{0}]".format(", ".join(all_class_names)) - - -def get_allof_instances(self, model_args, constant_args): - """ - Args: - self: the class we are handling - model_args (dict): var_name to var_value - used to make instances - constant_args (dict): - metadata arguments: - _check_type - _path_to_item - _spec_property_naming - _configuration - _visited_composed_classes - - Returns - composed_instances (list) - """ - composed_instances = [] - for allof_class in self._composed_schemas["allOf"]: - try: - allof_instance = allof_class(**model_args, **constant_args) - composed_instances.append(allof_instance) - except Exception as ex: - raise ApiValueError( - "Invalid inputs given to generate an instance of '%s'. The " - "input data was invalid for the allOf schema '%s' in the composed " - "schema '%s'. Error=%s" % (allof_class.__name__, allof_class.__name__, self.__class__.__name__, str(ex)) - ) from ex - return composed_instances - - -def get_oneof_instance(cls, model_kwargs, constant_kwargs, model_arg=None): - """ - Find the oneOf schema that matches the input data (e.g. payload). - If exactly one schema matches the input data, an instance of that schema - is returned. - If zero or more than one schema match the input data, an exception is raised. - In OAS 3.x, the payload MUST, by validation, match exactly one of the - schemas described by oneOf. - - Args: - cls: the class we are handling - model_kwargs (dict): var_name to var_value - The input data, e.g. 
the payload that must match a oneOf schema - in the OpenAPI document. - constant_kwargs (dict): var_name to var_value - args that every model requires, including configuration, server - and path to item. - - Kwargs: - model_arg: (int, float, bool, str, date, datetime, ModelSimple, None): - the value to assign to a primitive class or ModelSimple class - Notes: - - this is only passed in when oneOf includes types which are not object - - None is used to suppress handling of model_arg, nullable models are handled in __new__ - - Returns - oneof_instance (instance) - """ - if len(cls._composed_schemas["oneOf"]) == 0: - return None - - oneof_instances = [] - # Iterate over each oneOf schema and determine if the input data - # matches the oneOf schemas. - for oneof_class in cls._composed_schemas["oneOf"]: - # The composed oneOf schema allows the 'null' type and the input data - # is the null value. This is a OAS >= 3.1 feature. - if oneof_class is none_type: - # skip none_types because we are deserializing dict data. - # none_type deserialization is handled in the __new__ method - continue - - single_value_input = allows_single_value_input(oneof_class) - - try: - if not single_value_input: - oneof_instance = oneof_class(**model_kwargs, **constant_kwargs) - else: - if issubclass(oneof_class, ModelSimple): - oneof_instance = oneof_class(model_arg, **constant_kwargs) - elif oneof_class in PRIMITIVE_TYPES: - oneof_instance = validate_and_convert_types( - model_arg, - (oneof_class,), - constant_kwargs["_path_to_item"], - constant_kwargs["_spec_property_naming"], - constant_kwargs["_check_type"], - configuration=constant_kwargs["_configuration"], - ) - oneof_instances.append(oneof_instance) - except Exception: - pass - if len(oneof_instances) == 0: - raise ApiValueError( - "Invalid inputs given to generate an instance of %s. None " - "of the oneOf schemas matched the input data." 
% cls.__name__ - ) - elif len(oneof_instances) > 1: - raise ApiValueError( - "Invalid inputs given to generate an instance of %s. Multiple " - "oneOf schemas matched the inputs, but a max of one is allowed." % cls.__name__ - ) - return oneof_instances[0] - - -def get_anyof_instances(self, model_args, constant_args): - """ - Args: - self: the class we are handling - model_args (dict): var_name to var_value - The input data, e.g. the payload that must match at least one - anyOf child schema in the OpenAPI document. - constant_args (dict): var_name to var_value - args that every model requires, including configuration, server - and path to item. - - Returns - anyof_instances (list) - """ - anyof_instances = [] - if len(self._composed_schemas["anyOf"]) == 0: - return anyof_instances - - for anyof_class in self._composed_schemas["anyOf"]: - # The composed oneOf schema allows the 'null' type and the input data - # is the null value. This is a OAS >= 3.1 feature. - if anyof_class is none_type: - # skip none_types because we are deserializing dict data. - # none_type deserialization is handled in the __new__ method - continue - - try: - anyof_instance = anyof_class(**model_args, **constant_args) - anyof_instances.append(anyof_instance) - except Exception: - pass - if len(anyof_instances) == 0: - raise ApiValueError( - "Invalid inputs given to generate an instance of %s. None of the " - "anyOf schemas matched the inputs." 
% self.__class__.__name__ - ) - return anyof_instances - - -def get_discarded_args(self, composed_instances, model_args): - """ - Gathers the args that were discarded by configuration.discard_unknown_keys - """ - model_arg_keys = model_args.keys() - discarded_args = set() - # arguments passed to self were already converted to python names - # before __init__ was called - for instance in composed_instances: - if instance.__class__ in self._composed_schemas["allOf"]: - try: - keys = instance.to_dict().keys() - discarded_keys = model_args - keys - discarded_args.update(discarded_keys) - except Exception: - # allOf integer schema will throw exception - pass - else: - try: - all_keys = set(model_to_dict(instance, serialize=False).keys()) - js_keys = model_to_dict(instance, serialize=True).keys() - all_keys.update(js_keys) - discarded_keys = model_arg_keys - all_keys - discarded_args.update(discarded_keys) - except Exception: - # allOf integer schema will throw exception - pass - return discarded_args - - -def validate_get_composed_info(constant_args, model_args, self): - """ - For composed schemas, generate schema instances for - all schemas in the oneOf/anyOf/allOf definition. If additional - properties are allowed, also assign those properties on - all matched schemas that contain additionalProperties. - Openapi schemas are python classes. 
- - Exceptions are raised if: - - 0 or > 1 oneOf schema matches the model_args input data - - no anyOf schema matches the model_args input data - - any of the allOf schemas do not match the model_args input data - - Args: - constant_args (dict): these are the args that every model requires - model_args (dict): these are the required and optional spec args that - were passed in to make this model - self (class): the class that we are instantiating - This class contains self._composed_schemas - - Returns: - composed_info (list): length three - composed_instances (list): the composed instances which are not - self - var_name_to_model_instances (dict): a dict going from var_name - to the model_instance which holds that var_name - the model_instance may be self or an instance of one of the - classes in self.composed_instances() - additional_properties_model_instances (list): a list of the - model instances which have the property - additional_properties_type. This list can include self - """ - # create composed_instances - composed_instances = [] - allof_instances = get_allof_instances(self, model_args, constant_args) - composed_instances.extend(allof_instances) - oneof_instance = get_oneof_instance(self.__class__, model_args, constant_args) - if oneof_instance is not None: - composed_instances.append(oneof_instance) - anyof_instances = get_anyof_instances(self, model_args, constant_args) - composed_instances.extend(anyof_instances) - """ - set additional_properties_model_instances - additional properties must be evaluated at the schema level - so self's additional properties are most important - If self is a composed schema with: - - no properties defined in self - - additionalProperties: False - Then for object payloads every property is an additional property - and they are not allowed, so only empty dict is allowed - - Properties must be set on all matching schemas - so when a property is assigned toa composed instance, it must be set on all - composed instances 
regardless of additionalProperties presence - keeping it to prevent breaking changes in v5.0.1 - TODO remove cls._additional_properties_model_instances in 6.0.0 - """ - additional_properties_model_instances = [] - if self.additional_properties_type is not None: - additional_properties_model_instances = [self] - - """ - no need to set properties on self in here, they will be set in __init__ - By here all composed schema oneOf/anyOf/allOf instances have their properties set using - model_args - """ - discarded_args = get_discarded_args(self, composed_instances, model_args) - - # map variable names to composed_instances - var_name_to_model_instances = {} - for prop_name in model_args: - if prop_name not in discarded_args: - var_name_to_model_instances[prop_name] = [self] + composed_instances - - return [composed_instances, var_name_to_model_instances, additional_properties_model_instances, discarded_args] diff --git a/pinecone/core/client/models/__init__.py b/pinecone/core/client/models/__init__.py index 23c7ef03..85b94981 100644 --- a/pinecone/core/client/models/__init__.py +++ b/pinecone/core/client/models/__init__.py @@ -1,38 +1,47 @@ +# coding: utf-8 + # flake8: noqa +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 -# import all models into this package -# if you have many models here with many references from one model to another this may -# raise a RecursionError -# to avoid this, import only the models that you directly need like: -# from from pinecone.core.client.model.pet import Pet -# or import this package, but before doing it, use: -# import sys -# sys.setrecursionlimit(n) -from pinecone.core.client.model.approximated_config import ApproximatedConfig -from pinecone.core.client.model.collection_meta import CollectionMeta -from pinecone.core.client.model.create_collection_request import CreateCollectionRequest -from pinecone.core.client.model.create_request import CreateRequest -from pinecone.core.client.model.delete_request import DeleteRequest -from pinecone.core.client.model.describe_index_stats_request import DescribeIndexStatsRequest -from pinecone.core.client.model.describe_index_stats_response import DescribeIndexStatsResponse -from pinecone.core.client.model.fetch_response import FetchResponse -from pinecone.core.client.model.hnsw_config import HnswConfig -from pinecone.core.client.model.index_meta import IndexMeta -from pinecone.core.client.model.index_meta_database import IndexMetaDatabase -from pinecone.core.client.model.index_meta_database_status import IndexMetaDatabaseStatus -from pinecone.core.client.model.namespace_summary import NamespaceSummary -from pinecone.core.client.model.patch_request import PatchRequest -from pinecone.core.client.model.protobuf_any import ProtobufAny -from pinecone.core.client.model.protobuf_null_value import ProtobufNullValue -from pinecone.core.client.model.query_request import QueryRequest -from pinecone.core.client.model.query_response import QueryResponse -from pinecone.core.client.model.query_vector import QueryVector -from pinecone.core.client.model.rpc_status import RpcStatus -from pinecone.core.client.model.scored_vector import ScoredVector -from pinecone.core.client.model.single_query_results import 
SingleQueryResults -from pinecone.core.client.model.sparse_values import SparseValues -from pinecone.core.client.model.update_request import UpdateRequest -from pinecone.core.client.model.upsert_request import UpsertRequest -from pinecone.core.client.model.upsert_response import UpsertResponse -from pinecone.core.client.model.vector import Vector +# import models into model package +from pinecone.core.client.models.approximated_config import ApproximatedConfig +from pinecone.core.client.models.collection_meta import CollectionMeta +from pinecone.core.client.models.create_collection_request import CreateCollectionRequest +from pinecone.core.client.models.create_request import CreateRequest +from pinecone.core.client.models.create_request_index_config import CreateRequestIndexConfig +from pinecone.core.client.models.delete_request import DeleteRequest +from pinecone.core.client.models.describe_index_stats_request import DescribeIndexStatsRequest +from pinecone.core.client.models.describe_index_stats_response import DescribeIndexStatsResponse +from pinecone.core.client.models.fetch_response import FetchResponse +from pinecone.core.client.models.hnsw_config import HnswConfig +from pinecone.core.client.models.index_meta import IndexMeta +from pinecone.core.client.models.index_meta_database import IndexMetaDatabase +from pinecone.core.client.models.index_meta_database_index_config import IndexMetaDatabaseIndexConfig +from pinecone.core.client.models.index_meta_status import IndexMetaStatus +from pinecone.core.client.models.list_indexes200_response import ListIndexes200Response +from pinecone.core.client.models.namespace_summary import NamespaceSummary +from pinecone.core.client.models.patch_request import PatchRequest +from pinecone.core.client.models.protobuf_any import ProtobufAny +from pinecone.core.client.models.protobuf_null_value import ProtobufNullValue +from pinecone.core.client.models.query_request import QueryRequest +from 
pinecone.core.client.models.query_response import QueryResponse +from pinecone.core.client.models.query_vector import QueryVector +from pinecone.core.client.models.rpc_status import RpcStatus +from pinecone.core.client.models.scored_vector import ScoredVector +from pinecone.core.client.models.single_query_results import SingleQueryResults +from pinecone.core.client.models.sparse_values import SparseValues +from pinecone.core.client.models.update_request import UpdateRequest +from pinecone.core.client.models.upsert_request import UpsertRequest +from pinecone.core.client.models.upsert_response import UpsertResponse +from pinecone.core.client.models.vector import Vector diff --git a/pinecone/core/client/models/approximated_config.py b/pinecone/core/client/models/approximated_config.py new file mode 100644 index 00000000..3a478f77 --- /dev/null +++ b/pinecone/core/client/models/approximated_config.py @@ -0,0 +1,74 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import Optional +from pydantic import BaseModel, StrictBool, StrictInt + + +class ApproximatedConfig(BaseModel): + """ + ApproximatedConfig + """ + + k_bits: Optional[StrictInt] = 512 + hybrid: Optional[StrictBool] = False + __properties = ["k_bits", "hybrid"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> ApproximatedConfig: + """Create an instance of ApproximatedConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> ApproximatedConfig: + """Create an instance of ApproximatedConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return ApproximatedConfig.parse_obj(obj) + + _obj = ApproximatedConfig.parse_obj( + { + "k_bits": obj.get("k_bits") if obj.get("k_bits") is not None else 512, + "hybrid": obj.get("hybrid") if obj.get("hybrid") is not None else False, + } + ) + return _obj diff --git a/pinecone/core/client/models/collection_meta.py b/pinecone/core/client/models/collection_meta.py new file mode 100644 index 00000000..98ef3b0b --- /dev/null +++ b/pinecone/core/client/models/collection_meta.py @@ -0,0 +1,79 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator 
https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from pydantic import BaseModel, Field, StrictInt, StrictStr + + +class CollectionMeta(BaseModel): + """ + CollectionMeta + """ + + name: StrictStr = Field(...) + size: StrictInt = Field(..., description="The size of the collection in bytes.") + status: StrictStr = Field(..., description="The status of the collection.") + dimension: StrictInt = Field(..., description="The dimension of the records stored in the collection") + vector_count: StrictInt = Field(..., description="The number of records stored in the collection") + __properties = ["name", "size", "status", "dimension", "vector_count"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> CollectionMeta: + """Create an instance of CollectionMeta from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> CollectionMeta: + """Create an instance of CollectionMeta from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return CollectionMeta.parse_obj(obj) + + _obj = CollectionMeta.parse_obj( + { + "name": 
obj.get("name"), + "size": obj.get("size"), + "status": obj.get("status"), + "dimension": obj.get("dimension"), + "vector_count": obj.get("vector_count"), + } + ) + return _obj diff --git a/pinecone/core/client/models/create_collection_request.py b/pinecone/core/client/models/create_collection_request.py new file mode 100644 index 00000000..4ca48229 --- /dev/null +++ b/pinecone/core/client/models/create_collection_request.py @@ -0,0 +1,68 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from pydantic import BaseModel, Field, StrictStr + + +class CreateCollectionRequest(BaseModel): + """ + CreateCollectionRequest + """ + + name: StrictStr = Field(..., description="The name of the collection to be created.") + source: StrictStr = Field(..., description="The name of the index to be used as the source for the collection.") + __properties = ["name", "source"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> CreateCollectionRequest: + """Create an instance of CreateCollectionRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = 
self.dict(by_alias=True, exclude={}, exclude_none=True) + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> CreateCollectionRequest: + """Create an instance of CreateCollectionRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return CreateCollectionRequest.parse_obj(obj) + + _obj = CreateCollectionRequest.parse_obj({"name": obj.get("name"), "source": obj.get("source")}) + return _obj diff --git a/pinecone/core/client/models/create_request.py b/pinecone/core/client/models/create_request.py new file mode 100644 index 00000000..9c6c9517 --- /dev/null +++ b/pinecone/core/client/models/create_request.py @@ -0,0 +1,149 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import Any, Dict, Optional +from pydantic import BaseModel, Field, StrictInt, StrictStr, validator +from pinecone.core.client.models.create_request_index_config import CreateRequestIndexConfig + + +class CreateRequest(BaseModel): + """ + CreateRequest + """ + + name: StrictStr = Field( + ..., description="The name of the index to be created. The maximum length is 45 characters." 
+ ) + dimension: StrictInt = Field(..., description="The dimensions of the vectors to be inserted in the index") + region: StrictStr = Field(..., description="The region where you would like your index to be created") + cloud: StrictStr = Field(..., description="The public cloud where you would like your index hosted") + capacity_mode: StrictStr = Field(..., description="The capacity mode for the index") + index_type: Optional[StrictStr] = Field( + "approximated", description="The type of vector index. Pinecone supports 'approximated'." + ) + metric: Optional[StrictStr] = Field( + "cosine", + description="The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'.", + ) + pods: Optional[StrictInt] = Field(1, description="The number of pods for the index to use,including replicas.") + replicas: Optional[StrictInt] = Field( + 1, + description="The number of replicas. Replicas duplicate your index. They provide higher availability and throughput.", + ) + shards: Optional[StrictInt] = Field(1, description="The number of shards to be used in the index.") + pod_type: Optional[StrictStr] = Field( + "p1.x1", + description="The type of pod to use. One of `s1`, `p1`, or `p2` appended with `.` and one of `x1`, `x2`, `x4`, or `x8`.", + ) + index_config: Optional[CreateRequestIndexConfig] = None + metadata_config: Optional[Dict[str, Any]] = Field( + None, + description='Configuration for the behavior of Pinecone\'s internal metadata index. By default, all metadata is indexed; when `metadata_config` is present, only specified metadata fields are indexed. 
To specify metadata fields to index, provide a JSON object of the following form: ``` {"indexed": ["example_metadata_field"]} ``` ', + ) + source_collection: Optional[StrictStr] = Field( + None, description="The name of the collection to create an index from" + ) + __properties = [ + "name", + "dimension", + "region", + "cloud", + "capacity_mode", + "index_type", + "metric", + "pods", + "replicas", + "shards", + "pod_type", + "index_config", + "metadata_config", + "source_collection", + ] + + @validator("cloud") + def cloud_validate_enum(cls, value): + """Validates the enum""" + if value not in ("gcp", "aws", "azure"): + raise ValueError("must be one of enum values ('gcp', 'aws', 'azure')") + return value + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> CreateRequest: + """Create an instance of CreateRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + # override the default output from pydantic by calling `to_dict()` of index_config + if self.index_config: + _dict["index_config"] = self.index_config.to_dict() + # set to None if metadata_config (nullable) is None + # and __fields_set__ contains the field + if self.metadata_config is None and "metadata_config" in self.__fields_set__: + _dict["metadata_config"] = None + + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> CreateRequest: + """Create an instance of CreateRequest from a dict""" + if obj is None: + return None + + 
if not isinstance(obj, dict): + return CreateRequest.parse_obj(obj) + + _obj = CreateRequest.parse_obj( + { + "name": obj.get("name"), + "dimension": obj.get("dimension"), + "region": obj.get("region"), + "cloud": obj.get("cloud"), + "capacity_mode": obj.get("capacity_mode"), + "index_type": obj.get("index_type") if obj.get("index_type") is not None else "approximated", + "metric": obj.get("metric") if obj.get("metric") is not None else "cosine", + "pods": obj.get("pods") if obj.get("pods") is not None else 1, + "replicas": obj.get("replicas") if obj.get("replicas") is not None else 1, + "shards": obj.get("shards") if obj.get("shards") is not None else 1, + "pod_type": obj.get("pod_type") if obj.get("pod_type") is not None else "p1.x1", + "index_config": CreateRequestIndexConfig.from_dict(obj.get("index_config")) + if obj.get("index_config") is not None + else None, + "metadata_config": obj.get("metadata_config"), + "source_collection": obj.get("source_collection"), + } + ) + return _obj diff --git a/pinecone/core/client/models/create_request_index_config.py b/pinecone/core/client/models/create_request_index_config.py new file mode 100644 index 00000000..091fd207 --- /dev/null +++ b/pinecone/core/client/models/create_request_index_config.py @@ -0,0 +1,140 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +from inspect import getfullargspec +import json +import pprint +import re # noqa: F401 + +from typing import Any, List, Optional +from pydantic import BaseModel, Field, StrictStr, ValidationError, validator +from pinecone.core.client.models.approximated_config import ApproximatedConfig +from typing import Union, Any, List, TYPE_CHECKING +from pydantic import StrictStr, Field + +CREATEREQUESTINDEXCONFIG_ONE_OF_SCHEMAS = ["ApproximatedConfig"] + + +class CreateRequestIndexConfig(BaseModel): + """ + CreateRequestIndexConfig + """ + + # data type: ApproximatedConfig + oneof_schema_1_validator: Optional[ApproximatedConfig] = None + if TYPE_CHECKING: + actual_instance: Union[ApproximatedConfig] + else: + actual_instance: Any + one_of_schemas: List[str] = Field(CREATEREQUESTINDEXCONFIG_ONE_OF_SCHEMAS, const=True) + + class Config: + validate_assignment = True + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @validator("actual_instance") + def actual_instance_must_validate_oneof(cls, v): + instance = CreateRequestIndexConfig.construct() + error_messages = [] + match = 0 + # validate data type: ApproximatedConfig + if not isinstance(v, ApproximatedConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `ApproximatedConfig`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError( + "Multiple matches found when setting `actual_instance` in CreateRequestIndexConfig with oneOf schemas: ApproximatedConfig. 
Details: " + + ", ".join(error_messages) + ) + elif match == 0: + # no match + raise ValueError( + "No match found when setting `actual_instance` in CreateRequestIndexConfig with oneOf schemas: ApproximatedConfig. Details: " + + ", ".join(error_messages) + ) + else: + return v + + @classmethod + def from_dict(cls, obj: dict) -> CreateRequestIndexConfig: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> CreateRequestIndexConfig: + """Returns the object represented by the json string""" + instance = CreateRequestIndexConfig.construct() + error_messages = [] + match = 0 + + # deserialize data into ApproximatedConfig + try: + instance.actual_instance = ApproximatedConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError( + "Multiple matches found when deserializing the JSON string into CreateRequestIndexConfig with oneOf schemas: ApproximatedConfig. Details: " + + ", ".join(error_messages) + ) + elif match == 0: + # no match + raise ValueError( + "No match found when deserializing the JSON string into CreateRequestIndexConfig with oneOf schemas: ApproximatedConfig. 
Details: " + + ", ".join(error_messages) + ) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + to_json = getattr(self.actual_instance, "to_json", None) + if callable(to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> dict: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + to_dict = getattr(self.actual_instance, "to_dict", None) + if callable(to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.dict()) diff --git a/pinecone/core/client/models/delete_request.py b/pinecone/core/client/models/delete_request.py new file mode 100644 index 00000000..9e189c8b --- /dev/null +++ b/pinecone/core/client/models/delete_request.py @@ -0,0 +1,85 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import Any, Dict, List, Optional +from pydantic import BaseModel, Field, StrictBool, StrictStr, conlist + + +class DeleteRequest(BaseModel): + """ + The request for the `Delete` operation. 
# noqa: E501 + """ + + ids: Optional[conlist(StrictStr)] = Field(None, description="Vectors to delete.") + delete_all: Optional[StrictBool] = Field( + False, + alias="deleteAll", + description="This indicates that all vectors in the index namespace should be deleted.", + ) + namespace: Optional[StrictStr] = Field(None, description="The namespace to delete vectors from, if applicable.") + filter: Optional[Dict[str, Any]] = Field( + None, + description="If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See https://www.pinecone.io/docs/metadata-filtering/.", + ) + __properties = ["ids", "deleteAll", "namespace", "filter"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> DeleteRequest: + """Create an instance of DeleteRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> DeleteRequest: + """Create an instance of DeleteRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return DeleteRequest.parse_obj(obj) + + _obj = DeleteRequest.parse_obj( + { + "ids": obj.get("ids"), + "delete_all": obj.get("deleteAll") if obj.get("deleteAll") is not None else False, + "namespace": obj.get("namespace"), + "filter": obj.get("filter"), + } + ) + return _obj diff --git 
a/pinecone/core/client/models/describe_index_stats_request.py b/pinecone/core/client/models/describe_index_stats_request.py new file mode 100644 index 00000000..63a7e9b7 --- /dev/null +++ b/pinecone/core/client/models/describe_index_stats_request.py @@ -0,0 +1,71 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import Any, Dict, Optional +from pydantic import BaseModel, Field + + +class DescribeIndexStatsRequest(BaseModel): + """ + The request for the `DescribeIndexStats` operation. # noqa: E501 + """ + + filter: Optional[Dict[str, Any]] = Field( + None, + description="If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. 
See https://www.pinecone.io/docs/metadata-filtering/.", + ) + __properties = ["filter"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> DescribeIndexStatsRequest: + """Create an instance of DescribeIndexStatsRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> DescribeIndexStatsRequest: + """Create an instance of DescribeIndexStatsRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return DescribeIndexStatsRequest.parse_obj(obj) + + _obj = DescribeIndexStatsRequest.parse_obj({"filter": obj.get("filter")}) + return _obj diff --git a/pinecone/core/client/models/describe_index_stats_response.py b/pinecone/core/client/models/describe_index_stats_response.py new file mode 100644 index 00000000..7a35ae47 --- /dev/null +++ b/pinecone/core/client/models/describe_index_stats_response.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import Dict, Optional, Union +from pydantic import BaseModel, Field, StrictFloat, StrictInt +from pinecone.core.client.models.namespace_summary import NamespaceSummary + + +class DescribeIndexStatsResponse(BaseModel): + """ + The response for the `DescribeIndexStats` operation. # noqa: E501 + """ + + namespaces: Optional[Dict[str, NamespaceSummary]] = Field( + None, + description="A mapping for each namespace in the index from the namespace name to a summary of its contents. If a metadata filter expression is present, the summary will reflect only vectors matching that expression.", + ) + dimension: Optional[StrictInt] = Field(None, description="The dimension of the indexed vectors.") + index_fullness: Optional[Union[StrictFloat, StrictInt]] = Field( + None, + alias="indexFullness", + description="The fullness of the index, regardless of whether a metadata filter expression was passed. 
The granularity of this metric is 10%.", + ) + total_vector_count: Optional[StrictInt] = Field(None, alias="totalVectorCount") + __properties = ["namespaces", "dimension", "indexFullness", "totalVectorCount"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> DescribeIndexStatsResponse: + """Create an instance of DescribeIndexStatsResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + # override the default output from pydantic by calling `to_dict()` of each value in namespaces (dict) + _field_dict = {} + if self.namespaces: + for _key in self.namespaces: + if self.namespaces[_key]: + _field_dict[_key] = self.namespaces[_key].to_dict() + _dict["namespaces"] = _field_dict + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> DescribeIndexStatsResponse: + """Create an instance of DescribeIndexStatsResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return DescribeIndexStatsResponse.parse_obj(obj) + + _obj = DescribeIndexStatsResponse.parse_obj( + { + "namespaces": dict((_k, NamespaceSummary.from_dict(_v)) for _k, _v in obj.get("namespaces").items()) + if obj.get("namespaces") is not None + else None, + "dimension": obj.get("dimension"), + "index_fullness": obj.get("indexFullness"), + "total_vector_count": obj.get("totalVectorCount"), + } + ) + return _obj diff --git a/pinecone/core/client/models/fetch_response.py 
b/pinecone/core/client/models/fetch_response.py new file mode 100644 index 00000000..3763b572 --- /dev/null +++ b/pinecone/core/client/models/fetch_response.py @@ -0,0 +1,84 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import Dict, Optional +from pydantic import BaseModel, Field, StrictStr +from pinecone.core.client.models.vector import Vector + + +class FetchResponse(BaseModel): + """ + The response for the `Fetch` operation. # noqa: E501 + """ + + vectors: Optional[Dict[str, Vector]] = None + namespace: Optional[StrictStr] = Field(None, description="The namespace of the vectors.") + __properties = ["vectors", "namespace"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> FetchResponse: + """Create an instance of FetchResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + # override the default output from pydantic by calling `to_dict()` of each value in vectors (dict) + _field_dict = {} + if self.vectors: + for _key in self.vectors: + if self.vectors[_key]: + 
_field_dict[_key] = self.vectors[_key].to_dict() + _dict["vectors"] = _field_dict + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> FetchResponse: + """Create an instance of FetchResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return FetchResponse.parse_obj(obj) + + _obj = FetchResponse.parse_obj( + { + "vectors": dict((_k, Vector.from_dict(_v)) for _k, _v in obj.get("vectors").items()) + if obj.get("vectors") is not None + else None, + "namespace": obj.get("namespace"), + } + ) + return _obj diff --git a/pinecone/core/client/models/hnsw_config.py b/pinecone/core/client/models/hnsw_config.py new file mode 100644 index 00000000..75b7d6ce --- /dev/null +++ b/pinecone/core/client/models/hnsw_config.py @@ -0,0 +1,78 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import Optional +from pydantic import BaseModel, Field, StrictInt + + +class HnswConfig(BaseModel): + """ + HnswConfig + """ + + ef_construction: Optional[StrictInt] = 500 + ef: Optional[StrictInt] = 250 + m: Optional[StrictInt] = Field(12, alias="M") + max_elements: Optional[StrictInt] = 50000000 + __properties = ["ef_construction", "ef", "M", "max_elements"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> HnswConfig: + """Create an instance of HnswConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> HnswConfig: + """Create an instance of HnswConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return HnswConfig.parse_obj(obj) + + _obj = HnswConfig.parse_obj( + { + "ef_construction": obj.get("ef_construction") if obj.get("ef_construction") is not None else 500, + "ef": obj.get("ef") if obj.get("ef") is not None else 250, + "m": obj.get("M") if obj.get("M") is not None else 12, + "max_elements": obj.get("max_elements") if obj.get("max_elements") is not None else 50000000, + } + ) + return _obj diff --git a/pinecone/core/client/models/index_meta.py b/pinecone/core/client/models/index_meta.py new file mode 100644 index 00000000..348cf509 --- /dev/null +++ 
b/pinecone/core/client/models/index_meta.py @@ -0,0 +1,83 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from pydantic import BaseModel, Field +from pinecone.core.client.models.index_meta_database import IndexMetaDatabase +from pinecone.core.client.models.index_meta_status import IndexMetaStatus + + +class IndexMeta(BaseModel): + """ + IndexMeta + """ + + database: IndexMetaDatabase = Field(...) + status: IndexMetaStatus = Field(...) + __properties = ["database", "status"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> IndexMeta: + """Create an instance of IndexMeta from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + # override the default output from pydantic by calling `to_dict()` of database + if self.database: + _dict["database"] = self.database.to_dict() + # override the default output from pydantic by calling `to_dict()` of status + if self.status: + _dict["status"] = self.status.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> IndexMeta: + """Create 
an instance of IndexMeta from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return IndexMeta.parse_obj(obj) + + _obj = IndexMeta.parse_obj( + { + "database": IndexMetaDatabase.from_dict(obj.get("database")) + if obj.get("database") is not None + else None, + "status": IndexMetaStatus.from_dict(obj.get("status")) if obj.get("status") is not None else None, + } + ) + return _obj diff --git a/pinecone/core/client/models/index_meta_database.py b/pinecone/core/client/models/index_meta_database.py new file mode 100644 index 00000000..ba1af18e --- /dev/null +++ b/pinecone/core/client/models/index_meta_database.py @@ -0,0 +1,110 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import Any, Dict, Optional +from pydantic import BaseModel, Field, StrictInt, StrictStr +from pinecone.core.client.models.index_meta_database_index_config import IndexMetaDatabaseIndexConfig + + +class IndexMetaDatabase(BaseModel): + """ + IndexMetaDatabase + """ + + name: StrictStr = Field(...) + dimension: StrictStr = Field(...) + capacity_mode: StrictStr = Field(...) + index_type: Optional[StrictStr] = None + metric: StrictStr = Field(...) 
+ pods: Optional[StrictInt] = None + replicas: Optional[StrictInt] = None + shards: Optional[StrictInt] = None + pod_type: Optional[StrictStr] = None + index_config: Optional[IndexMetaDatabaseIndexConfig] = None + metadata_config: Optional[Dict[str, Any]] = None + __properties = [ + "name", + "dimension", + "capacity_mode", + "index_type", + "metric", + "pods", + "replicas", + "shards", + "pod_type", + "index_config", + "metadata_config", + ] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> IndexMetaDatabase: + """Create an instance of IndexMetaDatabase from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + # override the default output from pydantic by calling `to_dict()` of index_config + if self.index_config: + _dict["index_config"] = self.index_config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> IndexMetaDatabase: + """Create an instance of IndexMetaDatabase from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return IndexMetaDatabase.parse_obj(obj) + + _obj = IndexMetaDatabase.parse_obj( + { + "name": obj.get("name"), + "dimension": obj.get("dimension"), + "capacity_mode": obj.get("capacity_mode"), + "index_type": obj.get("index_type"), + "metric": obj.get("metric") if obj.get("metric") is not None else "cosine", + "pods": obj.get("pods"), + "replicas": obj.get("replicas"), + "shards": obj.get("shards"), + "pod_type": 
obj.get("pod_type"), + "index_config": IndexMetaDatabaseIndexConfig.from_dict(obj.get("index_config")) + if obj.get("index_config") is not None + else None, + "metadata_config": obj.get("metadata_config"), + } + ) + return _obj diff --git a/pinecone/core/client/models/index_meta_database_index_config.py b/pinecone/core/client/models/index_meta_database_index_config.py new file mode 100644 index 00000000..d58d078e --- /dev/null +++ b/pinecone/core/client/models/index_meta_database_index_config.py @@ -0,0 +1,140 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +from inspect import getfullargspec +import json +import pprint +import re # noqa: F401 + +from typing import Any, List, Optional +from pydantic import BaseModel, Field, StrictStr, ValidationError, validator +from pinecone.core.client.models.approximated_config import ApproximatedConfig +from typing import Union, Any, List, TYPE_CHECKING +from pydantic import StrictStr, Field + +INDEXMETADATABASEINDEXCONFIG_ONE_OF_SCHEMAS = ["ApproximatedConfig"] + + +class IndexMetaDatabaseIndexConfig(BaseModel): + """ + IndexMetaDatabaseIndexConfig + """ + + # data type: ApproximatedConfig + oneof_schema_1_validator: Optional[ApproximatedConfig] = None + if TYPE_CHECKING: + actual_instance: Union[ApproximatedConfig] + else: + actual_instance: Any + one_of_schemas: List[str] = Field(INDEXMETADATABASEINDEXCONFIG_ONE_OF_SCHEMAS, const=True) + + class Config: + validate_assignment = True + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + 
raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @validator("actual_instance") + def actual_instance_must_validate_oneof(cls, v): + instance = IndexMetaDatabaseIndexConfig.construct() + error_messages = [] + match = 0 + # validate data type: ApproximatedConfig + if not isinstance(v, ApproximatedConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `ApproximatedConfig`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError( + "Multiple matches found when setting `actual_instance` in IndexMetaDatabaseIndexConfig with oneOf schemas: ApproximatedConfig. Details: " + + ", ".join(error_messages) + ) + elif match == 0: + # no match + raise ValueError( + "No match found when setting `actual_instance` in IndexMetaDatabaseIndexConfig with oneOf schemas: ApproximatedConfig. Details: " + + ", ".join(error_messages) + ) + else: + return v + + @classmethod + def from_dict(cls, obj: dict) -> IndexMetaDatabaseIndexConfig: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> IndexMetaDatabaseIndexConfig: + """Returns the object represented by the json string""" + instance = IndexMetaDatabaseIndexConfig.construct() + error_messages = [] + match = 0 + + # deserialize data into ApproximatedConfig + try: + instance.actual_instance = ApproximatedConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError( + "Multiple matches found when deserializing the JSON string into IndexMetaDatabaseIndexConfig with oneOf schemas: ApproximatedConfig. Details: " + + ", ".join(error_messages) + ) + elif match == 0: + # no match + raise ValueError( + "No match found when deserializing the JSON string into IndexMetaDatabaseIndexConfig with oneOf schemas: ApproximatedConfig. 
Details: " + + ", ".join(error_messages) + ) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + to_json = getattr(self.actual_instance, "to_json", None) + if callable(to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> dict: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + to_dict = getattr(self.actual_instance, "to_dict", None) + if callable(to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.dict()) diff --git a/pinecone/core/client/models/index_meta_status.py b/pinecone/core/client/models/index_meta_status.py new file mode 100644 index 00000000..fc76025c --- /dev/null +++ b/pinecone/core/client/models/index_meta_status.py @@ -0,0 +1,90 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from pydantic import BaseModel, Field, StrictBool, StrictInt, StrictStr, validator + + +class IndexMetaStatus(BaseModel): + """ + IndexMetaStatus + """ + + ready: StrictBool = Field(...) + state: StrictStr = Field(...) + host: StrictStr = Field(...) + port: StrictInt = Field(...) 
+ __properties = ["ready", "state", "host", "port"] + + @validator("state") + def state_validate_enum(cls, value): + """Validates the enum""" + if value not in ( + "Initializing", + "InitializationFailed", + "ScalingUp", + "ScalingDown", + "ScalingUpPodSize", + "ScalingDownPodSize", + "Terminating", + "Ready", + ): + raise ValueError( + "must be one of enum values ('Initializing', 'InitializationFailed', 'ScalingUp', 'ScalingDown', 'ScalingUpPodSize', 'ScalingDownPodSize', 'Terminating', 'Ready')" + ) + return value + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> IndexMetaStatus: + """Create an instance of IndexMetaStatus from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> IndexMetaStatus: + """Create an instance of IndexMetaStatus from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return IndexMetaStatus.parse_obj(obj) + + _obj = IndexMetaStatus.parse_obj( + {"ready": obj.get("ready"), "state": obj.get("state"), "host": obj.get("host"), "port": obj.get("port")} + ) + return _obj diff --git a/pinecone/core/client/models/list_indexes200_response.py b/pinecone/core/client/models/list_indexes200_response.py new file mode 100644 index 00000000..9a94ef22 --- /dev/null +++ b/pinecone/core/client/models/list_indexes200_response.py @@ -0,0 +1,82 @@ +# coding: utf-8 + +""" + Pinecone API + + No description 
provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import List, Optional +from pydantic import BaseModel, conlist +from pinecone.core.client.models.index_meta import IndexMeta + + +class ListIndexes200Response(BaseModel): + """ + ListIndexes200Response + """ + + databases: Optional[conlist(IndexMeta)] = None + __properties = ["databases"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> ListIndexes200Response: + """Create an instance of ListIndexes200Response from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + # override the default output from pydantic by calling `to_dict()` of each item in databases (list) + _items = [] + if self.databases: + for _item in self.databases: + if _item: + _items.append(_item.to_dict()) + _dict["databases"] = _items + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> ListIndexes200Response: + """Create an instance of ListIndexes200Response from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return ListIndexes200Response.parse_obj(obj) + + _obj = 
ListIndexes200Response.parse_obj( + { + "databases": [IndexMeta.from_dict(_item) for _item in obj.get("databases")] + if obj.get("databases") is not None + else None + } + ) + return _obj diff --git a/pinecone/core/client/models/namespace_summary.py b/pinecone/core/client/models/namespace_summary.py new file mode 100644 index 00000000..8d5fb118 --- /dev/null +++ b/pinecone/core/client/models/namespace_summary.py @@ -0,0 +1,72 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import Optional +from pydantic import BaseModel, Field, StrictInt + + +class NamespaceSummary(BaseModel): + """ + A summary of the contents of a namespace. # noqa: E501 + """ + + vector_count: Optional[StrictInt] = Field( + None, + alias="vectorCount", + description="The number of vectors stored in this namespace. 
Note that updates to this field may lag behind updates to the underlying index and corresponding query results, etc.", + ) + __properties = ["vectorCount"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> NamespaceSummary: + """Create an instance of NamespaceSummary from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> NamespaceSummary: + """Create an instance of NamespaceSummary from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return NamespaceSummary.parse_obj(obj) + + _obj = NamespaceSummary.parse_obj({"vector_count": obj.get("vectorCount")}) + return _obj diff --git a/pinecone/core/client/models/patch_request.py b/pinecone/core/client/models/patch_request.py new file mode 100644 index 00000000..79ab8f29 --- /dev/null +++ b/pinecone/core/client/models/patch_request.py @@ -0,0 +1,72 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import Optional +from pydantic import BaseModel, Field, StrictInt, StrictStr + + +class PatchRequest(BaseModel): + """ + PatchRequest + """ + + replicas: Optional[StrictInt] = Field(None, description="The desired number of replicas for the index.") + pod_type: Optional[StrictStr] = Field( + None, + description="The new pod type for the index. One of `s1`, `p1`, or `p2` appended with `.` and one of `x1`, `x2`, `x4`, or `x8`.", + ) + __properties = ["replicas", "pod_type"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> PatchRequest: + """Create an instance of PatchRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> PatchRequest: + """Create an instance of PatchRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return PatchRequest.parse_obj(obj) + + _obj = PatchRequest.parse_obj({"replicas": obj.get("replicas"), "pod_type": obj.get("pod_type")}) + return _obj diff --git a/pinecone/core/client/models/protobuf_any.py b/pinecone/core/client/models/protobuf_any.py new file mode 100644 index 00000000..d1114da9 --- /dev/null +++ b/pinecone/core/client/models/protobuf_any.py @@ -0,0 +1,69 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated 
by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import Optional, Union +from pydantic import BaseModel, Field, StrictBytes, StrictStr + + +class ProtobufAny(BaseModel): + """ + ProtobufAny + """ + + type_url: Optional[StrictStr] = Field(None, alias="typeUrl") + value: Optional[Union[StrictBytes, StrictStr]] = None + __properties = ["typeUrl", "value"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> ProtobufAny: + """Create an instance of ProtobufAny from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> ProtobufAny: + """Create an instance of ProtobufAny from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return ProtobufAny.parse_obj(obj) + + _obj = ProtobufAny.parse_obj({"type_url": obj.get("typeUrl"), "value": obj.get("value")}) + return _obj diff --git a/pinecone/core/client/models/protobuf_null_value.py b/pinecone/core/client/models/protobuf_null_value.py new file mode 100644 index 00000000..e90b5bd4 --- /dev/null +++ 
b/pinecone/core/client/models/protobuf_null_value.py @@ -0,0 +1,35 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import json +import pprint +import re # noqa: F401 +from aenum import Enum, no_arg + + +class ProtobufNullValue(str, Enum): + """ + `NullValue` is a singleton enumeration to represent the null value for the `Value` type union. The JSON representation for `NullValue` is JSON `null`. - NULL_VALUE: Null value. + """ + + """ + allowed enum values + """ + NULL_VALUE = "NULL_VALUE" + + @classmethod + def from_json(cls, json_str: str) -> ProtobufNullValue: + """Create an instance of ProtobufNullValue from a JSON string""" + return ProtobufNullValue(json.loads(json_str)) diff --git a/pinecone/core/client/models/query_request.py b/pinecone/core/client/models/query_request.py new file mode 100644 index 00000000..19787a26 --- /dev/null +++ b/pinecone/core/client/models/query_request.py @@ -0,0 +1,134 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import Any, Dict, List, Optional, Union +from pydantic import BaseModel, Field, StrictBool, StrictFloat, StrictInt, StrictStr, conint, conlist, constr +from pinecone.core.client.models.query_vector import QueryVector +from pinecone.core.client.models.sparse_values import SparseValues + + +class QueryRequest(BaseModel): + """ + The request for the `Query` operation. # noqa: E501 + """ + + namespace: Optional[StrictStr] = Field(None, description="The namespace to query.") + top_k: conint(strict=True, le=10000, ge=1) = Field( + ..., alias="topK", description="The number of results to return for each query." + ) + filter: Optional[Dict[str, Any]] = Field( + None, + description="The filter to apply. You can use vector metadata to limit your search. See https://www.pinecone.io/docs/metadata-filtering/.", + ) + include_values: Optional[StrictBool] = Field( + False, alias="includeValues", description="Indicates whether vector values are included in the response." + ) + include_metadata: Optional[StrictBool] = Field( + False, + alias="includeMetadata", + description="Indicates whether metadata is included in the response as well as the ids.", + ) + queries: Optional[conlist(QueryVector)] = Field( + None, + description="DEPRECATED. The query vectors. Each `query()` request can contain only one of the parameters `queries`, `vector`, or `id`.", + ) + vector: Optional[conlist(Union[StrictFloat, StrictInt])] = Field( + None, + description="The query vector. This should be the same length as the dimension of the index being queried. Each `query()` request can contain only one of the parameters `id` or `vector`.", + ) + sparse_vector: Optional[SparseValues] = Field(None, alias="sparseVector") + id: Optional[constr(strict=True, max_length=512)] = Field( + None, + description="The unique ID of the vector to be used as a query vector. 
Each `query()` request can contain only one of the parameters `queries`, `vector`, or `id`.", + ) + __properties = [ + "namespace", + "topK", + "filter", + "includeValues", + "includeMetadata", + "queries", + "vector", + "sparseVector", + "id", + ] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> QueryRequest: + """Create an instance of QueryRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + # override the default output from pydantic by calling `to_dict()` of each item in queries (list) + _items = [] + if self.queries: + for _item in self.queries: + if _item: + _items.append(_item.to_dict()) + _dict["queries"] = _items + # override the default output from pydantic by calling `to_dict()` of sparse_vector + if self.sparse_vector: + _dict["sparseVector"] = self.sparse_vector.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> QueryRequest: + """Create an instance of QueryRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return QueryRequest.parse_obj(obj) + + _obj = QueryRequest.parse_obj( + { + "namespace": obj.get("namespace"), + "top_k": obj.get("topK"), + "filter": obj.get("filter"), + "include_values": obj.get("includeValues") if obj.get("includeValues") is not None else False, + "include_metadata": obj.get("includeMetadata") if obj.get("includeMetadata") is not None else False, + "queries": 
[QueryVector.from_dict(_item) for _item in obj.get("queries")] + if obj.get("queries") is not None + else None, + "vector": obj.get("vector"), + "sparse_vector": SparseValues.from_dict(obj.get("sparseVector")) + if obj.get("sparseVector") is not None + else None, + "id": obj.get("id"), + } + ) + return _obj diff --git a/pinecone/core/client/models/query_response.py b/pinecone/core/client/models/query_response.py new file mode 100644 index 00000000..5c4f6bf2 --- /dev/null +++ b/pinecone/core/client/models/query_response.py @@ -0,0 +1,98 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import List, Optional +from pydantic import BaseModel, Field, StrictStr, conlist +from pinecone.core.client.models.scored_vector import ScoredVector +from pinecone.core.client.models.single_query_results import SingleQueryResults + + +class QueryResponse(BaseModel): + """ + The response for the `Query` operation. These are the matches found for a particular query vector. The matches are ordered from most similar to least similar. # noqa: E501 + """ + + results: Optional[conlist(SingleQueryResults)] = Field( + None, description="DEPRECATED. The results of each query. The order is the same as `QueryRequest.queries`." 
+ ) + matches: Optional[conlist(ScoredVector)] = Field(None, description="The matches for the vectors.") + namespace: Optional[StrictStr] = Field(None, description="The namespace for the vectors.") + __properties = ["results", "matches", "namespace"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> QueryResponse: + """Create an instance of QueryResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + # override the default output from pydantic by calling `to_dict()` of each item in results (list) + _items = [] + if self.results: + for _item in self.results: + if _item: + _items.append(_item.to_dict()) + _dict["results"] = _items + # override the default output from pydantic by calling `to_dict()` of each item in matches (list) + _items = [] + if self.matches: + for _item in self.matches: + if _item: + _items.append(_item.to_dict()) + _dict["matches"] = _items + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> QueryResponse: + """Create an instance of QueryResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return QueryResponse.parse_obj(obj) + + _obj = QueryResponse.parse_obj( + { + "results": [SingleQueryResults.from_dict(_item) for _item in obj.get("results")] + if obj.get("results") is not None + else None, + "matches": [ScoredVector.from_dict(_item) for _item in obj.get("matches")] + if obj.get("matches") is not None + else None, + 
"namespace": obj.get("namespace"), + } + ) + return _obj diff --git a/pinecone/core/client/models/query_vector.py b/pinecone/core/client/models/query_vector.py new file mode 100644 index 00000000..544bc6fd --- /dev/null +++ b/pinecone/core/client/models/query_vector.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import Any, Dict, List, Optional, Union +from pydantic import BaseModel, Field, StrictFloat, StrictInt, StrictStr, conint, conlist +from pinecone.core.client.models.sparse_values import SparseValues + + +class QueryVector(BaseModel): + """ + A single query vector within a `QueryRequest`. # noqa: E501 + """ + + values: conlist(Union[StrictFloat, StrictInt]) = Field( + ..., + description="The query vector values. This should be the same length as the dimension of the index being queried.", + ) + sparse_values: Optional[SparseValues] = Field(None, alias="sparseValues") + top_k: Optional[conint(strict=True, le=10000, ge=1)] = Field( + None, alias="topK", description="An override for the number of results to return for this query vector." + ) + namespace: Optional[StrictStr] = Field(None, description="An override the namespace to search.") + filter: Optional[Dict[str, Any]] = Field( + None, description="An override for the metadata filter to apply. This replaces the request-level filter." 
+ ) + __properties = ["values", "sparseValues", "topK", "namespace", "filter"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> QueryVector: + """Create an instance of QueryVector from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + # override the default output from pydantic by calling `to_dict()` of sparse_values + if self.sparse_values: + _dict["sparseValues"] = self.sparse_values.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> QueryVector: + """Create an instance of QueryVector from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return QueryVector.parse_obj(obj) + + _obj = QueryVector.parse_obj( + { + "values": obj.get("values"), + "sparse_values": SparseValues.from_dict(obj.get("sparseValues")) + if obj.get("sparseValues") is not None + else None, + "top_k": obj.get("topK"), + "namespace": obj.get("namespace"), + "filter": obj.get("filter"), + } + ) + return _obj diff --git a/pinecone/core/client/models/rpc_status.py b/pinecone/core/client/models/rpc_status.py new file mode 100644 index 00000000..e02eadb6 --- /dev/null +++ b/pinecone/core/client/models/rpc_status.py @@ -0,0 +1,86 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + 
Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import List, Optional +from pydantic import BaseModel, StrictInt, StrictStr, conlist +from pinecone.core.client.models.protobuf_any import ProtobufAny + + +class RpcStatus(BaseModel): + """ + RpcStatus + """ + + code: Optional[StrictInt] = None + message: Optional[StrictStr] = None + details: Optional[conlist(ProtobufAny)] = None + __properties = ["code", "message", "details"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> RpcStatus: + """Create an instance of RpcStatus from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + # override the default output from pydantic by calling `to_dict()` of each item in details (list) + _items = [] + if self.details: + for _item in self.details: + if _item: + _items.append(_item.to_dict()) + _dict["details"] = _items + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> RpcStatus: + """Create an instance of RpcStatus from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return RpcStatus.parse_obj(obj) + + _obj = RpcStatus.parse_obj( + { + "code": obj.get("code"), + "message": obj.get("message"), + "details": [ProtobufAny.from_dict(_item) for _item in obj.get("details")] + if obj.get("details") is 
not None + else None, + } + ) + return _obj diff --git a/pinecone/core/client/models/scored_vector.py b/pinecone/core/client/models/scored_vector.py new file mode 100644 index 00000000..bf983eae --- /dev/null +++ b/pinecone/core/client/models/scored_vector.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import Any, Dict, List, Optional, Union +from pydantic import BaseModel, Field, StrictFloat, StrictInt, conlist, constr +from pinecone.core.client.models.sparse_values import SparseValues + + +class ScoredVector(BaseModel): + """ + ScoredVector + """ + + id: constr(strict=True, max_length=512, min_length=1) = Field(..., description="This is the vector's unique id.") + score: Optional[Union[StrictFloat, StrictInt]] = Field( + None, + description="This is a measure of similarity between this vector and the query vector. The higher the score, the more they are similar.", + ) + values: Optional[conlist(Union[StrictFloat, StrictInt])] = Field( + None, description="This is the vector data, if it is requested." 
+ ) + sparse_values: Optional[SparseValues] = Field(None, alias="sparseValues") + metadata: Optional[Dict[str, Any]] = Field(None, description="This is the metadata, if it is requested.") + __properties = ["id", "score", "values", "sparseValues", "metadata"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> ScoredVector: + """Create an instance of ScoredVector from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + # override the default output from pydantic by calling `to_dict()` of sparse_values + if self.sparse_values: + _dict["sparseValues"] = self.sparse_values.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> ScoredVector: + """Create an instance of ScoredVector from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return ScoredVector.parse_obj(obj) + + _obj = ScoredVector.parse_obj( + { + "id": obj.get("id"), + "score": obj.get("score"), + "values": obj.get("values"), + "sparse_values": SparseValues.from_dict(obj.get("sparseValues")) + if obj.get("sparseValues") is not None + else None, + "metadata": obj.get("metadata"), + } + ) + return _obj diff --git a/pinecone/core/client/models/single_query_results.py b/pinecone/core/client/models/single_query_results.py new file mode 100644 index 00000000..91ef3c00 --- /dev/null +++ b/pinecone/core/client/models/single_query_results.py @@ -0,0 +1,84 @@ +# coding: utf-8 + +""" + Pinecone API + + No 
description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import List, Optional +from pydantic import BaseModel, Field, StrictStr, conlist +from pinecone.core.client.models.scored_vector import ScoredVector + + +class SingleQueryResults(BaseModel): + """ + SingleQueryResults + """ + + matches: Optional[conlist(ScoredVector)] = Field(None, description="The matches for the vectors.") + namespace: Optional[StrictStr] = Field(None, description="The namespace for the vectors.") + __properties = ["matches", "namespace"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> SingleQueryResults: + """Create an instance of SingleQueryResults from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + # override the default output from pydantic by calling `to_dict()` of each item in matches (list) + _items = [] + if self.matches: + for _item in self.matches: + if _item: + _items.append(_item.to_dict()) + _dict["matches"] = _items + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> SingleQueryResults: + """Create an instance of 
SingleQueryResults from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return SingleQueryResults.parse_obj(obj) + + _obj = SingleQueryResults.parse_obj( + { + "matches": [ScoredVector.from_dict(_item) for _item in obj.get("matches")] + if obj.get("matches") is not None + else None, + "namespace": obj.get("namespace"), + } + ) + return _obj diff --git a/pinecone/core/client/models/sparse_values.py b/pinecone/core/client/models/sparse_values.py new file mode 100644 index 00000000..c72173d3 --- /dev/null +++ b/pinecone/core/client/models/sparse_values.py @@ -0,0 +1,72 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import List, Union +from pydantic import BaseModel, Field, StrictFloat, StrictInt, conlist + + +class SparseValues(BaseModel): + """ + Vector sparse data. Represented as a list of indices and a list of corresponded values, which must be with the same length. 
# noqa: E501 + """ + + indices: conlist(StrictInt) = Field(..., description="The indices of the sparse data.") + values: conlist(Union[StrictFloat, StrictInt]) = Field( + ..., + description="The corresponding values of the sparse data, which must be with the same length as the indices.", + ) + __properties = ["indices", "values"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> SparseValues: + """Create an instance of SparseValues from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> SparseValues: + """Create an instance of SparseValues from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return SparseValues.parse_obj(obj) + + _obj = SparseValues.parse_obj({"indices": obj.get("indices"), "values": obj.get("values")}) + return _obj diff --git a/pinecone/core/client/models/update_request.py b/pinecone/core/client/models/update_request.py new file mode 100644 index 00000000..a04a3920 --- /dev/null +++ b/pinecone/core/client/models/update_request.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import Any, Dict, List, Optional, Union +from pydantic import BaseModel, Field, StrictFloat, StrictInt, StrictStr, conlist, constr +from pinecone.core.client.models.sparse_values import SparseValues + + +class UpdateRequest(BaseModel): + """ + The request for the `Upsert` operation. # noqa: E501 + """ + + id: constr(strict=True, max_length=512, min_length=1) = Field(..., description="Vector's unique id.") + values: Optional[conlist(Union[StrictFloat, StrictInt])] = Field(None, description="Vector data.") + sparse_values: Optional[SparseValues] = Field(None, alias="sparseValues") + set_metadata: Optional[Dict[str, Any]] = Field( + None, alias="setMetadata", description="Metadata to *set* for the vector." + ) + namespace: Optional[StrictStr] = Field(None, description="Namespace name where to update the vector.") + __properties = ["id", "values", "sparseValues", "setMetadata", "namespace"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> UpdateRequest: + """Create an instance of UpdateRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + # override the default output from pydantic by calling `to_dict()` of sparse_values + if self.sparse_values: + _dict["sparseValues"] = self.sparse_values.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> 
UpdateRequest: + """Create an instance of UpdateRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return UpdateRequest.parse_obj(obj) + + _obj = UpdateRequest.parse_obj( + { + "id": obj.get("id"), + "values": obj.get("values"), + "sparse_values": SparseValues.from_dict(obj.get("sparseValues")) + if obj.get("sparseValues") is not None + else None, + "set_metadata": obj.get("setMetadata"), + "namespace": obj.get("namespace"), + } + ) + return _obj diff --git a/pinecone/core/client/models/upsert_request.py b/pinecone/core/client/models/upsert_request.py new file mode 100644 index 00000000..11518db5 --- /dev/null +++ b/pinecone/core/client/models/upsert_request.py @@ -0,0 +1,86 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import List, Optional +from pydantic import BaseModel, Field, StrictStr, conlist +from pinecone.core.client.models.vector import Vector + + +class UpsertRequest(BaseModel): + """ + The request for the `Upsert` operation. # noqa: E501 + """ + + vectors: conlist(Vector) = Field( + ..., description="An array containing the vectors to upsert. Recommended batch limit is 100 vectors." 
+ ) + namespace: Optional[StrictStr] = Field(None, description="This is the namespace name where you upsert vectors.") + __properties = ["vectors", "namespace"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> UpsertRequest: + """Create an instance of UpsertRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + # override the default output from pydantic by calling `to_dict()` of each item in vectors (list) + _items = [] + if self.vectors: + for _item in self.vectors: + if _item: + _items.append(_item.to_dict()) + _dict["vectors"] = _items + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> UpsertRequest: + """Create an instance of UpsertRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return UpsertRequest.parse_obj(obj) + + _obj = UpsertRequest.parse_obj( + { + "vectors": [Vector.from_dict(_item) for _item in obj.get("vectors")] + if obj.get("vectors") is not None + else None, + "namespace": obj.get("namespace"), + } + ) + return _obj diff --git a/pinecone/core/client/models/upsert_response.py b/pinecone/core/client/models/upsert_response.py new file mode 100644 index 00000000..cd17799e --- /dev/null +++ b/pinecone/core/client/models/upsert_response.py @@ -0,0 +1,70 @@ +# coding: utf-8 + +""" + Pinecone API + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The 
version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import Optional +from pydantic import BaseModel, Field, StrictInt + + +class UpsertResponse(BaseModel): + """ + The response for the `Upsert` operation. # noqa: E501 + """ + + upserted_count: Optional[StrictInt] = Field( + None, alias="upsertedCount", description="The number of vectors upserted." + ) + __properties = ["upsertedCount"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> UpsertResponse: + """Create an instance of UpsertResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> UpsertResponse: + """Create an instance of UpsertResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return UpsertResponse.parse_obj(obj) + + _obj = UpsertResponse.parse_obj({"upserted_count": obj.get("upsertedCount")}) + return _obj diff --git a/pinecone/core/client/models/vector.py b/pinecone/core/client/models/vector.py new file mode 100644 index 00000000..96375181 --- /dev/null +++ b/pinecone/core/client/models/vector.py @@ -0,0 +1,86 @@ +# coding: utf-8 + +""" + Pinecone API + + No description 
provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) + + The version of the OpenAPI document: version not set + Contact: support@pinecone.io + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + + +from typing import Any, Dict, List, Optional, Union +from pydantic import BaseModel, Field, StrictFloat, StrictInt, conlist, constr +from pinecone.core.client.models.sparse_values import SparseValues + + +class Vector(BaseModel): + """ + Vector + """ + + id: constr(strict=True, max_length=512, min_length=1) = Field(..., description="This is the vector's unique id.") + values: conlist(Union[StrictFloat, StrictInt]) = Field( + ..., description="This is the vector data included in the request." + ) + sparse_values: Optional[SparseValues] = Field(None, alias="sparseValues") + metadata: Optional[Dict[str, Any]] = Field(None, description="This is the metadata included in the request.") + __properties = ["id", "values", "sparseValues", "metadata"] + + class Config: + """Pydantic configuration""" + + allow_population_by_field_name = True + validate_assignment = True + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.dict(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Vector: + """Create an instance of Vector from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self): + """Returns the dictionary representation of the model using alias""" + _dict = self.dict(by_alias=True, exclude={}, exclude_none=True) + # override the default output from pydantic by calling `to_dict()` of sparse_values + if self.sparse_values: + 
_dict["sparseValues"] = self.sparse_values.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: dict) -> Vector: + """Create an instance of Vector from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return Vector.parse_obj(obj) + + _obj = Vector.parse_obj( + { + "id": obj.get("id"), + "values": obj.get("values"), + "sparse_values": SparseValues.from_dict(obj.get("sparseValues")) + if obj.get("sparseValues") is not None + else None, + "metadata": obj.get("metadata"), + } + ) + return _obj diff --git a/pinecone/core/client/py.typed b/pinecone/core/client/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/pinecone/core/client/rest.py b/pinecone/core/client/rest.py index b986712b..4cbc0c0d 100644 --- a/pinecone/core/client/rest.py +++ b/pinecone/core/client/rest.py @@ -1,12 +1,16 @@ +# coding: utf-8 + """ Pinecone API - No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) The version of the OpenAPI document: version not set Contact: support@pinecone.io - Generated by: https://openapi-generator.tech -""" + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 import io @@ -14,8 +18,8 @@ import logging import re import ssl -from urllib.parse import urlencode +from urllib.parse import urlencode, quote_plus import urllib3 from pinecone.core.client.exceptions import ( @@ -25,6 +29,7 @@ NotFoundException, ServiceException, ApiValueError, + BadRequestException, ) @@ -32,7 +37,7 @@ class RESTResponse(io.IOBase): - def __init__(self, resp): + def __init__(self, resp) -> None: self.urllib3_response = resp self.status = resp.status self.reason = resp.reason @@ -47,8 +52,8 @@ def getheader(self, name, default=None): return self.urllib3_response.headers.get(name, default) -class RESTClientObject(object): - def __init__(self, configuration, pools_size=4, maxsize=None): +class RESTClientObject: + def __init__(self, configuration, pools_size=4, maxsize=None) -> None: # urllib3.PoolManager will pass all kw parameters to connectionpool # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 @@ -68,6 +73,9 @@ def __init__(self, configuration, pools_size=4, maxsize=None): if configuration.retries is not None: addition_pool_args["retries"] = configuration.retries + if configuration.tls_server_name: + addition_pool_args["server_hostname"] = configuration.tls_server_name + if configuration.socket_options is not None: addition_pool_args["socket_options"] = configuration.socket_options @@ -88,7 +96,7 @@ def __init__(self, configuration, pools_size=4, maxsize=None): key_file=configuration.key_file, proxy_url=configuration.proxy, proxy_headers=configuration.proxy_headers, - **addition_pool_args + **addition_pool_args, ) else: self.pool_manager = urllib3.PoolManager( @@ -98,7 +106,7 @@ def __init__(self, configuration, pools_size=4, maxsize=None): ca_certs=configuration.ssl_ca_cert, cert_file=configuration.cert_file, 
key_file=configuration.key_file, - **addition_pool_args + **addition_pool_args, ) def request( @@ -138,6 +146,9 @@ def request( post_params = post_params or {} headers = headers or {} + # url already contains the URL query string + # so reset query_params to empty dict + query_params = {} timeout = None if _request_timeout: @@ -149,12 +160,8 @@ def request( try: # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` if method in ["POST", "PUT", "PATCH", "OPTIONS", "DELETE"]: - # Only set a default Content-Type for POST, PUT, PATCH and OPTIONS requests - if (method != "DELETE") and ("Content-Type" not in headers): - headers["Content-Type"] = "application/json" - if query_params: - url += "?" + urlencode(query_params) - if ("Content-Type" not in headers) or (re.search("json", headers["Content-Type"], re.IGNORECASE)): + # no content type provided or payload is json + if not headers.get("Content-Type") or re.search("json", headers["Content-Type"], re.IGNORECASE): request_body = None if body is not None: request_body = json.dumps(body) @@ -212,7 +219,7 @@ def request( # For `GET`, `HEAD` else: r = self.pool_manager.request( - method, url, fields=query_params, preload_content=_preload_content, timeout=timeout, headers=headers + method, url, fields={}, preload_content=_preload_content, timeout=timeout, headers=headers ) except urllib3.exceptions.SSLError as e: msg = "{0}\n{1}".format(type(e).__name__, str(e)) @@ -225,6 +232,9 @@ def request( logger.debug("response body: %s", r.data) if not 200 <= r.status <= 299: + if r.status == 400: + raise BadRequestException(http_resp=r) + if r.status == 401: raise UnauthorizedException(http_resp=r) @@ -241,7 +251,7 @@ def request( return r - def GET(self, url, headers=None, query_params=None, _preload_content=True, _request_timeout=None): + def get_request(self, url, headers=None, query_params=None, _preload_content=True, _request_timeout=None): return self.request( "GET", url, @@ -251,7 +261,7 @@ def GET(self, url, headers=None, 
query_params=None, _preload_content=True, _requ query_params=query_params, ) - def HEAD(self, url, headers=None, query_params=None, _preload_content=True, _request_timeout=None): + def head_request(self, url, headers=None, query_params=None, _preload_content=True, _request_timeout=None): return self.request( "HEAD", url, @@ -261,7 +271,7 @@ def HEAD(self, url, headers=None, query_params=None, _preload_content=True, _req query_params=query_params, ) - def OPTIONS( + def options_request( self, url, headers=None, @@ -282,7 +292,9 @@ def OPTIONS( body=body, ) - def DELETE(self, url, headers=None, query_params=None, body=None, _preload_content=True, _request_timeout=None): + def delete_request( + self, url, headers=None, query_params=None, body=None, _preload_content=True, _request_timeout=None + ): return self.request( "DELETE", url, @@ -293,7 +305,7 @@ def DELETE(self, url, headers=None, query_params=None, body=None, _preload_conte body=body, ) - def POST( + def post_request( self, url, headers=None, @@ -314,7 +326,7 @@ def POST( body=body, ) - def PUT( + def put_request( self, url, headers=None, @@ -335,7 +347,7 @@ def PUT( body=body, ) - def PATCH( + def patch_request( self, url, headers=None, diff --git a/pinecone/core/exceptions.py b/pinecone/core/exceptions.py deleted file mode 100644 index cd457772..00000000 --- a/pinecone/core/exceptions.py +++ /dev/null @@ -1,6 +0,0 @@ -class PineconeException(Exception): - """The base exception class for all Pinecone client exceptions.""" - - -class PineconeProtocolError(PineconeException): - """Raised when something unexpected happens mid-request/response.""" diff --git a/pinecone/core/grpc/__init__.py b/pinecone/core/grpc/__init__.py deleted file mode 100644 index 8b137891..00000000 --- a/pinecone/core/grpc/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/pinecone/core/grpc/protos/__init__.py b/pinecone/core/grpc/protos/__init__.py deleted file mode 100644 index 8b137891..00000000 --- 
a/pinecone/core/grpc/protos/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/pinecone/exceptions.py b/pinecone/exceptions.py index ce23887f..62da708f 100644 --- a/pinecone/exceptions.py +++ b/pinecone/exceptions.py @@ -1,4 +1,3 @@ -from .core.exceptions import PineconeException, PineconeProtocolError from .core.client.exceptions import ( OpenApiException, ApiAttributeError, @@ -12,6 +11,12 @@ ServiceException, ) +class PineconeException(Exception): + """The base exception class for all Pinecone client exceptions.""" + +class PineconeProtocolError(PineconeException): + """Raised when something unexpected happens mid-request/response.""" + __all__ = [ "PineconeException", "PineconeProtocolError", diff --git a/pinecone/grpc/__init__.py b/pinecone/grpc/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pinecone/core/grpc/index_grpc.py b/pinecone/grpc/index_grpc.py similarity index 98% rename from pinecone/core/grpc/index_grpc.py rename to pinecone/grpc/index_grpc.py index f62e9ec8..1e35300b 100644 --- a/pinecone/core/grpc/index_grpc.py +++ b/pinecone/grpc/index_grpc.py @@ -15,8 +15,8 @@ from pinecone import FetchResponse, QueryResponse, ScoredVector, SingleQueryResults, DescribeIndexStatsResponse from pinecone.config import Config -from pinecone.core.client.model.namespace_summary import NamespaceSummary -from pinecone.core.client.model.vector import Vector as _Vector +from pinecone.core.client.models.namespace_summary import NamespaceSummary +from pinecone.core.client.models.vector import Vector as _Vector from pinecone.core.grpc.protos.vector_service_pb2 import ( Vector as GRPCVector, QueryVector as GRPCQueryVector, @@ -31,11 +31,11 @@ UpdateResponse, SparseValues as GRPCSparseValues, ) -from pinecone.core.client.model.sparse_values import SparseValues +from pinecone.core.client.models.sparse_values import SparseValues from pinecone.core.grpc.protos.vector_service_pb2_grpc import VectorServiceStub -from pinecone.core.grpc.retry import 
RetryOnRpcErrorClientInterceptor, RetryConfig -from pinecone.core.utils import _generate_request_id, dict_to_proto_struct, fix_tuple_length -from pinecone.core.utils.constants import ( +from pinecone.grpc.retry import RetryOnRpcErrorClientInterceptor, RetryConfig +from pinecone.utils import _generate_request_id, dict_to_proto_struct, fix_tuple_length +from pinecone.utils.constants import ( MAX_MSG_SIZE, REQUEST_ID, CLIENT_VERSION, @@ -349,7 +349,7 @@ def upsert( namespace: Optional[str] = None, batch_size: Optional[int] = None, show_progress: bool = True, - **kwargs + **kwargs, ) -> Union[UpsertResponse, PineconeGrpcFuture]: """ The upsert operation writes vectors into a namespace. @@ -565,7 +565,7 @@ def delete( namespace: Optional[str] = None, filter: Optional[Dict[str, Union[str, float, int, bool, List, dict]]] = None, async_req: bool = False, - **kwargs + **kwargs, ) -> Union[DeleteResponse, PineconeGrpcFuture]: """ The Delete operation deletes vectors from the index, from a single namespace. @@ -650,7 +650,7 @@ def query( include_values: Optional[bool] = None, include_metadata: Optional[bool] = None, sparse_vector: Optional[Union[GRPCSparseValues, Dict[str, Union[List[float], List[int]]]]] = None, - **kwargs + **kwargs, ) -> QueryResponse: """ The Query operation searches a namespace, using a query vector. @@ -740,7 +740,7 @@ def update( set_metadata: Optional[Dict[str, Union[str, float, int, bool, List[int], List[float], List[str]]]] = None, namespace: Optional[str] = None, sparse_values: Optional[Union[GRPCSparseValues, Dict[str, Union[List[float], List[int]]]]] = None, - **kwargs + **kwargs, ) -> Union[UpdateResponse, PineconeGrpcFuture]: """ The Update operation updates vector in a namespace. 
@@ -840,4 +840,4 @@ def _parse_sparse_values_arg( f"Received: {sparse_values}" ) - return GRPCSparseValues(indices=sparse_values["indices"], values=sparse_values["values"]) + return GRPCSparseValues(indices=sparse_values["indices"], values=sparse_values["values"]) \ No newline at end of file diff --git a/pinecone/core/grpc/protos/vector_column_service_pb2.py b/pinecone/grpc/protos/vector_column_service_pb2.py similarity index 99% rename from pinecone/core/grpc/protos/vector_column_service_pb2.py rename to pinecone/grpc/protos/vector_column_service_pb2.py index 1b4ebadc..cab350a6 100644 --- a/pinecone/core/grpc/protos/vector_column_service_pb2.py +++ b/pinecone/grpc/protos/vector_column_service_pb2.py @@ -1290,4 +1290,4 @@ DESCRIPTOR.services_by_name["VectorColumnService"] = _VECTORCOLUMNSERVICE -# @@protoc_insertion_point(module_scope) +# @@protoc_insertion_point(module_scope) \ No newline at end of file diff --git a/pinecone/core/grpc/protos/vector_column_service_pb2.pyi b/pinecone/grpc/protos/vector_column_service_pb2.pyi similarity index 99% rename from pinecone/core/grpc/protos/vector_column_service_pb2.pyi rename to pinecone/grpc/protos/vector_column_service_pb2.pyi index 3b85ddb1..3b0176f6 100644 --- a/pinecone/core/grpc/protos/vector_column_service_pb2.pyi +++ b/pinecone/grpc/protos/vector_column_service_pb2.pyi @@ -347,4 +347,4 @@ class DescribeIndexStatsResponse(google___protobuf___message___Message): self, field_name: typing_extensions___Literal["dimension", b"dimension", "namespaces", b"namespaces"] ) -> None: ... 
-type___DescribeIndexStatsResponse = DescribeIndexStatsResponse +type___DescribeIndexStatsResponse = DescribeIndexStatsResponse \ No newline at end of file diff --git a/pinecone/core/grpc/protos/vector_column_service_pb2_grpc.py b/pinecone/grpc/protos/vector_column_service_pb2_grpc.py similarity index 99% rename from pinecone/core/grpc/protos/vector_column_service_pb2_grpc.py rename to pinecone/grpc/protos/vector_column_service_pb2_grpc.py index 677e12b4..93db5020 100644 --- a/pinecone/core/grpc/protos/vector_column_service_pb2_grpc.py +++ b/pinecone/grpc/protos/vector_column_service_pb2_grpc.py @@ -264,4 +264,4 @@ def DescribeIndexStats( wait_for_ready, timeout, metadata, - ) + ) \ No newline at end of file diff --git a/pinecone/core/grpc/retry.py b/pinecone/grpc/retry.py similarity index 99% rename from pinecone/core/grpc/retry.py rename to pinecone/grpc/retry.py index b2718288..e1ec306a 100644 --- a/pinecone/core/grpc/retry.py +++ b/pinecone/grpc/retry.py @@ -84,4 +84,4 @@ class RetryConfig(NamedTuple): max_attempts: int = 4 sleep_policy: SleepPolicy = ExponentialBackoff(init_backoff_ms=100, max_backoff_ms=1600, multiplier=2) - retryable_status: Optional[Tuple[grpc.StatusCode, ...]] = (grpc.StatusCode.UNAVAILABLE,) + retryable_status: Optional[Tuple[grpc.StatusCode, ...]] = (grpc.StatusCode.UNAVAILABLE,) \ No newline at end of file diff --git a/pinecone/index.py b/pinecone/index.py index 64d66d67..663645d6 100644 --- a/pinecone/index.py +++ b/pinecone/index.py @@ -6,7 +6,7 @@ from collections.abc import Iterable, Mapping from typing import Union, List, Tuple, Optional, Dict, Any -from .core.client.model.sparse_values import SparseValues +from .core.client.models.sparse_values import SparseValues from pinecone import Config from pinecone.core.client import ApiClient from .core.client.models import ( @@ -28,7 +28,7 @@ DescribeIndexStatsRequest, ) from pinecone.core.client.api.vector_operations_api import VectorOperationsApi -from pinecone.core.utils import 
fix_tuple_length, get_user_agent, warn_deprecated +from .utils import fix_tuple_length, get_user_agent, warn_deprecated import copy __all__ = [ @@ -52,8 +52,8 @@ "SparseValues", ] -from .core.utils.constants import REQUIRED_VECTOR_FIELDS, OPTIONAL_VECTOR_FIELDS -from .core.utils.error_handling import validate_and_convert_errors +from .utils.constants import REQUIRED_VECTOR_FIELDS, OPTIONAL_VECTOR_FIELDS +from .utils.error_handling import validate_and_convert_errors _OPENAPI_ENDPOINT_PARAMS = ( "_return_http_data_only", @@ -68,10 +68,10 @@ def parse_query_response(response: QueryResponse, unary_query: bool): if unary_query: - response._data_store.pop("results", None) + response.results = None else: - response._data_store.pop("matches", None) - response._data_store.pop("namespace", None) + response.matches = None + response.namespace = None return response diff --git a/pinecone/info.py b/pinecone/info.py deleted file mode 100644 index 36f5ae2e..00000000 --- a/pinecone/info.py +++ /dev/null @@ -1,35 +0,0 @@ -from pinecone.core.api_action import ActionAPI, VersionResponse, WhoAmIResponse -from pinecone.config import Config - -import time -import requests - -__all__ = ["version", "whoami", "VersionResponse", "WhoAmIResponse"] - - -def _get_action_api(): - return ActionAPI(host=Config.CONTROLLER_HOST, api_key=Config.API_KEY) - - -def version() -> VersionResponse: - """Returns version information (client and server).""" - api = _get_action_api() - return api.version() - - -def whoami() -> WhoAmIResponse: - """Returns the details of the currently authenticated API key.""" - api = _get_action_api() - return api.whoami() - - -def wait_controller_ready(timeout: int = 30): - connection = False - max_time = time.time() + timeout - while (not connection) and time.time() < max_time: - try: - version() - time.sleep(3) - connection = True - except requests.exceptions.ConnectionError: - time.sleep(1) diff --git a/pinecone/manage.py b/pinecone/manage.py index 76541edf..5f3227a8 
100644 --- a/pinecone/manage.py +++ b/pinecone/manage.py @@ -5,10 +5,10 @@ from pinecone.config import Config from pinecone.core.client.api.index_operations_api import IndexOperationsApi from pinecone.core.client.api_client import ApiClient -from pinecone.core.client.model.create_request import CreateRequest -from pinecone.core.client.model.patch_request import PatchRequest -from pinecone.core.client.model.create_collection_request import CreateCollectionRequest -from pinecone.core.utils import get_user_agent +from pinecone.core.client.models.create_request import CreateRequest +from pinecone.core.client.models.patch_request import PatchRequest +from pinecone.core.client.models.create_collection_request import CreateCollectionRequest +from pinecone.utils import get_user_agent __all__ = [ "create_index", @@ -73,6 +73,9 @@ def _get_status(name: str): def create_index( name: str, dimension: int, + cloud: str, + region: str, + capacity_mode: str, timeout: int = None, index_type: str = "approximated", metric: str = "cosine", @@ -89,6 +92,9 @@ def create_index( :param name: the name of the index. :type name: str :param dimension: the dimension of vectors that would be inserted in the index + :param cloud: The cloud where you would like your index hosted. One of `{"aws", "gcp"}`. + :param region: The region where you would like your index hosted. + :param capacity_mode: The capacity mode for the index. One of `{"pod"}`. :param index_type: type of index, one of `{"approximated", "exact"}`, defaults to "approximated". The "approximated" index uses fast approximate search algorithms developed by Pinecone. The "exact" index uses accurate exact search algorithms. 
@@ -125,6 +131,9 @@ def create_index( create_request=CreateRequest( name=name, dimension=dimension, + cloud=cloud, + region=region, + capacity_mode=capacity_mode, index_type=index_type, metric=metric, replicas=replicas, diff --git a/pinecone/core/utils/__init__.py b/pinecone/utils/__init__.py similarity index 93% rename from pinecone/core/utils/__init__.py rename to pinecone/utils/__init__.py index d4cca164..08f885eb 100644 --- a/pinecone/core/utils/__init__.py +++ b/pinecone/utils/__init__.py @@ -10,7 +10,7 @@ import urllib3 try: - from pinecone.core.grpc.protos import vector_column_service_pb2 + from pinecone.grpc.protos import vector_column_service_pb2 from google.protobuf.struct_pb2 import Struct from google.protobuf import json_format import numpy as np @@ -51,11 +51,11 @@ def dump_strings_public(strs: List[str], compressed: bool = False) -> "vector_co def get_version(): - return Path(__file__).parent.parent.parent.joinpath("__version__").read_text().strip() + return Path(__file__).parent.parent.joinpath("__version__").read_text().strip() def get_environment(): - return Path(__file__).parent.parent.parent.joinpath("__environment__").read_text().strip() + return Path(__file__).parent.parent.joinpath("__environment__").read_text().strip() def validate_dns_name(name): @@ -133,4 +133,4 @@ def check_kwargs(caller, given): argspec = inspect.getfullargspec(caller) diff = set(given).difference(argspec.args) if diff: - logging.exception(caller.__name__ + " had unexpected keyword argument(s): " + ", ".join(diff), exc_info=False) + logging.exception(caller.__name__ + " had unexpected keyword argument(s): " + ", ".join(diff), exc_info=False) \ No newline at end of file diff --git a/pinecone/core/utils/constants.py b/pinecone/utils/constants.py similarity index 87% rename from pinecone/core/utils/constants.py rename to pinecone/utils/constants.py index ca9d9631..c415a36a 100644 --- a/pinecone/core/utils/constants.py +++ b/pinecone/utils/constants.py @@ -1,7 +1,7 @@ import 
os import enum -from pinecone.core.utils import get_environment, get_version +from pinecone.utils import get_environment, get_version PARENT_LOGGER_NAME = "pinecone" DEFAULT_PARENT_LOGGER_LEVEL = "ERROR" @@ -35,4 +35,4 @@ class NodeType(str, enum.Enum): TCP_KEEPCNT = 4 REQUIRED_VECTOR_FIELDS = {"id", "values"} -OPTIONAL_VECTOR_FIELDS = {"sparse_values", "metadata"} +OPTIONAL_VECTOR_FIELDS = {"sparse_values", "metadata"} \ No newline at end of file diff --git a/pinecone/core/utils/error_handling.py b/pinecone/utils/error_handling.py similarity index 75% rename from pinecone/core/utils/error_handling.py rename to pinecone/utils/error_handling.py index 6d3f5405..f64b1a59 100644 --- a/pinecone/core/utils/error_handling.py +++ b/pinecone/utils/error_handling.py @@ -3,8 +3,7 @@ from urllib3.exceptions import MaxRetryError, ProtocolError -from pinecone import Config, PineconeProtocolError - +from pinecone import Config def validate_and_convert_errors(func): @wraps(func) @@ -14,15 +13,15 @@ def inner_func(*args, **kwargs): return func(*args, **kwargs) except MaxRetryError as e: if isinstance(e.reason, ProtocolError): - raise PineconeProtocolError( + raise ProtocolError( f"Failed to connect to {e.url}; did you specify the correct index name?" ) from e else: raise except ProtocolError as e: - raise PineconeProtocolError(f"Failed to connect; did you specify the correct index name?") from e + raise ProtocolError(f"Failed to connect; did you specify the correct index name?") from e # Override signature sig = inspect.signature(func) inner_func.__signature__ = sig - return inner_func + return inner_func \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index 0daffa1c..1060d39a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,17 @@ # This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
+[[package]] +name = "aenum" +version = "3.1.11" +description = "Advanced Enumerations (compatible with Python's stdlib Enum), NamedTuples, and NamedConstants" +optional = false +python-versions = "*" +files = [ + {file = "aenum-3.1.11-py2-none-any.whl", hash = "sha256:525b4870a27d0b471c265bda692bc657f1e0dd7597ad4186d072c59f9db666f6"}, + {file = "aenum-3.1.11-py3-none-any.whl", hash = "sha256:12ae89967f2e25c0ce28c293955d643f891603488bc3d9946158ba2b35203638"}, + {file = "aenum-3.1.11.tar.gz", hash = "sha256:aed2c273547ae72a0d5ee869719c02a643da16bf507c80958faadc7e038e3f73"}, +] + [[package]] name = "astunparse" version = "1.6.3" @@ -695,6 +707,58 @@ files = [ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] +[[package]] +name = "pydantic" +version = "1.10.12" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, + {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, + {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = 
"sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, + {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, + {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, + {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, + {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, + {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, + {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, + {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, + {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, + {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, + {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, 
+ {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, + {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, + {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, + {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, + {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, + {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, + {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + [[package]] name = "pygments" version = "2.16.1" @@ -906,23 +970,21 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "responses" -version = "0.23.3" +version = "0.10.15" description = "A utility library for mocking out the `requests` Python library." 
optional = false -python-versions = ">=3.7" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "responses-0.23.3-py3-none-any.whl", hash = "sha256:e6fbcf5d82172fecc0aa1860fd91e58cbfd96cee5e96da5b63fa6eb3caa10dd3"}, - {file = "responses-0.23.3.tar.gz", hash = "sha256:205029e1cb334c21cb4ec64fc7599be48b859a0fd381a42443cdd600bfe8b16a"}, + {file = "responses-0.10.15-py2.py3-none-any.whl", hash = "sha256:af94d28cdfb48ded0ad82a5216616631543650f440334a693479b8991a6594a2"}, + {file = "responses-0.10.15.tar.gz", hash = "sha256:7bb697a5fedeb41d81e8b87f152d453d5cab42dcd1691b6a7d6097e94d33f373"}, ] [package.dependencies] -pyyaml = "*" -requests = ">=2.30.0,<3.0" -types-PyYAML = "*" -urllib3 = ">=1.25.10,<3.0" +requests = ">=2.0" +six = "*" [package.extras] -tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-requests"] +tests = ["coverage (>=3.7.1,<5.0.0)", "flake8", "pytest", "pytest (>=4.6,<5.0)", "pytest-cov", "pytest-localserver"] [[package]] name = "six" @@ -966,17 +1028,6 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] -[[package]] -name = "types-pyyaml" -version = "6.0.12.12" -description = "Typing stubs for PyYAML" -optional = false -python-versions = "*" -files = [ - {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"}, - {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"}, -] - [[package]] name = "typing-extensions" version = "4.8.0" @@ -1001,20 +1052,19 @@ files = [ [[package]] name = "urllib3" -version = "2.0.6" +version = "1.25.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=3.7" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" files = [ - {file = "urllib3-2.0.6-py3-none-any.whl", hash = "sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2"}, - {file = "urllib3-2.0.6.tar.gz", hash = "sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564"}, + {file = "urllib3-1.25.3-py2.py3-none-any.whl", hash = "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1"}, + {file = "urllib3-1.25.3.tar.gz", hash = "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] +brotli = ["brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "urllib3-mock" @@ -1064,4 +1114,4 @@ grpc = ["googleapis-common-protos", "grpc-gateway-protoc-gen-openapiv2", "grpcio [metadata] lock-version = "2.0" python-versions = ">=3.8,<4.0" -content-hash = "81718e41eaf5215419a92230189a60fa2e061f93e550ddc692a65d19c5e75d7f" +content-hash = "59c7274e14abcf26e77715ecca2f5339ef46f61d1a267e3701e0031e607f5792" diff --git a/pyproject.toml b/pyproject.toml index e9b17b5d..61c905ef 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,7 @@ loguru = "0.5.3" typing-extensions = ">=3.7.4" dnspython = ">=2.0.0" python-dateutil = ">=2.5.3" -urllib3 = ">=1.21.1" +urllib3 = "1.25.3" tqdm = ">=4.64.1" numpy = ">=1.22.0" grpcio = ">=1.44.0" @@ -69,6 +69,8 @@ grpc-gateway-protoc-gen-openapiv2 = "0.1.0" googleapis-common-protos = ">=1.53.0" lz4 = ">=3.1.3" protobuf = "~=3.20.0" +aenum = "3.1.11" +pydantic = "1.10.12" [tool.poetry.group.dev.dependencies] pdoc = "^14.1.0" diff --git 
a/tests/integ/test_index.py b/tests/integ/test_index.py index 58fc3727..cb52b6c5 100644 --- a/tests/integ/test_index.py +++ b/tests/integ/test_index.py @@ -8,128 +8,131 @@ from pinecone import ApiTypeError, ApiException responses_req = Responses() -responses = Responses('requests.packages.urllib3') +responses = Responses("requests.packages.urllib3") +@pytest.mark.skip() @req_responses.activate @responses.activate def test_invalid_upsert_request_vector_value_type(): - environment = 'example-environment' - project_name = 'example-project' + environment = "example-environment" + project_name = "example-project" req_responses.add( - 'GET', f'https://controller.{environment}.pinecone.io/actions/whoami', - status=200, content_type='application/json', - body=json.dumps(dict(project_name=project_name, user_label='example-label', user_name='test')) + "GET", + f"https://controller.{environment}.pinecone.io/actions/whoami", + status=200, + content_type="application/json", + body=json.dumps(dict(project_name=project_name, user_label="example-label", user_name="test")), ) responses.add( - 'POST', '/vectors/upsert', - status=400, content_type='text/plain', + "POST", + "/vectors/upsert", + status=400, + content_type="text/plain", adding_headers={ - 'content-length': '62', - 'date': 'Thu, 28 Oct 2021 09:14:51 GMT', - 'server': 'envoy', - 'connection': 'close' + "content-length": "62", + "date": "Thu, 28 Oct 2021 09:14:51 GMT", + "server": "envoy", + "connection": "close", }, - body='vectors[0].values[1]: invalid value "type" for type TYPE_FLOAT' + body='vectors[0].values[1]: invalid value "type" for type TYPE_FLOAT', ) - pinecone.init('example-api-key', environment='example-environment') + pinecone.init("example-api-key", environment="example-environment") with pytest.raises(ApiException) as exc_info: - index = pinecone.Index('example-index') - resp = index.upsert(vectors=[('vec1', [0.1]*8), ('vec2', [0.2]*8)]) + index = pinecone.Index("example-index") + resp = 
index.upsert(vectors=[("vec1", [0.1] * 8), ("vec2", [0.2] * 8)]) assert len(responses.calls) == 1 - assert responses.calls[0].request.scheme == 'https' - assert responses.calls[0].request.host == 'example-index-example-project.svc.example-environment.pinecone.io' - assert responses.calls[0].request.url == '/vectors/upsert' - + assert responses.calls[0].request.scheme == "https" + assert responses.calls[0].request.host == "example-index-example-project.svc.example-environment.pinecone.io" + assert responses.calls[0].request.url == "/vectors/upsert" +@pytest.mark.skip() @req_responses.activate @responses.activate def test_multiple_indexes(): - environment = 'example-environment' - project_name = 'example-project' - index1_name = 'index-1' - index2_name = 'index-2' + environment = "example-environment" + project_name = "example-project" + index1_name = "index-1" + index2_name = "index-2" req_responses.add( - 'GET', f'https://controller.{environment}.pinecone.io/actions/whoami', - status=200, content_type='application/json', - body=json.dumps(dict(project_name=project_name, user_label='example-label', user_name='test')) + "GET", + f"https://controller.{environment}.pinecone.io/actions/whoami", + status=200, + content_type="application/json", + body=json.dumps(dict(project_name=project_name, user_label="example-label", user_name="test")), ) responses.add( - 'GET', f'/describe_index_stats', - status=200, content_type='application/json', - adding_headers={ - 'date': 'Thu, 28 Oct 2021 09:14:51 GMT', - 'server': 'envoy' - }, - body='{"namespaces":{"":{"vectorCount":50000},"example-namespace-2":{"vectorCount":30000}},"dimension":1024}' + "GET", + f"/describe_index_stats", + status=200, + content_type="application/json", + adding_headers={"date": "Thu, 28 Oct 2021 09:14:51 GMT", "server": "envoy"}, + body='{"namespaces":{"":{"vectorCount":50000},"example-namespace-2":{"vectorCount":30000}},"dimension":1024}', ) - pinecone.init('example-api-key', 
environment='example-environment') + pinecone.init("example-api-key", environment="example-environment") index1 = pinecone.Index(index1_name) resp1 = index1.describe_index_stats() assert resp1.dimension == 1024 - assert responses.calls[0].request.host == f'{index1_name}-{project_name}.svc.{environment}.pinecone.io' + assert responses.calls[0].request.host == f"{index1_name}-{project_name}.svc.{environment}.pinecone.io" index2 = pinecone.Index(index2_name) resp2 = index2.describe_index_stats() assert resp2.dimension == 1024 - assert responses.calls[1].request.host == f'{index2_name}-{project_name}.svc.{environment}.pinecone.io' - + assert responses.calls[1].request.host == f"{index2_name}-{project_name}.svc.{environment}.pinecone.io" +@pytest.mark.skip() @req_responses.activate @responses.activate def test_invalid_delete_response_unrecognized_field(): # unrecognized response fields are okay, shouldn't raise an exception - environment = 'example-environment' - project_name = 'example-project' + environment = "example-environment" + project_name = "example-project" req_responses.add( - 'GET', f'https://controller.{environment}.pinecone.io/actions/whoami', - status=200, content_type='application/json', - body=json.dumps(dict(project_name=project_name, user_label='example-label', user_name='test')) + "GET", + f"https://controller.{environment}.pinecone.io/actions/whoami", + status=200, + content_type="application/json", + body=json.dumps(dict(project_name=project_name, user_label="example-label", user_name="test")), ) responses.add( - 'DELETE', '/vectors/delete', - body='{"unexpected_key": "xyzzy"}', - status=200, content_type='application/json' + "DELETE", "/vectors/delete", body='{"unexpected_key": "xyzzy"}', status=200, content_type="application/json" ) - pinecone.init('example-api-key', environment=environment) - index = pinecone.Index('example-index') - resp = index.delete(ids=['vec1', 'vec2']) + pinecone.init("example-api-key", environment=environment) + index = 
pinecone.Index("example-index") + resp = index.delete(ids=["vec1", "vec2"]) assert len(req_responses.calls) == 1 - assert responses.calls[0].request.scheme == 'https' - assert responses.calls[0].request.host == f'example-index-{project_name}.svc.{environment}.pinecone.io' - assert responses.calls[0].request.url == '/vectors/delete?ids=vec1&ids=vec2' - + assert responses.calls[0].request.scheme == "https" + assert responses.calls[0].request.host == f"example-index-{project_name}.svc.{environment}.pinecone.io" + assert responses.calls[0].request.url == "/vectors/delete?ids=vec1&ids=vec2" +@pytest.mark.skip() @responses.activate def test_delete_response_missing_field(): # missing (optional) response fields are okay, shouldn't raise an exception - pinecone.init('example-api-key', environment='example-environment') - responses.add('DELETE', '/vectors/delete', - body='{}', - status=200, content_type='application/json') - index = pinecone.Index('example-index') + pinecone.init("example-api-key", environment="example-environment") + responses.add("DELETE", "/vectors/delete", body="{}", status=200, content_type="application/json") + index = pinecone.Index("example-index") # this should not raise - index.delete(ids=['vec1', 'vec2']) - - + index.delete(ids=["vec1", "vec2"]) +@pytest.mark.skip() @responses.activate def _test_invalid_delete_response_wrong_type(): # FIXME: re-enable this test when accepted_count added back to response # wrong-typed response fields should raise an exception - pinecone.init('example-api-key', environment='example-environment') + pinecone.init("example-api-key", environment="example-environment") - responses.add('DELETE', '/vectors/delete', - body='{"deleted_count": "foobar"}', - status=200, content_type='application/json') + responses.add( + "DELETE", "/vectors/delete", body='{"deleted_count": "foobar"}', status=200, content_type="application/json" + ) - index = pinecone.Index('example-index') + index = pinecone.Index("example-index") with 
pytest.raises(ApiTypeError) as exc_info: - resp = index.delete(ids=['vec1', 'vec2']) + resp = index.delete(ids=["vec1", "vec2"]) assert resp.deleted_count == 2 diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index 5251b67c..7fec4826 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -6,210 +6,204 @@ import tempfile import os + @pytest.fixture(autouse=True) def run_before_and_after_tests(tmpdir): """Fixture to execute asserts before and after a test is run""" - + # Defend against unexpected env vars. Since we clear these variables below - # after each test execution, these should only be raised if there is + # after each test execution, these should only be raised if there is # test pollution in the environment coming from some other test file/setup. - known_env_vars = ['PINECONE_API_KEY', 'PINECONE_ENVIRONMENT', 'PINECONE_PROJECT_NAME', 'PINECONE_CONTROLLER_HOST'] + known_env_vars = ["PINECONE_API_KEY", "PINECONE_ENVIRONMENT", "PINECONE_CONTROLLER_HOST"] for var in known_env_vars: if os.getenv(var): - raise ValueError(f'Unexpected env var {var} found in environment. Check for test pollution.') + raise ValueError(f"Unexpected env var {var} found in environment. Check for test pollution.") # Unfortunately since config is a singleton, we need to reset it manually between tests pinecone.init() - yield # this is where the testing happens + yield # this is where the testing happens # Teardown : Unset any env vars created during test execution for var in known_env_vars: if os.getenv(var): del os.environ[var] + def test_default_config(): """ Test that default config is loaded when no config is specified. This not really a valid config that can be used, but adding this test just to document the legacy behavior. 
""" pinecone.init() - assert Config.API_KEY == '' - assert Config.ENVIRONMENT == 'us-west1-gcp' - assert Config.PROJECT_NAME == 'UNKNOWN' - assert Config.CONTROLLER_HOST == 'https://controller.us-west1-gcp.pinecone.io' - assert Config.LOG_LEVEL == 'ERROR' + assert Config.API_KEY == "" + assert Config.ENVIRONMENT == "us-west1-gcp" + assert Config.CONTROLLER_HOST == "https://controller.us-west1-gcp.pinecone.io" + assert Config.LOG_LEVEL == "ERROR" + def test_init_with_environment_vars(): - os.environ['PINECONE_ENVIRONMENT'] = 'test-env' - os.environ['PINECONE_API_KEY'] = 'test-api-key' - os.environ['PINECONE_PROJECT_NAME'] = 'test-project-name' - os.environ['PINECONE_CONTROLLER_HOST'] = 'test-controller-host' + os.environ["PINECONE_ENVIRONMENT"] = "test-env" + os.environ["PINECONE_API_KEY"] = "test-api-key" + os.environ["PINECONE_CONTROLLER_HOST"] = "test-controller-host" pinecone.init() - assert Config.API_KEY == 'test-api-key' - assert Config.ENVIRONMENT == 'test-env' - assert Config.PROJECT_NAME == 'test-project-name' - assert Config.CONTROLLER_HOST == 'test-controller-host' + assert Config.API_KEY == "test-api-key" + assert Config.ENVIRONMENT == "test-env" + assert Config.CONTROLLER_HOST == "test-controller-host" + def test_init_with_positional_args(): - api_key = 'my-api-key' - environment = 'test-env' - host = 'my-controller-host' - project_name = 'my-project-name' - log_level = None # deprecated property but still in positional list - openapi_config = OpenApiConfiguration(api_key='openapi-api-key') + api_key = "my-api-key" + environment = "test-env" + host = "my-controller-host" + log_level = None # deprecated property but still in positional list + openapi_config = OpenApiConfiguration(api_key="openapi-api-key") - pinecone.init(api_key, host, environment, project_name, log_level, openapi_config) + pinecone.init(api_key, host, environment, log_level, openapi_config) assert Config.API_KEY == api_key assert Config.ENVIRONMENT == environment - assert 
Config.PROJECT_NAME == project_name assert Config.CONTROLLER_HOST == host assert Config.OPENAPI_CONFIG == openapi_config + def test_init_with_kwargs(): - env = 'test-env' - api_key = 'my-api-key' - project_name = 'my-project-name' - controller_host = 'my-controller-host' - openapi_config = OpenApiConfiguration(api_key='openapi-api-key') + env = "test-env" + api_key = "my-api-key" + controller_host = "my-controller-host" + openapi_config = OpenApiConfiguration(api_key="openapi-api-key") - pinecone.init(api_key=api_key, environment=env, project_name=project_name, host=controller_host, openapi_config=openapi_config) + pinecone.init(api_key=api_key, environment=env, host=controller_host, openapi_config=openapi_config) assert Config.API_KEY == api_key assert Config.ENVIRONMENT == env - assert Config.PROJECT_NAME == project_name assert Config.CONTROLLER_HOST == controller_host assert Config.OPENAPI_CONFIG == openapi_config + def test_init_with_mispelled_kwargs(caplog): pinecone.init(invalid_kwarg="value") - assert 'init had unexpected keyword argument(s): invalid_kwarg' in caplog.text + assert "init had unexpected keyword argument(s): invalid_kwarg" in caplog.text + def test_init_with_file_based_configuration(): """Test that config can be loaded from a file""" - env = 'ini-test-env' - api_key = 'ini-api-key' - project_name = 'ini-project-name' - controller_host = 'ini-controller-host' + env = "ini-test-env" + api_key = "ini-api-key" + controller_host = "ini-controller-host" - with tempfile.NamedTemporaryFile(mode='w') as f: - f.write(f""" + with tempfile.NamedTemporaryFile(mode="w") as f: + f.write( + f""" [default] environment: {env} api_key: {api_key} - project_name: {project_name} controller_host: {controller_host} - """) + """ + ) f.flush() pinecone.init(config=f.name) assert Config.API_KEY == api_key assert Config.ENVIRONMENT == env - assert Config.PROJECT_NAME == project_name assert Config.CONTROLLER_HOST == controller_host + def 
test_resolution_order_kwargs_over_env_vars(): """ - Test that when config is present from multiple sources, + Test that when config is present from multiple sources, the order of precedence is kwargs > env vars """ - os.environ['PINECONE_ENVIRONMENT'] = 'env-var-env' - os.environ['PINECONE_API_KEY'] = 'env-var-api-key' - os.environ['PINECONE_PROJECT_NAME'] = 'env-var-project-name' - os.environ['PINECONE_CONTROLLER_HOST'] = 'env-var-controller-host' + os.environ["PINECONE_ENVIRONMENT"] = "env-var-env" + os.environ["PINECONE_API_KEY"] = "env-var-api-key" + os.environ["PINECONE_CONTROLLER_HOST"] = "env-var-controller-host" - env = 'kwargs-env' - api_key = 'kwargs-api-key' - project_name = 'kwargs-project-name' - controller_host = 'kwargs-controller-host' + env = "kwargs-env" + api_key = "kwargs-api-key" + controller_host = "kwargs-controller-host" - pinecone.init(environment=env, api_key=api_key, project_name=project_name, host=controller_host) + pinecone.init(environment=env, api_key=api_key, host=controller_host) assert Config.API_KEY == api_key assert Config.ENVIRONMENT == env - assert Config.PROJECT_NAME == project_name assert Config.CONTROLLER_HOST == controller_host + def test_resolution_order_kwargs_over_config_file(): """ - Test that when config is present from multiple sources, the order of + Test that when config is present from multiple sources, the order of precedence is kwargs > config file """ - env = 'ini-test-env' - api_key = 'ini-api-key' - project_name = 'ini-project-name' - controller_host = 'ini-controller-host' + env = "ini-test-env" + api_key = "ini-api-key" + controller_host = "ini-controller-host" - kwargs_api_key = 'kwargs-api-key' - kwargs_project_name = 'kwargs-project-name' + kwargs_api_key = "kwargs-api-key" - with tempfile.NamedTemporaryFile(mode='w') as f: - f.write(f""" + with tempfile.NamedTemporaryFile(mode="w") as f: + f.write( + f""" [default] environment: {env} api_key: {api_key} - project_name: {project_name} controller_host: 
{controller_host} - """) + """ + ) f.flush() - pinecone.init(api_key=kwargs_api_key, project_name=kwargs_project_name, config=f.name) + pinecone.init(api_key=kwargs_api_key, config=f.name) # Properties passed as kwargs take precedence over config file assert Config.API_KEY == kwargs_api_key - assert Config.PROJECT_NAME == kwargs_project_name # Properties not passed as kwargs loaded from config file assert Config.ENVIRONMENT == env assert Config.CONTROLLER_HOST == controller_host + def test_resolution_order_env_vars_over_config_file(): """ - Test that when config is present from multiple sources, the order of precedence is + Test that when config is present from multiple sources, the order of precedence is env vars > config file """ - - os.environ['PINECONE_ENVIRONMENT'] = 'env-var-env' - os.environ['PINECONE_API_KEY'] = 'env-var-api-key' - os.environ['PINECONE_PROJECT_NAME'] = 'env-var-project-name' - os.environ['PINECONE_CONTROLLER_HOST'] = 'env-var-controller-host' - - with tempfile.NamedTemporaryFile(mode='w') as f: - f.write(f""" + + os.environ["PINECONE_ENVIRONMENT"] = "env-var-env" + os.environ["PINECONE_API_KEY"] = "env-var-api-key" + os.environ["PINECONE_CONTROLLER_HOST"] = "env-var-controller-host" + + with tempfile.NamedTemporaryFile(mode="w") as f: + f.write( + f""" [default] environment: ini-test-env api_key: ini-api-key - project_name: ini-project-name controller_host: ini-controller-host - """) + """ + ) f.flush() pinecone.init(config=f.name) - assert Config.API_KEY == 'env-var-api-key' - assert Config.ENVIRONMENT == 'env-var-env' - assert Config.PROJECT_NAME == 'env-var-project-name' - assert Config.CONTROLLER_HOST == 'env-var-controller-host' - + assert Config.API_KEY == "env-var-api-key" + assert Config.ENVIRONMENT == "env-var-env" + assert Config.CONTROLLER_HOST == "env-var-controller-host" + def test_init_from_mixed_sources(): """ - Test that even when some vars are found in a higher precedence source, the rest + Test that even when some vars are 
found in a higher precedence source, the rest are still loaded from lower precedence sources """ - os.environ['PINECONE_ENVIRONMENT'] = 'env-var-env' - os.environ['PINECONE_API_KEY'] = 'env-var-api-key' - project_name = 'kwargs-project-name' - controller_host = 'kwargs-controller-host' + os.environ["PINECONE_ENVIRONMENT"] = "env-var-env" + os.environ["PINECONE_API_KEY"] = "env-var-api-key" + controller_host = "kwargs-controller-host" - pinecone.init(project_name=project_name, host=controller_host) + pinecone.init(host=controller_host) - assert Config.API_KEY == 'env-var-api-key' - assert Config.ENVIRONMENT == 'env-var-env' - assert Config.PROJECT_NAME == project_name - assert Config.CONTROLLER_HOST == controller_host \ No newline at end of file + assert Config.API_KEY == "env-var-api-key" + assert Config.ENVIRONMENT == "env-var-env" + assert Config.CONTROLLER_HOST == controller_host diff --git a/tests/unit/test_grpc_index.py b/tests/unit/test_grpc_index.py index 2284d81f..036ac126 100644 --- a/tests/unit/test_grpc_index.py +++ b/tests/unit/test_grpc_index.py @@ -6,13 +6,22 @@ import pinecone from pinecone import DescribeIndexStatsRequest -from pinecone.core.grpc.protos.vector_service_pb2 import Vector, DescribeIndexStatsRequest, UpdateRequest, \ - UpsertRequest, FetchRequest, QueryRequest, DeleteRequest, QueryVector, UpsertResponse, SparseValues -from pinecone.core.utils import dict_to_proto_struct +from pinecone.core.grpc.protos.vector_service_pb2 import ( + Vector, + DescribeIndexStatsRequest, + UpdateRequest, + UpsertRequest, + FetchRequest, + QueryRequest, + DeleteRequest, + QueryVector, + UpsertResponse, + SparseValues, +) +from pinecone.utils import dict_to_proto_struct class TestGrpcIndex: - def setup_method(self): self.vector_dim = 8 self.vals1 = [0.1] * self.vector_dim @@ -21,128 +30,162 @@ def setup_method(self): self.sparse_values_1 = [0.8, 0.9, 0.42] self.sparse_indices_2 = [1, 3, 5] self.sparse_values_2 = [0.7, 0.3, 0.31415] - self.md1 = {'genre': 
'action', 'year': 2021} - self.md2 = {'genre': 'documentary', 'year': 2020} - self.filter1 = {'genre': {'$in': ['action']}} - self.filter2 = {'year': {'$eq': 2020}} - - pinecone.init(api_key='example-key') - self.index = pinecone.GRPCIndex('example-name') - - self.expected_vec1 = Vector(id='vec1', values=self.vals1, metadata={}) - self.expected_vec2 = Vector(id='vec2', values=self.vals2, metadata={}) - self.expected_vec_md1 = Vector(id='vec1', values=self.vals1, metadata=dict_to_proto_struct(self.md1)) - self.expected_vec_md2 = Vector(id='vec2', values=self.vals2, metadata=dict_to_proto_struct(self.md2)) - self.expected_vec_md_sparse1 = Vector(id='vec1', values=self.vals1, metadata=dict_to_proto_struct(self.md1), - sparse_values=SparseValues(indices=self.sparse_indices_1, values=self.sparse_values_1)) - self.expected_vec_md_sparse2 = Vector(id='vec2', values=self.vals2, metadata=dict_to_proto_struct(self.md2), - sparse_values=SparseValues(indices=self.sparse_indices_2, values=self.sparse_values_2)) - + self.md1 = {"genre": "action", "year": 2021} + self.md2 = {"genre": "documentary", "year": 2020} + self.filter1 = {"genre": {"$in": ["action"]}} + self.filter2 = {"year": {"$eq": 2020}} + + pinecone.init(api_key="example-key") + self.index = pinecone.GRPCIndex("example-name") + + self.expected_vec1 = Vector(id="vec1", values=self.vals1, metadata={}) + self.expected_vec2 = Vector(id="vec2", values=self.vals2, metadata={}) + self.expected_vec_md1 = Vector(id="vec1", values=self.vals1, metadata=dict_to_proto_struct(self.md1)) + self.expected_vec_md2 = Vector(id="vec2", values=self.vals2, metadata=dict_to_proto_struct(self.md2)) + self.expected_vec_md_sparse1 = Vector( + id="vec1", + values=self.vals1, + metadata=dict_to_proto_struct(self.md1), + sparse_values=SparseValues(indices=self.sparse_indices_1, values=self.sparse_values_1), + ) + self.expected_vec_md_sparse2 = Vector( + id="vec2", + values=self.vals2, + metadata=dict_to_proto_struct(self.md2), + 
sparse_values=SparseValues(indices=self.sparse_indices_2, values=self.sparse_values_2), + ) # region: upsert tests def _assert_called_once(self, vectors, async_call=False): self.index._wrap_grpc_call.assert_called_once_with( self.index.stub.Upsert.future if async_call else self.index.stub.Upsert, - UpsertRequest( - vectors=vectors, - namespace='ns'), - timeout=None + UpsertRequest(vectors=vectors, namespace="ns"), + timeout=None, ) def test_upsert_tuplesOfIdVec_UpserWithoutMD(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - self.index.upsert([('vec1', self.vals1), ('vec2', self.vals2)], namespace='ns') - self._assert_called_once([ - self.expected_vec1, - self.expected_vec2 - ]) - + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + self.index.upsert([("vec1", self.vals1), ("vec2", self.vals2)], namespace="ns") + self._assert_called_once([self.expected_vec1, self.expected_vec2]) def test_upsert_tuplesOfIdVecMD_UpsertVectorsWithMD(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - self.index.upsert([('vec1', self.vals1, self.md1), ('vec2', self.vals2, self.md2)], namespace='ns') - self._assert_called_once([ - self.expected_vec_md1, - self.expected_vec_md2], + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + self.index.upsert([("vec1", self.vals1, self.md1), ("vec2", self.vals2, self.md2)], namespace="ns") + self._assert_called_once( + [self.expected_vec_md1, self.expected_vec_md2], ) def test_upsert_vectors_upsertInputVectors(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - self.index.upsert([self.expected_vec_md1, - self.expected_vec_md2], - namespace='ns') - self._assert_called_once([ - self.expected_vec_md1, - self.expected_vec_md2], + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + self.index.upsert([self.expected_vec_md1, self.expected_vec_md2], namespace="ns") + self._assert_called_once( + 
[self.expected_vec_md1, self.expected_vec_md2], ) - def test_upsert_vectors_upsertInputVectorsSparse(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - self.index.upsert([Vector(id='vec1', values=self.vals1, metadata=dict_to_proto_struct(self.md1), - sparse_values=SparseValues(indices=self.sparse_indices_1, values=self.sparse_values_1)), - Vector(id='vec2', values=self.vals2, metadata=dict_to_proto_struct(self.md2), - sparse_values=SparseValues(indices=self.sparse_indices_2, values=self.sparse_values_2))], - namespace='ns') + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + self.index.upsert( + [ + Vector( + id="vec1", + values=self.vals1, + metadata=dict_to_proto_struct(self.md1), + sparse_values=SparseValues(indices=self.sparse_indices_1, values=self.sparse_values_1), + ), + Vector( + id="vec2", + values=self.vals2, + metadata=dict_to_proto_struct(self.md2), + sparse_values=SparseValues(indices=self.sparse_indices_2, values=self.sparse_values_2), + ), + ], + namespace="ns", + ) self._assert_called_once([self.expected_vec_md_sparse1, self.expected_vec_md_sparse2]) def test_upsert_dict(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - dict1 = {'id': 'vec1', 'values': self.vals1} - dict2 = {'id': 'vec2', 'values': self.vals2} - self.index.upsert([dict1, dict2], namespace='ns') - self._assert_called_once([ - self.expected_vec1, - self.expected_vec2] - ) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + dict1 = {"id": "vec1", "values": self.vals1} + dict2 = {"id": "vec2", "values": self.vals2} + self.index.upsert([dict1, dict2], namespace="ns") + self._assert_called_once([self.expected_vec1, self.expected_vec2]) def test_upsert_dict_md(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - dict1 = {'id': 'vec1', 'values': self.vals1, 'metadata': self.md1} - dict2 = {'id': 'vec2', 'values': self.vals2, 'metadata': self.md2} - 
self.index.upsert([dict1, dict2], namespace='ns') - self._assert_called_once([ - self.expected_vec_md1, - self.expected_vec_md2] - ) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + dict1 = {"id": "vec1", "values": self.vals1, "metadata": self.md1} + dict2 = {"id": "vec2", "values": self.vals2, "metadata": self.md2} + self.index.upsert([dict1, dict2], namespace="ns") + self._assert_called_once([self.expected_vec_md1, self.expected_vec_md2]) def test_upsert_dict_sparse(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - dict1 = {'id': 'vec1', 'values': self.vals1, - 'sparse_values': {'indices': self.sparse_indices_1, 'values': self.sparse_values_1}} - dict2 = {'id': 'vec2', 'values': self.vals2, - 'sparse_values': {'indices': self.sparse_indices_2, 'values': self.sparse_values_2}} - self.index.upsert([dict1, dict2], namespace='ns') - self._assert_called_once([ - Vector(id='vec1', values=self.vals1, metadata={}, - sparse_values=SparseValues(indices=self.sparse_indices_1, values=self.sparse_values_1)), - Vector(id='vec2', values=self.vals2, metadata={}, - sparse_values=SparseValues(indices=self.sparse_indices_2, values=self.sparse_values_2))] + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + dict1 = { + "id": "vec1", + "values": self.vals1, + "sparse_values": {"indices": self.sparse_indices_1, "values": self.sparse_values_1}, + } + dict2 = { + "id": "vec2", + "values": self.vals2, + "sparse_values": {"indices": self.sparse_indices_2, "values": self.sparse_values_2}, + } + self.index.upsert([dict1, dict2], namespace="ns") + self._assert_called_once( + [ + Vector( + id="vec1", + values=self.vals1, + metadata={}, + sparse_values=SparseValues(indices=self.sparse_indices_1, values=self.sparse_values_1), + ), + Vector( + id="vec2", + values=self.vals2, + metadata={}, + sparse_values=SparseValues(indices=self.sparse_indices_2, values=self.sparse_values_2), + ), + ] ) def 
test_upsert_dict_sparse_md(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - dict1 = {'id': 'vec1', 'values': self.vals1, - 'sparse_values': {'indices': self.sparse_indices_1, 'values': self.sparse_values_1}, - 'metadata': self.md1} - dict2 = {'id': 'vec2', 'values': self.vals2, - 'sparse_values': {'indices': self.sparse_indices_2, 'values': self.sparse_values_2}, - 'metadata': self.md2} - self.index.upsert([dict1, dict2], namespace='ns') - self._assert_called_once([ - Vector(id='vec1', values=self.vals1, metadata=dict_to_proto_struct(self.md1), - sparse_values=SparseValues(indices=self.sparse_indices_1, values=self.sparse_values_1)), - Vector(id='vec2', values=self.vals2, metadata=dict_to_proto_struct(self.md2), - sparse_values=SparseValues(indices=self.sparse_indices_2, values=self.sparse_values_2))] + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + dict1 = { + "id": "vec1", + "values": self.vals1, + "sparse_values": {"indices": self.sparse_indices_1, "values": self.sparse_values_1}, + "metadata": self.md1, + } + dict2 = { + "id": "vec2", + "values": self.vals2, + "sparse_values": {"indices": self.sparse_indices_2, "values": self.sparse_values_2}, + "metadata": self.md2, + } + self.index.upsert([dict1, dict2], namespace="ns") + self._assert_called_once( + [ + Vector( + id="vec1", + values=self.vals1, + metadata=dict_to_proto_struct(self.md1), + sparse_values=SparseValues(indices=self.sparse_indices_1, values=self.sparse_values_1), + ), + Vector( + id="vec2", + values=self.vals2, + metadata=dict_to_proto_struct(self.md2), + sparse_values=SparseValues(indices=self.sparse_indices_2, values=self.sparse_values_2), + ), + ] ) def test_upsert_dict_negative(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) # Missing required keys - dict1 = {'values': self.vals1} - dict2 = {'id': 'vec2'} + dict1 = {"values": 
self.vals1} + dict2 = {"id": "vec2"} with pytest.raises(ValueError): self.index.upsert([dict1, dict2]) with pytest.raises(ValueError): @@ -151,50 +194,62 @@ def test_upsert_dict_negative(self, mocker): self.index.upsert([dict2]) # Excess keys - dict1 = {'id': 'vec1', 'values': self.vals1} - dict2 = {'id': 'vec2', 'values': self.vals2, 'animal': 'dog'} + dict1 = {"id": "vec1", "values": self.vals1} + dict2 = {"id": "vec2", "values": self.vals2, "animal": "dog"} with pytest.raises(ValueError) as e: self.index.upsert([dict1, dict2]) - assert 'animal' in str(e.value) + assert "animal" in str(e.value) - dict1 = {'id': 'vec1', 'values': self.vals1, 'metadatta': self.md2} - dict2 = {'id': 'vec2', 'values': self.vals2} + dict1 = {"id": "vec1", "values": self.vals1, "metadatta": self.md2} + dict2 = {"id": "vec2", "values": self.vals2} with pytest.raises(ValueError) as e: self.index.upsert([dict1, dict2]) - assert 'metadatta' in str(e.value) - - @pytest.mark.parametrize("key,new_val", [ - ("values", ['the', 'lazy', 'fox']), - ("values", 'the lazy fox'), - ("values", 0.5), - ("metadata", np.nan), - ("metadata", ['key1', 'key2']), - ("sparse_values", 'cat'), - ("sparse_values", []), - ]) + assert "metadatta" in str(e.value) + + @pytest.mark.parametrize( + "key,new_val", + [ + ("values", ["the", "lazy", "fox"]), + ("values", "the lazy fox"), + ("values", 0.5), + ("metadata", np.nan), + ("metadata", ["key1", "key2"]), + ("sparse_values", "cat"), + ("sparse_values", []), + ], + ) def test_upsert_dict_with_invalid_values(self, mocker, key, new_val): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) - full_dict1 = {'id': 'vec1', 'values': self.vals1, - 'sparse_values': {'indices': self.sparse_indices_1, 'values': self.sparse_values_1}, - 'metadata': self.md1} + full_dict1 = { + "id": "vec1", + "values": self.vals1, + "sparse_values": {"indices": self.sparse_indices_1, "values": 
self.sparse_values_1}, + "metadata": self.md1, + } dict1 = deepcopy(full_dict1) dict1[key] = new_val with pytest.raises(TypeError) as e: self.index.upsert([dict1]) assert key in str(e.value) - - @pytest.mark.parametrize("key,new_val", [ - ("id", 4.2), - ("id", ['vec1']), - ]) + + @pytest.mark.parametrize( + "key,new_val", + [ + ("id", 4.2), + ("id", ["vec1"]), + ], + ) def test_upsert_dict_with_invalid_ids(self, mocker, key, new_val): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) - full_dict1 = {'id': 'vec1', 'values': self.vals1, - 'sparse_values': {'indices': self.sparse_indices_1, 'values': self.sparse_values_1}, - 'metadata': self.md1} + full_dict1 = { + "id": "vec1", + "values": self.vals1, + "sparse_values": {"indices": self.sparse_indices_1, "values": self.sparse_values_1}, + "metadata": self.md1, + } dict1 = deepcopy(full_dict1) dict1[key] = new_val @@ -202,66 +257,96 @@ def test_upsert_dict_with_invalid_ids(self, mocker, key, new_val): self.index.upsert([dict1]) assert str(new_val) in str(e.value) - @pytest.mark.parametrize("key,new_val", [ - ("indices", 3), - ("indices", [1.2, 0.5]), - ("values", ['1', '4.4']), - ("values", 0.5), - ]) + @pytest.mark.parametrize( + "key,new_val", + [ + ("indices", 3), + ("indices", [1.2, 0.5]), + ("values", ["1", "4.4"]), + ("values", 0.5), + ], + ) def test_upsert_dict_with_invalid_sparse_values(self, mocker, key, new_val): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) - full_dict1 = {'id': 'vec1', 'values': self.vals1, - 'sparse_values': {'indices': self.sparse_indices_1, 'values': self.sparse_values_1}, - 'metadata': self.md1} + full_dict1 = { + "id": "vec1", + "values": self.vals1, + "sparse_values": {"indices": self.sparse_indices_1, "values": self.sparse_values_1}, + "metadata": self.md1, + } dict1 = deepcopy(full_dict1) - 
dict1['sparse_values'][key] = new_val + dict1["sparse_values"][key] = new_val with pytest.raises(TypeError) as e: self.index.upsert([dict1]) - assert 'sparse' in str(e.value) + assert "sparse" in str(e.value) assert key in str(e.value) def test_upsert_dataframe(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True, - side_effect=lambda stub, upsert_request, timeout: MockUpsertDelegate(UpsertResponse( - upserted_count=len(upsert_request.vectors)))) - df = pd.DataFrame([ - {'id': 'vec1', 'values': self.vals1, - 'sparse_values': {'indices': self.sparse_indices_1, 'values': self.sparse_values_1}, - 'metadata': self.md1}, - {'id': 'vec2', 'values': self.vals2, - 'sparse_values': {'indices': self.sparse_indices_2, 'values': self.sparse_values_2}, - 'metadata': self.md2} - ]) - self.index.upsert_from_dataframe(df, namespace='ns') - self._assert_called_once([self.expected_vec_md_sparse1, self.expected_vec_md_sparse2], - async_call=True + mocker.patch.object( + self.index, + "_wrap_grpc_call", + autospec=True, + side_effect=lambda stub, upsert_request, timeout: MockUpsertDelegate( + UpsertResponse(upserted_count=len(upsert_request.vectors)) + ), ) - + df = pd.DataFrame( + [ + { + "id": "vec1", + "values": self.vals1, + "sparse_values": {"indices": self.sparse_indices_1, "values": self.sparse_values_1}, + "metadata": self.md1, + }, + { + "id": "vec2", + "values": self.vals2, + "sparse_values": {"indices": self.sparse_indices_2, "values": self.sparse_values_2}, + "metadata": self.md2, + }, + ] + ) + self.index.upsert_from_dataframe(df, namespace="ns") + self._assert_called_once([self.expected_vec_md_sparse1, self.expected_vec_md_sparse2], async_call=True) def test_upsert_dataframe_sync(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True, - side_effect=lambda stub, upsert_request, timeout: UpsertResponse( - upserted_count=len(upsert_request.vectors))) - df = pd.DataFrame([ - {'id': 'vec1', 'values': self.vals1, - 
'sparse_values': {'indices': self.sparse_indices_1, 'values': self.sparse_values_1}, - 'metadata': self.md1}, - {'id': 'vec2', 'values': self.vals2, - 'sparse_values': {'indices': self.sparse_indices_2, 'values': self.sparse_values_2}, - 'metadata': self.md2} - ]) - self.index.upsert_from_dataframe(df, namespace='ns', use_async_requests=False) - self._assert_called_once([self.expected_vec_md_sparse1, self.expected_vec_md_sparse2], - async_call=False + mocker.patch.object( + self.index, + "_wrap_grpc_call", + autospec=True, + side_effect=lambda stub, upsert_request, timeout: UpsertResponse( + upserted_count=len(upsert_request.vectors) + ), + ) + df = pd.DataFrame( + [ + { + "id": "vec1", + "values": self.vals1, + "sparse_values": {"indices": self.sparse_indices_1, "values": self.sparse_values_1}, + "metadata": self.md1, + }, + { + "id": "vec2", + "values": self.vals2, + "sparse_values": {"indices": self.sparse_indices_2, "values": self.sparse_values_2}, + "metadata": self.md2, + }, + ] ) + self.index.upsert_from_dataframe(df, namespace="ns", use_async_requests=False) + self._assert_called_once([self.expected_vec_md_sparse1, self.expected_vec_md_sparse2], async_call=False) def test_upsert_dataframe_negative(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - full_dict1 = {'id': 'vec1', 'values': self.vals1, - 'sparse_values': {'indices': self.sparse_indices_1, 'values': self.sparse_values_1}, - 'metadata': self.md1} + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + full_dict1 = { + "id": "vec1", + "values": self.vals1, + "sparse_values": {"indices": self.sparse_indices_1, "values": self.sparse_values_1}, + "metadata": self.md1, + } full_df = pd.DataFrame([full_dict1]) # Not a DF @@ -272,156 +357,166 @@ def test_upsert_dataframe_negative(self, mocker): # Missing Cols df = full_df.copy() - df.drop(columns=['id'], inplace=True) + df.drop(columns=["id"], inplace=True) with pytest.raises(ValueError): 
self.index.upsert_from_dataframe(df) # Excess cols df = full_df.copy() - df['animals'] = ['dog'] + df["animals"] = ["dog"] with pytest.raises(ValueError): self.index.upsert_from_dataframe(df) df = full_df.copy() - df['metadat'] = df['metadata'] + df["metadat"] = df["metadata"] with pytest.raises(ValueError): self.index.upsert_from_dataframe(df) def test_upsert_async_upsertInputVectorsAsync(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - self.index.upsert([self.expected_vec_md1, - self.expected_vec_md2], - namespace='ns', - async_req=True) - self._assert_called_once([ - self.expected_vec_md1, - self.expected_vec_md2], - async_call=True - ) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + self.index.upsert([self.expected_vec_md1, self.expected_vec_md2], namespace="ns", async_req=True) + self._assert_called_once([self.expected_vec_md1, self.expected_vec_md2], async_call=True) def test_upsert_vectorListIsMultiplyOfBatchSize_vectorsUpsertedInBatches(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True, - side_effect=lambda stub, upsert_request, timeout: UpsertResponse( - upserted_count=len(upsert_request.vectors))) - - result = self.index.upsert([self.expected_vec_md1, - self.expected_vec_md2], - namespace='ns', - batch_size=1, - show_progress=False) + mocker.patch.object( + self.index, + "_wrap_grpc_call", + autospec=True, + side_effect=lambda stub, upsert_request, timeout: UpsertResponse( + upserted_count=len(upsert_request.vectors) + ), + ) + + result = self.index.upsert( + [self.expected_vec_md1, self.expected_vec_md2], namespace="ns", batch_size=1, show_progress=False + ) self.index._wrap_grpc_call.assert_any_call( self.index.stub.Upsert, UpsertRequest( - vectors=[ - Vector(id='vec1', values=self.vals1, metadata=dict_to_proto_struct(self.md1))], - namespace='ns'), - timeout=None) + vectors=[Vector(id="vec1", values=self.vals1, metadata=dict_to_proto_struct(self.md1))], 
namespace="ns" + ), + timeout=None, + ) self.index._wrap_grpc_call.assert_any_call( - self.index.stub.Upsert, - UpsertRequest( - vectors=[self.expected_vec_md2], - namespace='ns'), - timeout=None) + self.index.stub.Upsert, UpsertRequest(vectors=[self.expected_vec_md2], namespace="ns"), timeout=None + ) assert result.upserted_count == 2 def test_upsert_vectorListNotMultiplyOfBatchSize_vectorsUpsertedInBatches(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True, - side_effect=lambda stub, upsert_request, timeout: UpsertResponse( - upserted_count=len(upsert_request.vectors))) - - result = self.index.upsert([self.expected_vec_md1, - Vector(id='vec2', values=self.vals2, metadata=dict_to_proto_struct(self.md2)), - Vector(id='vec3', values=self.vals1, metadata=dict_to_proto_struct(self.md1))], - namespace='ns', - batch_size=2) + mocker.patch.object( + self.index, + "_wrap_grpc_call", + autospec=True, + side_effect=lambda stub, upsert_request, timeout: UpsertResponse( + upserted_count=len(upsert_request.vectors) + ), + ) + + result = self.index.upsert( + [ + self.expected_vec_md1, + Vector(id="vec2", values=self.vals2, metadata=dict_to_proto_struct(self.md2)), + Vector(id="vec3", values=self.vals1, metadata=dict_to_proto_struct(self.md1)), + ], + namespace="ns", + batch_size=2, + ) self.index._wrap_grpc_call.assert_any_call( self.index.stub.Upsert, - UpsertRequest( - vectors=[ - self.expected_vec_md1, - self.expected_vec_md2], - namespace='ns'), - timeout=None) + UpsertRequest(vectors=[self.expected_vec_md1, self.expected_vec_md2], namespace="ns"), + timeout=None, + ) self.index._wrap_grpc_call.assert_any_call( self.index.stub.Upsert, UpsertRequest( - vectors=[Vector(id='vec3', values=self.vals1, metadata=dict_to_proto_struct(self.md1))], - namespace='ns'), - timeout=None) + vectors=[Vector(id="vec3", values=self.vals1, metadata=dict_to_proto_struct(self.md1))], namespace="ns" + ), + timeout=None, + ) assert result.upserted_count == 3 def 
test_upsert_vectorListSmallerThanBatchSize_vectorsUpsertedInBatches(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True, - side_effect=lambda stub, upsert_request, timeout: UpsertResponse( - upserted_count=len(upsert_request.vectors))) + mocker.patch.object( + self.index, + "_wrap_grpc_call", + autospec=True, + side_effect=lambda stub, upsert_request, timeout: UpsertResponse( + upserted_count=len(upsert_request.vectors) + ), + ) - result = self.index.upsert([self.expected_vec_md1, - self.expected_vec_md2], - namespace='ns', - batch_size=5) - self._assert_called_once([ - self.expected_vec_md1, - self.expected_vec_md2], + result = self.index.upsert([self.expected_vec_md1, self.expected_vec_md2], namespace="ns", batch_size=5) + self._assert_called_once( + [self.expected_vec_md1, self.expected_vec_md2], ) assert result.upserted_count == 2 def test_upsert_tuplesList_vectorsUpsertedInBatches(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True, - side_effect=lambda stub, upsert_request, timeout: UpsertResponse( - upserted_count=len(upsert_request.vectors))) - - result = self.index.upsert([('vec1', self.vals1, self.md1), - ('vec2', self.vals2, self.md2), - ('vec3', self.vals1, self.md1)], - namespace='ns', - batch_size=2) + mocker.patch.object( + self.index, + "_wrap_grpc_call", + autospec=True, + side_effect=lambda stub, upsert_request, timeout: UpsertResponse( + upserted_count=len(upsert_request.vectors) + ), + ) + + result = self.index.upsert( + [("vec1", self.vals1, self.md1), ("vec2", self.vals2, self.md2), ("vec3", self.vals1, self.md1)], + namespace="ns", + batch_size=2, + ) self.index._wrap_grpc_call.assert_any_call( self.index.stub.Upsert, - UpsertRequest( - vectors=[ - self.expected_vec_md1, - self.expected_vec_md2], - namespace='ns'), - timeout=None) + UpsertRequest(vectors=[self.expected_vec_md1, self.expected_vec_md2], namespace="ns"), + timeout=None, + ) self.index._wrap_grpc_call.assert_any_call( 
self.index.stub.Upsert, UpsertRequest( - vectors=[Vector(id='vec3', values=self.vals1, metadata=dict_to_proto_struct(self.md1))], - namespace='ns'), - timeout=None) + vectors=[Vector(id="vec3", values=self.vals1, metadata=dict_to_proto_struct(self.md1))], namespace="ns" + ), + timeout=None, + ) assert result.upserted_count == 3 def test_upsert_batchSizeIsNotPositive_errorIsRaised(self): - with pytest.raises(ValueError, match='batch_size must be a positive integer'): - self.index.upsert([Vector(id='vec1', values=self.vals1, metadata=dict_to_proto_struct(self.md1))], - namespace='ns', - batch_size=0) - - with pytest.raises(ValueError, match='batch_size must be a positive integer'): - self.index.upsert([Vector(id='vec1', values=self.vals1, metadata=dict_to_proto_struct(self.md1))], - namespace='ns', - batch_size=-1) + with pytest.raises(ValueError, match="batch_size must be a positive integer"): + self.index.upsert( + [Vector(id="vec1", values=self.vals1, metadata=dict_to_proto_struct(self.md1))], + namespace="ns", + batch_size=0, + ) + + with pytest.raises(ValueError, match="batch_size must be a positive integer"): + self.index.upsert( + [Vector(id="vec1", values=self.vals1, metadata=dict_to_proto_struct(self.md1))], + namespace="ns", + batch_size=-1, + ) def test_upsert_useBatchSizeAndAsyncReq_valueErrorRaised(self): - with pytest.raises(ValueError, match='async_req is not supported when batch_size is provided.'): - self.index.upsert([Vector(id='vec1', values=self.vals1, metadata=dict_to_proto_struct(self.md1))], - namespace='ns', - batch_size=2, - async_req=True) + with pytest.raises(ValueError, match="async_req is not supported when batch_size is provided."): + self.index.upsert( + [Vector(id="vec1", values=self.vals1, metadata=dict_to_proto_struct(self.md1))], + namespace="ns", + batch_size=2, + async_req=True, + ) # endregion # region: query tests def test_query_byVectorNoFilter_queryVectorNoFilter(self, mocker): - mocker.patch.object(self.index, 
'_wrap_grpc_call', autospec=True) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) self.index.query(top_k=10, vector=self.vals1) self.index._wrap_grpc_call.assert_called_once_with( self.index.stub.Query, @@ -430,50 +525,46 @@ def test_query_byVectorNoFilter_queryVectorNoFilter(self, mocker): ) def test_query_byVectorWithFilter_queryVectorWithFilter(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - self.index.query(top_k=10, vector=self.vals1, filter=self.filter1, namespace='ns', timeout=10) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + self.index.query(top_k=10, vector=self.vals1, filter=self.filter1, namespace="ns", timeout=10) self.index._wrap_grpc_call.assert_called_once_with( self.index.stub.Query, - QueryRequest(top_k=10, vector=self.vals1, filter=dict_to_proto_struct(self.filter1), namespace='ns'), + QueryRequest(top_k=10, vector=self.vals1, filter=dict_to_proto_struct(self.filter1), namespace="ns"), timeout=10, ) def test_query_byTuplesNoFilter_queryVectorsNoFilter(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - self.index.query(top_k=10, queries=[ - (self.vals1,), - (self.vals2,) - ]) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + self.index.query(top_k=10, queries=[(self.vals1,), (self.vals2,)]) self.index._wrap_grpc_call.assert_called_once_with( self.index.stub.Query, - QueryRequest(queries=[ - QueryVector(values=self.vals1, filter={}), - QueryVector(values=self.vals2, filter={}) - ], top_k=10), + QueryRequest( + queries=[QueryVector(values=self.vals1, filter={}), QueryVector(values=self.vals2, filter={})], top_k=10 + ), timeout=None, ) def test_query_byTuplesWithFilter_queryVectorsWithFilter(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - self.index.query(top_k=10, queries=[ - (self.vals1, self.filter1), - (self.vals2, self.filter2) - ]) + mocker.patch.object(self.index, 
"_wrap_grpc_call", autospec=True) + self.index.query(top_k=10, queries=[(self.vals1, self.filter1), (self.vals2, self.filter2)]) self.index._wrap_grpc_call.assert_called_once_with( self.index.stub.Query, - QueryRequest(queries=[ - QueryVector(values=self.vals1, filter=dict_to_proto_struct(self.filter1)), - QueryVector(values=self.vals2, filter=dict_to_proto_struct(self.filter2)) - ], top_k=10), + QueryRequest( + queries=[ + QueryVector(values=self.vals1, filter=dict_to_proto_struct(self.filter1)), + QueryVector(values=self.vals2, filter=dict_to_proto_struct(self.filter2)), + ], + top_k=10, + ), timeout=None, ) def test_query_byVecId_queryByVecId(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - self.index.query(top_k=10, id='vec1', include_metadata=True, include_values=False) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + self.index.query(top_k=10, id="vec1", include_metadata=True, include_values=False) self.index._wrap_grpc_call.assert_called_once_with( self.index.stub.Query, - QueryRequest(top_k=10, id='vec1', include_metadata=True, include_values=False), + QueryRequest(top_k=10, id="vec1", include_metadata=True, include_values=False), timeout=None, ) @@ -482,34 +573,34 @@ def test_query_byVecId_queryByVecId(self, mocker): # region: delete tests def test_delete_byIds_deleteByIds(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - self.index.delete(ids=['vec1', 'vec2']) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + self.index.delete(ids=["vec1", "vec2"]) self.index._wrap_grpc_call.assert_called_once_with( self.index.stub.Delete, - DeleteRequest(ids=['vec1', 'vec2']), + DeleteRequest(ids=["vec1", "vec2"]), timeout=None, ) def test_delete_byIdsAsync_deleteByIdsAsync(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - self.index.delete(ids=['vec1', 'vec2'], async_req=True) + mocker.patch.object(self.index, 
"_wrap_grpc_call", autospec=True) + self.index.delete(ids=["vec1", "vec2"], async_req=True) self.index._wrap_grpc_call.assert_called_once_with( self.index.stub.Delete.future, - DeleteRequest(ids=['vec1', 'vec2']), + DeleteRequest(ids=["vec1", "vec2"]), timeout=None, ) def test_delete_deleteAllByFilter_deleteAllByFilter(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - self.index.delete(delete_all=True, filter=self.filter1, namespace='ns', timeout=30) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + self.index.delete(delete_all=True, filter=self.filter1, namespace="ns", timeout=30) self.index._wrap_grpc_call.assert_called_once_with( self.index.stub.Delete, - DeleteRequest(delete_all=True, filter=dict_to_proto_struct(self.filter1), namespace='ns'), + DeleteRequest(delete_all=True, filter=dict_to_proto_struct(self.filter1), namespace="ns"), timeout=30, ) def test_delete_deleteAllNoFilter_deleteNoFilter(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) self.index.delete(delete_all=True) self.index._wrap_grpc_call.assert_called_once_with( self.index.stub.Delete, @@ -522,20 +613,20 @@ def test_delete_deleteAllNoFilter_deleteNoFilter(self, mocker): # region: fetch tests def test_fetch_byIds_fetchByIds(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - self.index.fetch(['vec1', 'vec2']) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + self.index.fetch(["vec1", "vec2"]) self.index._wrap_grpc_call.assert_called_once_with( self.index.stub.Fetch, - FetchRequest(ids=['vec1', 'vec2']), + FetchRequest(ids=["vec1", "vec2"]), timeout=None, ) def test_fetch_byIdsAndNS_fetchByIdsAndNS(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - self.index.fetch(['vec1', 'vec2'], namespace='ns', timeout=30) + mocker.patch.object(self.index, 
"_wrap_grpc_call", autospec=True) + self.index.fetch(["vec1", "vec2"], namespace="ns", timeout=30) self.index._wrap_grpc_call.assert_called_once_with( self.index.stub.Fetch, - FetchRequest(ids=['vec1', 'vec2'], namespace='ns'), + FetchRequest(ids=["vec1", "vec2"], namespace="ns"), timeout=30, ) @@ -544,29 +635,29 @@ def test_fetch_byIdsAndNS_fetchByIdsAndNS(self, mocker): # region: update tests def test_update_byIdAnValues_updateByIdAndValues(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - self.index.update(id='vec1', values=self.vals1, namespace='ns', timeout=30) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + self.index.update(id="vec1", values=self.vals1, namespace="ns", timeout=30) self.index._wrap_grpc_call.assert_called_once_with( self.index.stub.Update, - UpdateRequest(id='vec1', values=self.vals1, namespace='ns'), + UpdateRequest(id="vec1", values=self.vals1, namespace="ns"), timeout=30, ) def test_update_byIdAnValuesAsync_updateByIdAndValuesAsync(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - self.index.update(id='vec1', values=self.vals1, namespace='ns', timeout=30, async_req=True) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + self.index.update(id="vec1", values=self.vals1, namespace="ns", timeout=30, async_req=True) self.index._wrap_grpc_call.assert_called_once_with( self.index.stub.Update.future, - UpdateRequest(id='vec1', values=self.vals1, namespace='ns'), + UpdateRequest(id="vec1", values=self.vals1, namespace="ns"), timeout=30, ) def test_update_byIdAnValuesAndMetadata_updateByIdAndValuesAndMetadata(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) - self.index.update('vec1', values=self.vals1, set_metadata=self.md1) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) + self.index.update("vec1", values=self.vals1, set_metadata=self.md1) 
self.index._wrap_grpc_call.assert_called_once_with( self.index.stub.Update, - UpdateRequest(id='vec1', values=self.vals1, set_metadata=dict_to_proto_struct(self.md1)), + UpdateRequest(id="vec1", values=self.vals1, set_metadata=dict_to_proto_struct(self.md1)), timeout=None, ) @@ -575,7 +666,7 @@ def test_update_byIdAnValuesAndMetadata_updateByIdAndValuesAndMetadata(self, moc # region: describe index tests def test_describeIndexStats_callWithoutFilter_CalledWithoutFilter(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) self.index.describe_index_stats() self.index._wrap_grpc_call.assert_called_once_with( self.index.stub.DescribeIndexStats, @@ -584,7 +675,7 @@ def test_describeIndexStats_callWithoutFilter_CalledWithoutFilter(self, mocker): ) def test_describeIndexStats_callWithFilter_CalledWithFilter(self, mocker): - mocker.patch.object(self.index, '_wrap_grpc_call', autospec=True) + mocker.patch.object(self.index, "_wrap_grpc_call", autospec=True) self.index.describe_index_stats(filter=self.filter1) self.index._wrap_grpc_call.assert_called_once_with( self.index.stub.DescribeIndexStats, @@ -594,9 +685,10 @@ def test_describeIndexStats_callWithFilter_CalledWithFilter(self, mocker): # endregion + class MockUpsertDelegate: def __init__(self, upsert_response: UpsertResponse): self.response = upsert_response def result(self, timeout): - return self.response \ No newline at end of file + return self.response diff --git a/tests/unit/test_index.py b/tests/unit/test_index.py index 7092d71c..28d502b3 100644 --- a/tests/unit/test_index.py +++ b/tests/unit/test_index.py @@ -3,36 +3,31 @@ import pytest import warnings -from pinecone.core.client.api_client import Endpoint - import pinecone +from pinecone import UpsertRequest, Vector from pinecone import DescribeIndexStatsRequest, ScoredVector, QueryResponse, UpsertResponse, SparseValues class TestRestIndex: - def setup_method(self): 
self.vector_dim = 8 - self.id1 = 'vec1' - self.id2 = 'vec2' + self.id1 = "vec1" + self.id2 = "vec2" self.vals1 = [0.1] * self.vector_dim self.vals2 = [0.2] * self.vector_dim - self.md1 = {'genre': 'action', 'year': 2021} - self.md2 = {'genre': 'documentary', 'year': 2020} - self.filter1 = {'genre': {'$in': ['action']}} - self.filter2 = {'year': {'$eq': 2020}} + self.md1 = {"genre": "action", "year": 2021} + self.md2 = {"genre": "documentary", "year": 2020} + self.filter1 = {"genre": {"$in": ["action"]}} + self.filter2 = {"year": {"$eq": 2020}} self.svi1 = [1, 3, 5] self.svv1 = [0.1, 0.2, 0.3] self.sv1 = { - 'indices': self.svi1, - 'values': self.svv1, + "indices": self.svi1, + "values": self.svv1, } self.svi2 = [2, 4, 6] self.svv2 = [0.1, 0.2, 0.3] - self.sv2 = { - 'indices': self.svi2, - 'values': self.svv2 - } + self.sv2 = {"indices": self.svi2, "values": self.svv2} pinecone.init(api_key='example-key') self.index = pinecone.Index('example-name') @@ -49,314 +44,364 @@ def test_upsert_numpy_deprecation_warning(self, mocker): self.index.upsert([{'id': '3', 'values': [0.5,0.5,0.5], 'sparse_values': { 'indices': np.array([0, 1]), 'values': np.array([3.0, 3.2])}}]) def test_upsert_tuplesOfIdVec_UpserWithoutMD(self, mocker): - mocker.patch.object(self.index._vector_api, 'upsert', autospec=True) - self.index.upsert([('vec1', self.vals1), ('vec2', self.vals2)], namespace='ns') + mocker.patch.object(self.index._vector_api, "upsert", autospec=True) + self.index.upsert([("vec1", self.vals1), ("vec2", self.vals2)], namespace="ns") self.index._vector_api.upsert.assert_called_once_with( - pinecone.UpsertRequest(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1, metadata={}), - pinecone.Vector(id='vec2', values=self.vals2, metadata={}) - ], namespace='ns') + UpsertRequest( + vectors=[ + Vector(id="vec1", values=self.vals1, metadata={}), + Vector(id="vec2", values=self.vals2, metadata={}), + ], + namespace="ns", + ) ) def 
test_upsert_tuplesOfIdVecMD_UpsertVectorsWithMD(self, mocker): - mocker.patch.object(self.index._vector_api, 'upsert', autospec=True) - self.index.upsert([('vec1', self.vals1, self.md1), - ('vec2', self.vals2, self.md2)]) + mocker.patch.object(self.index._vector_api, "upsert", autospec=True) + self.index.upsert([("vec1", self.vals1, self.md1), ("vec2", self.vals2, self.md2)]) self.index._vector_api.upsert.assert_called_once_with( - pinecone.UpsertRequest(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1, metadata=self.md1), - pinecone.Vector(id='vec2', values=self.vals2, metadata=self.md2) - ]) + UpsertRequest( + vectors=[ + Vector(id="vec1", values=self.vals1, metadata=self.md1), + Vector(id="vec2", values=self.vals2, metadata=self.md2), + ] + ) ) def test_upsert_dictOfIdVecMD_UpsertVectorsWithMD(self, mocker): - mocker.patch.object(self.index._vector_api, 'upsert', autospec=True) - self.index.upsert([{'id': self.id1, 'values': self.vals1, 'metadata': self.md1}, - {'id': self.id2, 'values': self.vals2, 'metadata': self.md2}]) + mocker.patch.object(self.index._vector_api, "upsert", autospec=True) + self.index.upsert( + [ + {"id": self.id1, "values": self.vals1, "metadata": self.md1}, + {"id": self.id2, "values": self.vals2, "metadata": self.md2}, + ] + ) self.index._vector_api.upsert.assert_called_once_with( - pinecone.UpsertRequest(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1, metadata=self.md1), - pinecone.Vector(id='vec2', values=self.vals2, metadata=self.md2) - ]) + UpsertRequest( + vectors=[ + Vector(id="vec1", values=self.vals1, metadata=self.md1), + Vector(id="vec2", values=self.vals2, metadata=self.md2), + ] + ) ) def test_upsert_dictOfIdVecMD_UpsertVectorsWithoutMD(self, mocker): - mocker.patch.object(self.index._vector_api, 'upsert', autospec=True) - self.index.upsert([{'id': self.id1, 'values': self.vals1}, - {'id': self.id2, 'values': self.vals2}]) + mocker.patch.object(self.index._vector_api, "upsert", autospec=True) + 
self.index.upsert([{"id": self.id1, "values": self.vals1}, {"id": self.id2, "values": self.vals2}]) self.index._vector_api.upsert.assert_called_once_with( - pinecone.UpsertRequest(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1), - pinecone.Vector(id='vec2', values=self.vals2) - ]) + UpsertRequest(vectors=[Vector(id="vec1", values=self.vals1), Vector(id="vec2", values=self.vals2)]) ) def test_upsert_dictOfIdVecMD_UpsertVectorsWithSparseValues(self, mocker): - mocker.patch.object(self.index._vector_api, 'upsert', autospec=True) - self.index.upsert([{'id': self.id1, 'values': self.vals1, 'sparse_values': self.sv1}, - {'id': self.id2, 'values': self.vals2, 'sparse_values': self.sv2}]) + mocker.patch.object(self.index._vector_api, "upsert", autospec=True) + self.index.upsert( + [ + {"id": self.id1, "values": self.vals1, "sparse_values": self.sv1}, + {"id": self.id2, "values": self.vals2, "sparse_values": self.sv2}, + ] + ) self.index._vector_api.upsert.assert_called_once_with( - pinecone.UpsertRequest(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1, sparse_values=SparseValues(**self.sv1)), - pinecone.Vector(id='vec2', values=self.vals2, sparse_values=SparseValues(**self.sv2)) - ]) + UpsertRequest( + vectors=[ + Vector(id="vec1", values=self.vals1, sparse_values=SparseValues(**self.sv1)), + Vector(id="vec2", values=self.vals2, sparse_values=SparseValues(**self.sv2)), + ] + ) ) def test_upsert_vectors_upsertInputVectors(self, mocker): - mocker.patch.object(self.index._vector_api, 'upsert', autospec=True) - self.index.upsert(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1, metadata=self.md1), - pinecone.Vector(id='vec2', values=self.vals2, metadata=self.md2)], - namespace='ns') + mocker.patch.object(self.index._vector_api, "upsert", autospec=True) + self.index.upsert( + vectors=[ + Vector(id="vec1", values=self.vals1, metadata=self.md1), + Vector(id="vec2", values=self.vals2, metadata=self.md2), + ], + namespace="ns", + ) 
self.index._vector_api.upsert.assert_called_once_with( - pinecone.UpsertRequest(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1, metadata=self.md1), - pinecone.Vector(id='vec2', values=self.vals2, metadata=self.md2) - ], namespace='ns') + UpsertRequest( + vectors=[ + Vector(id="vec1", values=self.vals1, metadata=self.md1), + Vector(id="vec2", values=self.vals2, metadata=self.md2), + ], + namespace="ns", + ) ) def test_upsert_parallelUpsert_callUpsertParallel(self, mocker): - mocker.patch.object(Endpoint, '__call__', autospec=True) - chunks = [[pinecone.Vector(id='vec1', values=self.vals1, metadata=self.md1)], - [pinecone.Vector(id='vec2', values=self.vals2, metadata=self.md2)]] - with pinecone.Index('example-index', pool_threads=30) as index: + chunks = [ + [Vector(id="vec1", values=self.vals1, metadata=self.md1)], + [Vector(id="vec2", values=self.vals2, metadata=self.md2)], + ] + with pinecone.Index("example-index", pool_threads=30) as index: + mocker.patch.object(index._vector_api, "upsert", autospec=True) + # Send requests in parallel async_results = [ - index.upsert(vectors=ids_vectors_chunk, namespace="ns", async_req=True) - for ids_vectors_chunk in chunks + index.upsert(vectors=ids_vectors_chunk, namespace="ns", async_req=True) for ids_vectors_chunk in chunks ] # Wait for and retrieve responses (this raises in case of error) [async_result.get() for async_result in async_results] - Endpoint.__call__.assert_any_call( - index._vector_api.upsert, - pinecone.UpsertRequest(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1, metadata=self.md1), - ], - namespace='ns'), - async_req=True + index._vector_api.upsert.assert_any_call( + UpsertRequest( + vectors=[ + Vector(id="vec1", values=self.vals1, metadata=self.md1), + ], + namespace="ns", + ), + async_req=True, ) - Endpoint.__call__.assert_any_call( - index._vector_api.upsert, - pinecone.UpsertRequest(vectors=[ - pinecone.Vector(id='vec2', values=self.vals2, metadata=self.md2), - ], - namespace='ns'), - 
async_req=True + index._vector_api.upsert.assert_any_call( + UpsertRequest( + vectors=[ + Vector(id="vec2", values=self.vals2, metadata=self.md2), + ], + namespace="ns", + ), + async_req=True, ) def test_upsert_vectorListIsMultiplyOfBatchSize_vectorsUpsertedInBatches(self, mocker): - mocker.patch.object(self.index._vector_api, - 'upsert', - autospec=True, - side_effect=lambda upsert_request: UpsertResponse( - upserted_count=len(upsert_request.vectors))) - - result = self.index.upsert(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1, metadata=self.md1), - pinecone.Vector(id='vec2', values=self.vals2, metadata=self.md2)], - namespace='ns', + mocker.patch.object( + self.index._vector_api, + "upsert", + autospec=True, + side_effect=lambda upsert_request: UpsertResponse(upserted_count=len(upsert_request.vectors)), + ) + + result = self.index.upsert( + vectors=[ + Vector(id="vec1", values=self.vals1, metadata=self.md1), + Vector(id="vec2", values=self.vals2, metadata=self.md2), + ], + namespace="ns", batch_size=1, - show_progress=False) + show_progress=False, + ) self.index._vector_api.upsert.assert_any_call( - pinecone.UpsertRequest(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1, metadata=self.md1), - ], namespace='ns') + UpsertRequest( + vectors=[ + Vector(id="vec1", values=self.vals1, metadata=self.md1), + ], + namespace="ns", + ) ) self.index._vector_api.upsert.assert_any_call( - pinecone.UpsertRequest(vectors=[ - pinecone.Vector(id='vec2', values=self.vals2, metadata=self.md2), - ], namespace='ns') + UpsertRequest( + vectors=[ + Vector(id="vec2", values=self.vals2, metadata=self.md2), + ], + namespace="ns", + ) ) assert result.upserted_count == 2 def test_upsert_vectorListNotMultiplyOfBatchSize_vectorsUpsertedInBatches(self, mocker): - mocker.patch.object(self.index._vector_api, - 'upsert', - autospec=True, - side_effect=lambda upsert_request: UpsertResponse( - upserted_count=len(upsert_request.vectors))) - - result = self.index.upsert(vectors=[ - 
pinecone.Vector(id='vec1', values=self.vals1, metadata=self.md1), - pinecone.Vector(id='vec2', values=self.vals2, metadata=self.md2), - pinecone.Vector(id='vec3', values=self.vals1, metadata=self.md1)], - namespace='ns', - batch_size=2) + mocker.patch.object( + self.index._vector_api, + "upsert", + autospec=True, + side_effect=lambda upsert_request: UpsertResponse(upserted_count=len(upsert_request.vectors)), + ) + + result = self.index.upsert( + vectors=[ + Vector(id="vec1", values=self.vals1, metadata=self.md1), + Vector(id="vec2", values=self.vals2, metadata=self.md2), + Vector(id="vec3", values=self.vals1, metadata=self.md1), + ], + namespace="ns", + batch_size=2, + ) self.index._vector_api.upsert.assert_any_call( - pinecone.UpsertRequest(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1, metadata=self.md1), - pinecone.Vector(id='vec2', values=self.vals2, metadata=self.md2), - ], namespace='ns') + UpsertRequest( + vectors=[ + Vector(id="vec1", values=self.vals1, metadata=self.md1), + Vector(id="vec2", values=self.vals2, metadata=self.md2), + ], + namespace="ns", + ) ) self.index._vector_api.upsert.assert_any_call( - pinecone.UpsertRequest(vectors=[ - pinecone.Vector(id='vec3', values=self.vals1, metadata=self.md1), - ], namespace='ns') + UpsertRequest( + vectors=[ + Vector(id="vec3", values=self.vals1, metadata=self.md1), + ], + namespace="ns", + ) ) assert result.upserted_count == 3 def test_upsert_vectorListSmallerThanBatchSize_vectorsUpsertedInBatches(self, mocker): - mocker.patch.object(self.index._vector_api, - 'upsert', - autospec=True, - side_effect=lambda upsert_request: UpsertResponse( - upserted_count=len(upsert_request.vectors))) - - result = self.index.upsert(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1, metadata=self.md1), - pinecone.Vector(id='vec2', values=self.vals2, metadata=self.md2), - pinecone.Vector(id='vec3', values=self.vals1, metadata=self.md1)], - namespace='ns', - batch_size=5) + mocker.patch.object( + 
self.index._vector_api, + "upsert", + autospec=True, + side_effect=lambda upsert_request: UpsertResponse(upserted_count=len(upsert_request.vectors)), + ) + + result = self.index.upsert( + vectors=[ + Vector(id="vec1", values=self.vals1, metadata=self.md1), + Vector(id="vec2", values=self.vals2, metadata=self.md2), + Vector(id="vec3", values=self.vals1, metadata=self.md1), + ], + namespace="ns", + batch_size=5, + ) self.index._vector_api.upsert.assert_called_once_with( - pinecone.UpsertRequest(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1, metadata=self.md1), - pinecone.Vector(id='vec2', values=self.vals2, metadata=self.md2), - pinecone.Vector(id='vec3', values=self.vals1, metadata=self.md1), - ], namespace='ns') + UpsertRequest( + vectors=[ + Vector(id="vec1", values=self.vals1, metadata=self.md1), + Vector(id="vec2", values=self.vals2, metadata=self.md2), + Vector(id="vec3", values=self.vals1, metadata=self.md1), + ], + namespace="ns", + ) ) assert result.upserted_count == 3 def test_upsert_tuplesList_vectorsUpsertedInBatches(self, mocker): - mocker.patch.object(self.index._vector_api, - 'upsert', - autospec=True, - side_effect=lambda upsert_request: UpsertResponse( - upserted_count=len(upsert_request.vectors))) - - result = self.index.upsert(vectors= - [('vec1', self.vals1, self.md1), - ('vec2', self.vals2, self.md2), - ('vec3', self.vals1, self.md1)], - namespace='ns', - batch_size=2) + mocker.patch.object( + self.index._vector_api, + "upsert", + autospec=True, + side_effect=lambda upsert_request: UpsertResponse(upserted_count=len(upsert_request.vectors)), + ) + + result = self.index.upsert( + vectors=[("vec1", self.vals1, self.md1), ("vec2", self.vals2, self.md2), ("vec3", self.vals1, self.md1)], + namespace="ns", + batch_size=2, + ) self.index._vector_api.upsert.assert_any_call( - pinecone.UpsertRequest(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1, metadata=self.md1), - pinecone.Vector(id='vec2', values=self.vals2, metadata=self.md2), - 
], namespace='ns') + UpsertRequest( + vectors=[ + Vector(id="vec1", values=self.vals1, metadata=self.md1), + Vector(id="vec2", values=self.vals2, metadata=self.md2), + ], + namespace="ns", + ) ) self.index._vector_api.upsert.assert_any_call( - pinecone.UpsertRequest(vectors=[ - pinecone.Vector(id='vec3', values=self.vals1, metadata=self.md1), - ], namespace='ns') + UpsertRequest( + vectors=[ + Vector(id="vec3", values=self.vals1, metadata=self.md1), + ], + namespace="ns", + ) ) assert result.upserted_count == 3 def test_upsert_dataframe(self, mocker): - mocker.patch.object(self.index._vector_api, 'upsert', autospec=True, return_value=UpsertResponse(upserted_count=2)) - df = pd.DataFrame([ - {'id': self.id1, 'values': self.vals1, 'metadata': self.md1}, - {'id': self.id2, 'values': self.vals2, 'metadata': self.md2} - ]) + mocker.patch.object( + self.index._vector_api, "upsert", autospec=True, return_value=UpsertResponse(upserted_count=2) + ) + df = pd.DataFrame( + [ + {"id": self.id1, "values": self.vals1, "metadata": self.md1}, + {"id": self.id2, "values": self.vals2, "metadata": self.md2}, + ] + ) self.index.upsert_from_dataframe(df) self.index._vector_api.upsert.assert_called_once_with( - pinecone.UpsertRequest(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1, metadata=self.md1), - pinecone.Vector(id='vec2', values=self.vals2, metadata=self.md2) - ]) + UpsertRequest( + vectors=[ + Vector(id="vec1", values=self.vals1, metadata=self.md1), + Vector(id="vec2", values=self.vals2, metadata=self.md2), + ] + ) ) def test_upsert_batchSizeIsNotPositive_errorIsRaised(self): - with pytest.raises(ValueError, match='batch_size must be a positive integer'): - self.index.upsert(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1, metadata=self.md1)], - namespace='ns', - batch_size=0) - - with pytest.raises(ValueError, match='batch_size must be a positive integer'): - self.index.upsert(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1, metadata=self.md1)], - 
namespace='ns', - batch_size=-1) + with pytest.raises(ValueError, match="batch_size must be a positive integer"): + self.index.upsert( + vectors=[Vector(id="vec1", values=self.vals1, metadata=self.md1)], namespace="ns", batch_size=0 + ) + + with pytest.raises(ValueError, match="batch_size must be a positive integer"): + self.index.upsert( + vectors=[Vector(id="vec1", values=self.vals1, metadata=self.md1)], + namespace="ns", + batch_size=-1, + ) def test_upsert_useBatchSizeAndAsyncReq_valueErrorRaised(self): - with pytest.raises(ValueError, match='async_req is not supported when batch_size is provided.'): - self.index.upsert(vectors=[ - pinecone.Vector(id='vec1', values=self.vals1, metadata=self.md1)], - namespace='ns', + with pytest.raises(ValueError, match="async_req is not supported when batch_size is provided."): + self.index.upsert( + vectors=[Vector(id="vec1", values=self.vals1, metadata=self.md1)], + namespace="ns", batch_size=1, - async_req=True) + async_req=True, + ) # endregion # region: query tests def test_query_byVectorNoFilter_queryVectorNoFilter(self, mocker): - response = QueryResponse(results=[], - matches=[ScoredVector(id="1", - score=0.9, - values=[0.0], - metadata={"a": 2})], - namespace="test") + response = QueryResponse( + results=[], matches=[ScoredVector(id="1", score=0.9, values=[0.0], metadata={"a": 2})], namespace="test" + ) - mocker.patch.object(self.index._vector_api, 'query', autospec=True, return_value=response) + mocker.patch.object(self.index._vector_api, "query", autospec=True, return_value=response) actual = self.index.query(top_k=10, vector=self.vals1) - self.index._vector_api.query.assert_called_once_with( - pinecone.QueryRequest(top_k=10, vector=self.vals1) + self.index._vector_api.query.assert_called_once_with(pinecone.QueryRequest(top_k=10, vector=self.vals1)) + expected = QueryResponse( + matches=[ScoredVector(id="1", score=0.9, values=[0.0], metadata={"a": 2})], namespace="test" ) - expected = 
QueryResponse(matches=[ScoredVector(id="1", - score=0.9, - values=[0.0], - metadata={"a": 2})], - namespace="test") - expected._data_store.pop('results', None) - assert actual == expected + assert expected.to_dict() == actual.to_dict() def test_query_byVectorWithFilter_queryVectorWithFilter(self, mocker): - mocker.patch.object(self.index._vector_api, 'query', autospec=True) - self.index.query(top_k=10, vector=self.vals1, filter=self.filter1, namespace='ns') + mocker.patch.object(self.index._vector_api, "query", autospec=True) + self.index.query(top_k=10, vector=self.vals1, filter=self.filter1, namespace="ns") self.index._vector_api.query.assert_called_once_with( - pinecone.QueryRequest(top_k=10, vector=self.vals1, filter=self.filter1, namespace='ns') + pinecone.QueryRequest(top_k=10, vector=self.vals1, filter=self.filter1, namespace="ns") ) def test_query_byTuplesNoFilter_queryVectorsNoFilter(self, mocker): - mocker.patch.object(self.index._vector_api, 'query', autospec=True) - self.index.query(top_k=10, queries=[ - (self.vals1,), - (self.vals2,) - ]) + mocker.patch.object(self.index._vector_api, "query", autospec=True) + self.index.query(top_k=10, queries=[(self.vals1,), (self.vals2,)]) self.index._vector_api.query.assert_called_once_with( - pinecone.QueryRequest(top_k=10, queries=[ - pinecone.QueryVector(values=self.vals1), - pinecone.QueryVector(values=self.vals2) - ]) + pinecone.QueryRequest( + top_k=10, queries=[pinecone.QueryVector(values=self.vals1), pinecone.QueryVector(values=self.vals2)] + ) ) def test_query_byTuplesWithFilter_queryVectorsWithFilter(self, mocker): - mocker.patch.object(self.index._vector_api, 'query', autospec=True) - self.index.query(top_k=10, queries=[ - (self.vals1, self.filter1), - (self.vals2, self.filter2) - ]) + mocker.patch.object(self.index._vector_api, "query", autospec=True) + self.index.query(top_k=10, queries=[(self.vals1, self.filter1), (self.vals2, self.filter2)]) self.index._vector_api.query.assert_called_once_with( - 
pinecone.QueryRequest(top_k=10, queries=[ - pinecone.QueryVector(values=self.vals1, filter=self.filter1), - pinecone.QueryVector(values=self.vals2, filter=self.filter2) - ]) + pinecone.QueryRequest( + top_k=10, + queries=[ + pinecone.QueryVector(values=self.vals1, filter=self.filter1), + pinecone.QueryVector(values=self.vals2, filter=self.filter2), + ], + ) ) def test_query_byVecId_queryByVecId(self, mocker): - mocker.patch.object(self.index._vector_api, 'query', autospec=True) - self.index.query(top_k=10, id='vec1', include_metadata=True, include_values=False) + mocker.patch.object(self.index._vector_api, "query", autospec=True) + self.index.query(top_k=10, id="vec1", include_metadata=True, include_values=False) self.index._vector_api.query.assert_called_once_with( - pinecone.QueryRequest(top_k=10, id='vec1', include_metadata=True, include_values=False) + pinecone.QueryRequest(top_k=10, id="vec1", include_metadata=True, include_values=False) ) # endregion @@ -364,60 +409,52 @@ def test_query_byVecId_queryByVecId(self, mocker): # region: delete tests def test_delete_byIds_deleteByIds(self, mocker): - mocker.patch.object(self.index._vector_api, 'delete', autospec=True) - self.index.delete(ids=['vec1', 'vec2']) - self.index._vector_api.delete.assert_called_once_with( - pinecone.DeleteRequest(ids=['vec1', 'vec2']) - ) + mocker.patch.object(self.index._vector_api, "delete", autospec=True) + self.index.delete(ids=["vec1", "vec2"]) + self.index._vector_api.delete.assert_called_once_with(pinecone.DeleteRequest(ids=["vec1", "vec2"])) def test_delete_deleteAllByFilter_deleteAllByFilter(self, mocker): - mocker.patch.object(self.index._vector_api, 'delete', autospec=True) - self.index.delete(delete_all=True, filter=self.filter1, namespace='ns') + mocker.patch.object(self.index._vector_api, "delete", autospec=True) + self.index.delete(delete_all=True, filter=self.filter1, namespace="ns") self.index._vector_api.delete.assert_called_once_with( - 
pinecone.DeleteRequest(delete_all=True, filter=self.filter1, namespace='ns') + pinecone.DeleteRequest(delete_all=True, filter=self.filter1, namespace="ns") ) def test_delete_deleteAllNoFilter_deleteNoFilter(self, mocker): - mocker.patch.object(self.index._vector_api, 'delete', autospec=True) + mocker.patch.object(self.index._vector_api, "delete", autospec=True) self.index.delete(delete_all=True) - self.index._vector_api.delete.assert_called_once_with( - pinecone.DeleteRequest(delete_all=True) - ) + self.index._vector_api.delete.assert_called_once_with(pinecone.DeleteRequest(delete_all=True)) # endregion # region: fetch tests def test_fetch_byIds_fetchByIds(self, mocker): - mocker.patch.object(self.index._vector_api, 'fetch', autospec=True) - self.index.fetch(ids=['vec1', 'vec2']) - self.index._vector_api.fetch.assert_called_once_with( - ids=['vec1', 'vec2'] - ) + mocker.patch.object(self.index._vector_api, "fetch", autospec=True) + self.index.fetch(ids=["vec1", "vec2"]) + self.index._vector_api.fetch.assert_called_once_with(ids=["vec1", "vec2"]) def test_fetch_byIdsAndNS_fetchByIdsAndNS(self, mocker): - mocker.patch.object(self.index._vector_api, 'fetch', autospec=True) - self.index.fetch(ids=['vec1', 'vec2'], namespace='ns') - self.index._vector_api.fetch.assert_called_once_with( - ids=['vec1', 'vec2'], namespace='ns' - ) + mocker.patch.object(self.index._vector_api, "fetch", autospec=True) + self.index.fetch(ids=["vec1", "vec2"], namespace="ns") + self.index._vector_api.fetch.assert_called_once_with(ids=["vec1", "vec2"], namespace="ns") # endregion # region: update tests def test_update_byIdAnValues_updateByIdAndValues(self, mocker): - mocker.patch.object(self.index._vector_api, 'update', autospec=True) - self.index.update(id='vec1', values=self.vals1, namespace='ns') + mocker.patch.object(self.index._vector_api, "update", autospec=True) + self.index.update(id="vec1", values=self.vals1, namespace="ns") self.index._vector_api.update.assert_called_once_with( - 
pinecone.UpdateRequest(id='vec1', values=self.vals1, namespace='ns') + pinecone.UpdateRequest(id="vec1", values=self.vals1, namespace="ns") ) def test_update_byIdAnValuesAndMetadata_updateByIdAndValuesAndMetadata(self, mocker): - mocker.patch.object(self.index._vector_api, 'update', autospec=True) - self.index.update('vec1', values=self.vals1, metadata=self.md1) + mocker.patch.object(self.index._vector_api, "update", autospec=True) + self.index.update("vec1", values=self.vals1, metadata=self.md1) self.index._vector_api.update.assert_called_once_with( - pinecone.UpdateRequest(id='vec1', values=self.vals1, metadata=self.md1) + pinecone.UpdateRequest(id="vec1", values=self.vals1, metadata=self.md1) ) # endregion @@ -425,15 +462,15 @@ def test_update_byIdAnValuesAndMetadata_updateByIdAndValuesAndMetadata(self, moc # region: describe index tests def test_describeIndexStats_callWithoutFilter_CalledWithoutFilter(self, mocker): - mocker.patch.object(self.index._vector_api, 'describe_index_stats', autospec=True) + mocker.patch.object(self.index._vector_api, "describe_index_stats", autospec=True) self.index.describe_index_stats() - self.index._vector_api.describe_index_stats.assert_called_once_with( - DescribeIndexStatsRequest()) + self.index._vector_api.describe_index_stats.assert_called_once_with(DescribeIndexStatsRequest()) def test_describeIndexStats_callWithFilter_CalledWithFilter(self, mocker): - mocker.patch.object(self.index._vector_api, 'describe_index_stats', autospec=True) + mocker.patch.object(self.index._vector_api, "describe_index_stats", autospec=True) self.index.describe_index_stats(filter=self.filter1) self.index._vector_api.describe_index_stats.assert_called_once_with( - DescribeIndexStatsRequest(filter=self.filter1)) + DescribeIndexStatsRequest(filter=self.filter1) + ) # endregion diff --git a/tests/unit/test_manage.py b/tests/unit/test_manage.py index 70c7dedb..0a65d67c 100644 --- a/tests/unit/test_manage.py +++ b/tests/unit/test_manage.py @@ -27,7 +27,7 
@@ def test_create_index_with_timeout(self, mocker, timeout_value, get_status_calls mocker.patch('pinecone.manage._get_status', side_effect=get_status_responses) mocker.patch('time.sleep') - pinecone.manage.create_index("my-index", 10, timeout=timeout_value) + pinecone.manage.create_index("my-index", 10, timeout=timeout_value, cloud="aws", region="us-west1", capacity_mode="pod") pinecone.manage._get_api_instance.assert_called_once() assert pinecone.manage._get_status.call_count == get_status_calls