2 changes: 1 addition & 1 deletion Makefile
@@ -25,7 +25,7 @@ type-check:
uv run mypy pinecone --exclude pinecone/core

generate-oas:
- ./codegen/build-oas.sh "2024-07"
+ ./codegen/build-oas.sh "2025-10"

version:
@python -c "import re; print(re.search(r'version = \"([^\"]+)\"', open('pyproject.toml').read()).group(1))"
2 changes: 1 addition & 1 deletion codegen/apis
Submodule apis updated from c968f9 to be7f2b
2 changes: 1 addition & 1 deletion codegen/python-oas-templates
4 changes: 2 additions & 2 deletions pinecone/core/openapi/db_control/api/manage_indexes_api.py
@@ -72,7 +72,7 @@ def __configure_index(
) -> IndexModel | ApplyResult[IndexModel]:
"""Configure an index # noqa: E501

- Configure an existing index. For serverless indexes, you can configure index deletion protection, tags, and integrated inference embedding settings for the index. For pod-based indexes, you can configure the pod size, number of replicas, tags, and index deletion protection. It is not possible to change the pod type of a pod-based index. However, you can create a collection from a pod-based index and then [create a new pod-based index with a different pod type](http://docs.pinecone.io/guides/indexes/pods/create-a-pod-based-index#create-a-pod-index-from-a-collection) from the collection. For guidance and examples, see [Configure an index](http://docs.pinecone.io/guides/indexes/pods/manage-pod-based-indexes). # noqa: E501
+ Configure an existing index. For guidance and examples, see [Manage indexes](https://docs.pinecone.io/guides/manage-data/manage-indexes). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True

@@ -1562,7 +1562,7 @@ async def __configure_index(
) -> IndexModel:
"""Configure an index # noqa: E501

- Configure an existing index. For serverless indexes, you can configure index deletion protection, tags, and integrated inference embedding settings for the index. For pod-based indexes, you can configure the pod size, number of replicas, tags, and index deletion protection. It is not possible to change the pod type of a pod-based index. However, you can create a collection from a pod-based index and then [create a new pod-based index with a different pod type](http://docs.pinecone.io/guides/indexes/pods/create-a-pod-based-index#create-a-pod-index-from-a-collection) from the collection. For guidance and examples, see [Configure an index](http://docs.pinecone.io/guides/indexes/pods/manage-pod-based-indexes). # noqa: E501
+ Configure an existing index. For guidance and examples, see [Manage indexes](https://docs.pinecone.io/guides/manage-data/manage-indexes). # noqa: E501


Args:
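The updated docstring above keeps the note that `configure_index` issues a synchronous HTTP request by default and, per the return annotation in this hunk, yields an `ApplyResult[IndexModel]` when `async_req=True` is passed. As a point of reference only, here is a minimal sketch of driving this change through the public `Pinecone` client rather than the generated `ManageIndexesApi` directly; the index name and tag value are hypothetical:

```python
# Hedged sketch (not part of this diff): configuring an index through the
# public Pinecone client, which wraps the regenerated ManageIndexesApi.
# The index name and tag value below are hypothetical.
from pinecone import Pinecone

pc = Pinecone(api_key="YOUR_API_KEY")

# Synchronous call; the generated layer also accepts async_req=True and then
# returns an ApplyResult[IndexModel] instead of an IndexModel.
pc.configure_index(
    name="example-index",             # hypothetical index name
    deletion_protection="enabled",    # toggle deletion protection
    tags={"environment": "staging"},  # hypothetical tag
)

print(pc.describe_index("example-index").deletion_protection)
```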
12 changes: 6 additions & 6 deletions pinecone/core/openapi/db_control/model/backup_model.py
@@ -29,16 +29,16 @@
from typing import TYPE_CHECKING

if TYPE_CHECKING:
- from pinecone.core.openapi.db_control.model.backup_model_schema import BackupModelSchema
from pinecone.core.openapi.db_control.model.index_tags import IndexTags
+ from pinecone.core.openapi.db_control.model.metadata_schema import MetadataSchema


def lazy_import():
- from pinecone.core.openapi.db_control.model.backup_model_schema import BackupModelSchema
from pinecone.core.openapi.db_control.model.index_tags import IndexTags
+ from pinecone.core.openapi.db_control.model.metadata_schema import MetadataSchema

globals()["BackupModelSchema"] = BackupModelSchema
globals()["IndexTags"] = IndexTags
globals()["MetadataSchema"] = MetadataSchema


from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar
@@ -112,7 +112,7 @@ def openapi_types(cls):
"description": (str,), # noqa: E501
"dimension": (int,), # noqa: E501
"metric": (str,), # noqa: E501
"schema": (BackupModelSchema,), # noqa: E501
"schema": (MetadataSchema,), # noqa: E501
"record_count": (int,), # noqa: E501
"namespace_count": (int,), # noqa: E501
"size_bytes": (int,), # noqa: E501
@@ -216,7 +216,7 @@ def _from_openapi_data(
description (str): Optional description providing context for the backup. [optional] # noqa: E501
dimension (int): The dimensions of the vectors to be inserted in the index. [optional] # noqa: E501
metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. If the 'vector_type' is 'sparse', the metric must be 'dotproduct'. If the `vector_type` is `dense`, the metric defaults to 'cosine'. Possible values: `cosine`, `euclidean`, or `dotproduct`. [optional] # noqa: E501
- schema (BackupModelSchema): [optional] # noqa: E501
+ schema (MetadataSchema): [optional] # noqa: E501
record_count (int): Total number of records in the backup. [optional] # noqa: E501
namespace_count (int): Number of namespaces in the backup. [optional] # noqa: E501
size_bytes (int): Size of the backup in bytes. [optional] # noqa: E501
@@ -331,7 +331,7 @@ def __init__(
description (str): Optional description providing context for the backup. [optional] # noqa: E501
dimension (int): The dimensions of the vectors to be inserted in the index. [optional] # noqa: E501
metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. If the 'vector_type' is 'sparse', the metric must be 'dotproduct'. If the `vector_type` is `dense`, the metric defaults to 'cosine'. Possible values: `cosine`, `euclidean`, or `dotproduct`. [optional] # noqa: E501
- schema (BackupModelSchema): [optional] # noqa: E501
+ schema (MetadataSchema): [optional] # noqa: E501
record_count (int): Total number of records in the backup. [optional] # noqa: E501
namespace_count (int): Number of namespaces in the backup. [optional] # noqa: E501
size_bytes (int): Size of the backup in bytes. [optional] # noqa: E501
19 changes: 13 additions & 6 deletions pinecone/core/openapi/db_control/model/byoc_spec.py
@@ -29,13 +29,16 @@
from typing import TYPE_CHECKING

if TYPE_CHECKING:
- from pinecone.core.openapi.db_control.model.backup_model_schema import BackupModelSchema
+ from pinecone.core.openapi.db_control.model.metadata_schema import MetadataSchema
+ from pinecone.core.openapi.db_control.model.read_capacity import ReadCapacity


def lazy_import():
- from pinecone.core.openapi.db_control.model.backup_model_schema import BackupModelSchema
+ from pinecone.core.openapi.db_control.model.metadata_schema import MetadataSchema
+ from pinecone.core.openapi.db_control.model.read_capacity import ReadCapacity

globals()["BackupModelSchema"] = BackupModelSchema
globals()["MetadataSchema"] = MetadataSchema
globals()["ReadCapacity"] = ReadCapacity


from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar
@@ -98,7 +101,8 @@ def openapi_types(cls):
lazy_import()
return {
"environment": (str,), # noqa: E501
"schema": (BackupModelSchema,), # noqa: E501
"read_capacity": (ReadCapacity,), # noqa: E501
"schema": (MetadataSchema,), # noqa: E501
}

@cached_class_property
@@ -107,6 +111,7 @@ def discriminator(cls):

attribute_map: Dict[str, str] = {
"environment": "environment", # noqa: E501
"read_capacity": "read_capacity", # noqa: E501
"schema": "schema", # noqa: E501
}

@@ -164,7 +169,8 @@ def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
- schema (BackupModelSchema): [optional] # noqa: E501
+ read_capacity (ReadCapacity): [optional] # noqa: E501
+ schema (MetadataSchema): [optional] # noqa: E501
"""

_enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False)
@@ -258,7 +264,8 @@ def __init__(self, environment, *args, **kwargs) -> None: # noqa: E501
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
- schema (BackupModelSchema): [optional] # noqa: E501
+ read_capacity (ReadCapacity): [optional] # noqa: E501
+ schema (MetadataSchema): [optional] # noqa: E501
"""

_enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True)
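For orientation on the `byoc_spec.py` changes above: `environment` remains the only required argument, while `read_capacity` (`ReadCapacity`) and a `MetadataSchema`-typed `schema` are the newly documented optional kwargs. A minimal sketch under those assumptions, using a hypothetical environment name and leaving the new optional fields unset:

```python
# Hedged sketch (not part of this diff): instantiating the regenerated model.
# The environment value is hypothetical; read_capacity (ReadCapacity) and
# schema (MetadataSchema) are the optional kwargs added in this change and
# are left unset here.
from pinecone.core.openapi.db_control.model.byoc_spec import ByocSpec

spec = ByocSpec(environment="aws-us-east-1-example")
print(spec.environment)
```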