diff --git a/.github/workflows/testing-integration-asyncio.yaml b/.github/workflows/testing-integration-asyncio.yaml index 8c8fab605..eb5e3b91f 100644 --- a/.github/workflows/testing-integration-asyncio.yaml +++ b/.github/workflows/testing-integration-asyncio.yaml @@ -51,6 +51,6 @@ jobs: include_asyncio: true include_dev: true - name: 'db_control asyncio' - run: poetry run pytest tests/integration/control_asyncio --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG + run: poetry run pytest tests/integration/control_asyncio/*.py --retries 5 --retry-delay 35 -s -vv --log-cli-level=DEBUG env: PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' diff --git a/.github/workflows/testing-integration.yaml b/.github/workflows/testing-integration.yaml index fa153f7e2..5175d4dca 100644 --- a/.github/workflows/testing-integration.yaml +++ b/.github/workflows/testing-integration.yaml @@ -7,7 +7,7 @@ jobs: name: Reorg tests runs-on: ubuntu-latest env: - PINECONE_DEBUG_CURL: 'true' + PINECONE_DEBUG_CURL: 'false' PINECONE_API_KEY: '${{ secrets.PINECONE_API_KEY }}' PINECONE_ADDITIONAL_HEADERS: '{"sdk-test-suite": "pinecone-python-client"}' strategy: @@ -15,13 +15,13 @@ jobs: matrix: python_version: [3.9, 3.12] test_suite: - - tests/integration/control/index - - tests/integration/control/collections - - tests/integration/control/backup - - tests/integration/control/restore_job - - tests/integration/control_asyncio/index - - tests/integration/control_asyncio/backup - - tests/integration/control_asyncio/restore_job + - tests/integration/control/resources/index + - tests/integration/control/resources/collections + - tests/integration/control/resources/backup + - tests/integration/control/resources/restore_job + - tests/integration/control_asyncio/resources/index + - tests/integration/control_asyncio/resources/backup + - tests/integration/control_asyncio/resources/restore_job steps: - uses: actions/checkout@v4 - name: 'Set up Python ${{ matrix.python_version }}' diff --git a/pinecone/db_control/resources/sync/backup.py b/pinecone/db_control/resources/sync/backup.py index 123b33fbf..dbc576e99 100644 --- a/pinecone/db_control/resources/sync/backup.py +++ b/pinecone/db_control/resources/sync/backup.py @@ -23,7 +23,7 @@ def list( List backups for an index or for the project. Args: - index_name (str): The name of the index to list backups for. + index_name (str): The name of the index to list backups for. If not provided, list all backups for the project. limit (int): The maximum number of backups to return. pagination_token (str): The pagination token to use for the next page of backups. 
""" diff --git a/tests/integration/control/backup/conftest.py b/tests/integration/control/backup/conftest.py deleted file mode 100644 index 9798da273..000000000 --- a/tests/integration/control/backup/conftest.py +++ /dev/null @@ -1,168 +0,0 @@ -import pytest -import uuid -import time -import logging -import dotenv -from pinecone import Pinecone, NotFoundException, PineconeApiException -from ...helpers import generate_index_name, get_environment_var, index_tags as index_tags_helper - -dotenv.load_dotenv() - -logger = logging.getLogger(__name__) -""" @private """ - -# Generate a unique ID for the entire test run -RUN_ID = str(uuid.uuid4()) - - -@pytest.fixture() -def index_tags(request): - return index_tags_helper(request, RUN_ID) - - -@pytest.fixture() -def pc(): - api_key = get_environment_var("PINECONE_API_KEY") - return Pinecone( - api_key=api_key, additional_headers={"sdk-test-suite": "pinecone-python-client"} - ) - - -@pytest.fixture() -def serverless_cloud(): - return get_environment_var("SERVERLESS_CLOUD", "aws") - - -@pytest.fixture() -def serverless_region(): - return get_environment_var("SERVERLESS_REGION", "us-west-2") - - -@pytest.fixture() -def create_sl_index_params(index_name, serverless_cloud, serverless_region, index_tags): - spec = {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - return dict(name=index_name, dimension=10, metric="cosine", spec=spec, tags=index_tags) - - -@pytest.fixture() -def index_name(request): - test_name = request.node.name - return generate_index_name(test_name) - - -@pytest.fixture() -def ready_sl_index(pc, index_name, create_sl_index_params): - create_sl_index_params["timeout"] = None - pc.create_index(**create_sl_index_params) - yield index_name - pc.db.index.delete(name=index_name, timeout=-1) - - -def delete_with_retry(pc, index_name, retries=0, sleep_interval=5): - logger.debug( - "Deleting index " - + index_name - + ", retry " - + str(retries) - + ", next sleep interval " - + str(sleep_interval) - ) - try: - pc.db.index.delete(name=index_name, timeout=-1) - except NotFoundException: - pass - except PineconeApiException as e: - if e.error.code == "PRECONDITON_FAILED": - if retries > 5: - raise Exception("Unable to delete index " + index_name) - time.sleep(sleep_interval) - delete_with_retry(pc, index_name, retries + 1, sleep_interval * 2) - else: - logger.error(e.__class__) - logger.error(e) - raise Exception("Unable to delete index " + index_name) - except Exception as e: - logger.error(e.__class__) - logger.error(e) - raise Exception("Unable to delete index " + index_name) - - -@pytest.fixture(autouse=True) -def cleanup(pc, index_name): - yield - - try: - desc = pc.db.index.describe(name=index_name) - if desc.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {index_name}") - pc.db.index.configure(name=index_name, deletion_protection="disabled") - logger.debug("Attempting to delete index with name: " + index_name) - pc.db.index.delete(name=index_name, timeout=-1) - except Exception: - pass - - for backup in pc.db.backup.list(): - logger.debug(f"Deleting backup: {backup.name}") - try: - pc.db.backup.delete(backup_id=backup.backup_id) - except Exception as e: - logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}") - - -def pytest_sessionfinish(session, exitstatus): - """ - Hook that runs after all tests have completed. - This is a good place to clean up any resources that were created during the test session. 
- """ - logger.info("Running final cleanup after all tests...") - - try: - pc = Pinecone() - indexes = pc.db.index.list() - test_indexes = [ - idx for idx in indexes if idx.tags is not None and idx.tags.get("test-run") == RUN_ID - ] - - logger.info(f"Indexes to delete: {[idx.name for idx in test_indexes]}") - - for idx in test_indexes: - if idx.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {idx.name}") - pc.db.index.configure(name=idx.name, deletion_protection="disabled") - # Wait for index to be updated with status ready - logger.info(f"Waiting for index {idx.name} to be ready...") - timeout = 60 - while True and timeout > 0: - is_ready = pc.db.index.describe(name=idx.name).ready - if is_ready: - break - time.sleep(1) - timeout -= 1 - if timeout <= 0: - logger.warning(f"Index {idx.name} did not become ready in time") - else: - logger.info(f"Deletion protection is already disabled for index: {idx.name}") - - for idx in test_indexes: - try: - logger.info(f"Deleting index: {idx.name}") - pc.db.index.delete(name=idx.name, timeout=-1) - except Exception as e: - logger.warning(f"Failed to delete index {idx.name}: {str(e)}") - - backups = pc.db.backup.list() - if len(backups) > 0: - logger.info(f"Deleting {len(backups)} backups") - for backup in backups: - logger.debug(f"Deleting backup: {backup.name}") - try: - pc.db.backup.delete(backup_id=backup.backup_id) - except Exception as e: - logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}") - else: - logger.info("No backups to delete") - - except Exception as e: - logger.error(f"Error during final cleanup: {str(e)}") - - logger.info("Final cleanup completed") diff --git a/tests/integration/control/collections/conftest.py b/tests/integration/control/collections/conftest.py deleted file mode 100644 index bb592cee2..000000000 --- a/tests/integration/control/collections/conftest.py +++ /dev/null @@ -1,136 +0,0 @@ -import pytest -import uuid -import time -import logging -import dotenv -import os -from datetime import datetime -from pinecone import Pinecone, NotFoundException, PineconeApiException -from ...helpers import get_environment_var - -dotenv.load_dotenv() - -logger = logging.getLogger(__name__) -""" @private """ - -# Generate a unique ID for the entire test run -RUN_ID = str(uuid.uuid4()) - - -@pytest.fixture() -def index_tags(request): - test_name = request.node.name - if test_name is None: - test_name = "" - else: - test_name = test_name.replace(":", "_").replace("[", "_").replace("]", "_") - - tags = { - "test-suite": "pinecone-python-client", - "test-run": RUN_ID, - "test": test_name, - "created-at": datetime.now().strftime("%Y-%m-%d"), - } - - if os.getenv("USER"): - tags["user"] = os.getenv("USER") - return tags - - -@pytest.fixture() -def pc(): - api_key = get_environment_var("PINECONE_API_KEY") - return Pinecone( - api_key=api_key, additional_headers={"sdk-test-suite": "pinecone-python-client"} - ) - - -@pytest.fixture() -def pod_environment(): - return get_environment_var("PINECONE_ENVIRONMENT", "us-east1-gcp") - - -def delete_with_retry(pc, index_name, retries=0, sleep_interval=5): - logger.debug( - "Deleting index " - + index_name - + ", retry " - + str(retries) - + ", next sleep interval " - + str(sleep_interval) - ) - try: - pc.db.index.delete(name=index_name, timeout=-1) - except NotFoundException: - pass - except PineconeApiException as e: - if e.error.code == "PRECONDITON_FAILED": - if retries > 5: - raise Exception("Unable to delete index " + index_name) - 
time.sleep(sleep_interval) - delete_with_retry(pc, index_name, retries + 1, sleep_interval * 2) - else: - logger.error(e.__class__) - logger.error(e) - raise Exception("Unable to delete index " + index_name) - except Exception as e: - logger.error(e.__class__) - logger.error(e) - raise Exception("Unable to delete index " + index_name) - - -def pytest_sessionfinish(session, exitstatus): - """ - Hook that runs after all tests have completed. - This is a good place to clean up any resources that were created during the test session. - """ - logger.info("Running final cleanup after all collection tests...") - - try: - pc = Pinecone() - indexes = pc.db.index.list() - test_indexes = [ - idx for idx in indexes if idx.tags is not None and idx.tags.get("test-run") == RUN_ID - ] - - logger.info(f"Indexes to delete: {[idx.name for idx in test_indexes]}") - - for idx in test_indexes: - if idx.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {idx.name}") - pc.db.index.configure(name=idx.name, deletion_protection="disabled") - # Wait for index to be updated with status ready - logger.info(f"Waiting for index {idx.name} to be ready...") - timeout = 60 - while True and timeout > 0: - is_ready = pc.db.index.describe(name=idx.name).ready - if is_ready: - break - time.sleep(1) - timeout -= 1 - if timeout <= 0: - logger.warning(f"Index {idx.name} did not become ready in time") - else: - logger.info(f"Deletion protection is already disabled for index: {idx.name}") - - for idx in test_indexes: - try: - logger.info(f"Deleting index: {idx.name}") - pc.db.index.delete(name=idx.name, timeout=-1) - except Exception as e: - logger.warning(f"Failed to delete index {idx.name}: {str(e)}") - - collections = pc.db.collection.list() - logger.info(f"Collections to delete: {[col.name for col in collections]}") - - for col in collections: - try: - logger.info(f"Deleting collection: {col.name}") - pc.db.collection.delete(name=col.name) - except Exception as e: - logger.warning(f"Failed to delete collection {col.name}: {str(e)}") - - except Exception as e: - logger.error(f"Error during final cleanup: {str(e)}") - - logger.info("Final cleanup of collections tests completed") diff --git a/tests/integration/control/index/conftest.py b/tests/integration/control/index/conftest.py deleted file mode 100644 index 985c4bb69..000000000 --- a/tests/integration/control/index/conftest.py +++ /dev/null @@ -1,161 +0,0 @@ -import pytest -import uuid -import time -import logging -import dotenv -from pinecone import Pinecone, NotFoundException, PineconeApiException -from ...helpers import generate_index_name, get_environment_var, index_tags as index_tags_helper - -dotenv.load_dotenv() - -logger = logging.getLogger(__name__) -""" @private """ - -# Generate a unique ID for the entire test run -RUN_ID = str(uuid.uuid4()) - - -@pytest.fixture() -def index_tags(request): - return index_tags_helper(request, RUN_ID) - - -@pytest.fixture() -def pc(): - api_key = get_environment_var("PINECONE_API_KEY") - return Pinecone( - api_key=api_key, additional_headers={"sdk-test-suite": "pinecone-python-client"} - ) - - -@pytest.fixture() -def pod_environment(): - return get_environment_var("PINECONE_ENVIRONMENT", "us-east1-gcp") - - -@pytest.fixture() -def serverless_cloud(): - return get_environment_var("SERVERLESS_CLOUD", "aws") - - -@pytest.fixture() -def serverless_region(): - return get_environment_var("SERVERLESS_REGION", "us-west-2") - - -@pytest.fixture() -def create_sl_index_params(index_name, serverless_cloud, 
serverless_region, index_tags): - spec = {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - return dict(name=index_name, dimension=10, metric="cosine", spec=spec, tags=index_tags) - - -@pytest.fixture() -def index_name(request): - test_name = request.node.name - return generate_index_name(test_name) - - -@pytest.fixture() -def ready_sl_index(pc, index_name, create_sl_index_params): - create_sl_index_params["timeout"] = None - pc.create_index(**create_sl_index_params) - yield index_name - pc.db.index.delete(name=index_name, timeout=-1) - - -@pytest.fixture() -def notready_sl_index(pc, index_name, create_sl_index_params): - create_sl_index_params["timeout"] = -1 - pc.create_index(**create_sl_index_params) - yield index_name - - -def delete_with_retry(pc, index_name, retries=0, sleep_interval=5): - logger.debug( - "Deleting index " - + index_name - + ", retry " - + str(retries) - + ", next sleep interval " - + str(sleep_interval) - ) - try: - pc.db.index.delete(name=index_name, timeout=-1) - except NotFoundException: - pass - except PineconeApiException as e: - if e.error.code == "PRECONDITON_FAILED": - if retries > 5: - raise Exception("Unable to delete index " + index_name) - time.sleep(sleep_interval) - delete_with_retry(pc, index_name, retries + 1, sleep_interval * 2) - else: - logger.error(e.__class__) - logger.error(e) - raise Exception("Unable to delete index " + index_name) - except Exception as e: - logger.error(e.__class__) - logger.error(e) - raise Exception("Unable to delete index " + index_name) - - -@pytest.fixture(autouse=True) -def cleanup(pc, index_name): - yield - - try: - desc = pc.db.index.describe(name=index_name) - if desc.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {index_name}") - pc.db.index.configure(name=index_name, deletion_protection="disabled") - logger.debug("Attempting to delete index with name: " + index_name) - pc.db.index.delete(name=index_name, timeout=-1) - except Exception: - pass - - -def pytest_sessionfinish(session, exitstatus): - """ - Hook that runs after all tests have completed. - This is a good place to clean up any resources that were created during the test session. 
- """ - logger.info("Running final cleanup after all tests...") - - try: - pc = Pinecone() - indexes = pc.db.index.list() - test_indexes = [ - idx for idx in indexes if idx.tags is not None and idx.tags.get("test-run") == RUN_ID - ] - - logger.info(f"Indexes to delete: {[idx.name for idx in test_indexes]}") - - for idx in test_indexes: - if idx.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {idx.name}") - pc.db.index.configure(name=idx.name, deletion_protection="disabled") - # Wait for index to be updated with status ready - logger.info(f"Waiting for index {idx.name} to be ready...") - timeout = 60 - while True and timeout > 0: - is_ready = pc.db.index.describe(name=idx.name).ready - if is_ready: - break - time.sleep(1) - timeout -= 1 - if timeout <= 0: - logger.warning(f"Index {idx.name} did not become ready in time") - else: - logger.info(f"Deletion protection is already disabled for index: {idx.name}") - - for idx in test_indexes: - try: - logger.info(f"Deleting index: {idx.name}") - pc.db.index.delete(name=idx.name, timeout=-1) - except Exception as e: - logger.warning(f"Failed to delete index {idx.name}: {str(e)}") - - except Exception as e: - logger.error(f"Error during final cleanup: {str(e)}") - - logger.info("Final cleanup completed") diff --git a/tests/integration/control/backup/__init__.py b/tests/integration/control/resources/__init__.py similarity index 100% rename from tests/integration/control/backup/__init__.py rename to tests/integration/control/resources/__init__.py diff --git a/tests/integration/control/collections/__init__.py b/tests/integration/control/resources/backup/__init__.py similarity index 100% rename from tests/integration/control/collections/__init__.py rename to tests/integration/control/resources/backup/__init__.py diff --git a/tests/integration/control/backup/test_backup.py b/tests/integration/control/resources/backup/test_backup.py similarity index 92% rename from tests/integration/control/backup/test_backup.py rename to tests/integration/control/resources/backup/test_backup.py index 6873c414b..f61df1e5b 100644 --- a/tests/integration/control/backup/test_backup.py +++ b/tests/integration/control/resources/backup/test_backup.py @@ -1,14 +1,15 @@ import pytest import random -from ...helpers import random_string, poll_stats_for_namespace +from ....helpers import random_string, poll_stats_for_namespace import logging import time +from pinecone import Pinecone logger = logging.getLogger(__name__) class TestBackups: - def test_create_backup(self, pc, ready_sl_index, index_tags): + def test_create_backup(self, pc: Pinecone, ready_sl_index, index_tags): desc = pc.db.index.describe(name=ready_sl_index) dimension = desc.dimension @@ -78,8 +79,9 @@ def test_create_backup(self, pc, ready_sl_index, index_tags): assert new_index.metric == desc.metric # Can list restore jobs - restore_jobs = pc.db.restore_job.list(index_name=new_index_name) - assert len(restore_jobs) == 1 + logger.info("Listing restore jobs") + restore_jobs = pc.db.restore_job.list() + assert len(restore_jobs) >= 1, f"Expected at least one restore job, got {len(restore_jobs)}" # Verify that the new index has the same data as the original index new_idx = pc.Index(name=new_index_name) @@ -97,11 +99,7 @@ def test_create_backup(self, pc, ready_sl_index, index_tags): with pytest.raises(Exception): pc.db.backup.describe(backup_id=backup.backup_id) - # Verify that the new index is deleted - backup_list = pc.db.backup.list() - assert len(backup_list) == 0 - - def 
test_create_backup_legacy_syntax(self, pc, ready_sl_index, index_tags): + def test_create_backup_legacy_syntax(self, pc: Pinecone, ready_sl_index, index_tags): desc = pc.describe_index(name=ready_sl_index) dimension = desc.dimension @@ -171,8 +169,8 @@ def test_create_backup_legacy_syntax(self, pc, ready_sl_index, index_tags): assert new_index.metric == desc.metric # Can list restore jobs - restore_jobs = pc.list_restore_jobs(index_name=new_index_name) - assert len(restore_jobs) == 1 + restore_jobs = pc.list_restore_jobs() + assert len(restore_jobs) >= 1 # Verify that the new index has the same data as the original index new_idx = pc.Index(name=new_index_name) diff --git a/tests/integration/control/index/__init__.py b/tests/integration/control/resources/collections/__init__.py similarity index 100% rename from tests/integration/control/index/__init__.py rename to tests/integration/control/resources/collections/__init__.py diff --git a/tests/integration/control/collections/helpers.py b/tests/integration/control/resources/collections/helpers.py similarity index 100% rename from tests/integration/control/collections/helpers.py rename to tests/integration/control/resources/collections/helpers.py diff --git a/tests/integration/control/collections/test_dense_index.py b/tests/integration/control/resources/collections/test_dense_index.py similarity index 99% rename from tests/integration/control/collections/test_dense_index.py rename to tests/integration/control/resources/collections/test_dense_index.py index 58ad0832e..6c76a9622 100644 --- a/tests/integration/control/collections/test_dense_index.py +++ b/tests/integration/control/resources/collections/test_dense_index.py @@ -1,6 +1,6 @@ import time from pinecone import PodSpec -from ...helpers import generate_index_name, generate_collection_name +from ....helpers import generate_index_name, generate_collection_name import logging from .helpers import attempt_cleanup_collection, attempt_cleanup_index, random_vector diff --git a/tests/integration/control/resources/conftest.py b/tests/integration/control/resources/conftest.py new file mode 100644 index 000000000..93060a66f --- /dev/null +++ b/tests/integration/control/resources/conftest.py @@ -0,0 +1,66 @@ +import os +import pytest +import uuid +import logging +import dotenv +from pinecone import Pinecone, PodIndexEnvironment +from ...helpers import delete_indexes_from_run, delete_backups_from_run, default_create_index_params + +dotenv.load_dotenv() + +logger = logging.getLogger(__name__) +""" @private """ + +# Generate a unique ID for the entire test run +RUN_ID = str(uuid.uuid4()) + + +@pytest.fixture() +def pc(): + return Pinecone() + + +@pytest.fixture() +def create_index_params(request): + return default_create_index_params(request, RUN_ID) + + +@pytest.fixture() +def index_name(create_index_params): + return create_index_params["name"] + + +@pytest.fixture() +def index_tags(create_index_params): + return create_index_params["tags"] + + +@pytest.fixture +def pod_environment(): + return os.getenv("POD_ENVIRONMENT", PodIndexEnvironment.US_EAST1_AWS.value) + + +@pytest.fixture() +def ready_sl_index(pc, index_name, create_index_params): + create_index_params["timeout"] = None + pc.create_index(**create_index_params) + yield index_name + pc.db.index.delete(name=index_name, timeout=-1) + + +@pytest.fixture() +def notready_sl_index(pc, index_name, create_index_params): + pc.create_index(**create_index_params, timeout=-1) + yield index_name + + +def pytest_sessionfinish(session, exitstatus): + """ + 
Hook that runs after all tests have completed. + This is a good place to clean up any resources that were created during the test session. + """ + logger.info("Running final cleanup after all tests...") + + pc = Pinecone() + delete_indexes_from_run(pc, RUN_ID) + delete_backups_from_run(pc, RUN_ID) diff --git a/tests/integration/control/restore_job/__init__.py b/tests/integration/control/resources/index/__init__.py similarity index 100% rename from tests/integration/control/restore_job/__init__.py rename to tests/integration/control/resources/index/__init__.py diff --git a/tests/integration/control/index/test_configure.py b/tests/integration/control/resources/index/test_configure.py similarity index 100% rename from tests/integration/control/index/test_configure.py rename to tests/integration/control/resources/index/test_configure.py diff --git a/tests/integration/control/index/test_create.py b/tests/integration/control/resources/index/test_create.py similarity index 77% rename from tests/integration/control/index/test_create.py rename to tests/integration/control/resources/index/test_create.py index a3aa44062..1591ecd52 100644 --- a/tests/integration/control/index/test_create.py +++ b/tests/integration/control/resources/index/test_create.py @@ -59,10 +59,10 @@ def test_create_infinite_wait(self, pc, index_name): assert resp.metric == "cosine" @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"]) - def test_create_default_index_with_metric(self, pc, create_sl_index_params, metric): - create_sl_index_params["metric"] = metric - pc.db.index.create(**create_sl_index_params) - desc = pc.db.index.describe(create_sl_index_params["name"]) + def test_create_default_index_with_metric(self, pc, create_index_params, metric): + create_index_params["metric"] = metric + pc.db.index.create(**create_index_params) + desc = pc.db.index.describe(create_index_params["name"]) if isinstance(metric, str): assert desc.metric == metric else: @@ -105,19 +105,19 @@ def test_create_with_enum_values( assert desc.tags.to_dict() == tags @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"]) - def test_create_dense_index_with_metric(self, pc, create_sl_index_params, metric): - create_sl_index_params["metric"] = metric - create_sl_index_params["vector_type"] = VectorType.DENSE - pc.db.index.create(**create_sl_index_params) - desc = pc.db.index.describe(create_sl_index_params["name"]) + def test_create_dense_index_with_metric(self, pc, create_index_params, metric): + create_index_params["metric"] = metric + create_index_params["vector_type"] = VectorType.DENSE + pc.db.index.create(**create_index_params) + desc = pc.db.index.describe(create_index_params["name"]) assert desc.metric == metric assert desc.vector_type == "dense" - def test_create_with_optional_tags(self, pc, create_sl_index_params): + def test_create_with_optional_tags(self, pc, create_index_params): tags = {"foo": "FOO", "bar": "BAR"} - create_sl_index_params["tags"] = tags - pc.db.index.create(**create_sl_index_params) - desc = pc.db.index.describe(create_sl_index_params["name"]) + create_index_params["tags"] = tags + pc.db.index.create(**create_index_params) + desc = pc.db.index.describe(create_index_params["name"]) assert desc.tags.to_dict() == tags @@ -211,76 +211,74 @@ def test_pod_index_does_not_support_sparse_vectors(self, pc, index_name, index_t class TestCreateServerlessIndexApiErrorCases: - def test_create_index_with_invalid_name(self, pc, create_sl_index_params): - create_sl_index_params["name"] = "Invalid-name" 
+ def test_create_index_with_invalid_name(self, pc, create_index_params): + create_index_params["name"] = "Invalid-name" with pytest.raises(PineconeApiException): - pc.db.index.create(**create_sl_index_params) + pc.db.index.create(**create_index_params) - def test_create_index_invalid_metric(self, pc, create_sl_index_params): - create_sl_index_params["metric"] = "invalid" + def test_create_index_invalid_metric(self, pc, create_index_params): + create_index_params["metric"] = "invalid" with pytest.raises(PineconeApiValueError): - pc.db.index.create(**create_sl_index_params) + pc.db.index.create(**create_index_params) - def test_create_index_with_invalid_neg_dimension(self, pc, create_sl_index_params): - create_sl_index_params["dimension"] = -1 + def test_create_index_with_invalid_neg_dimension(self, pc, create_index_params): + create_index_params["dimension"] = -1 with pytest.raises(PineconeApiValueError): - pc.db.index.create(**create_sl_index_params) + pc.db.index.create(**create_index_params) - def test_create_index_that_already_exists(self, pc, create_sl_index_params): - pc.db.index.create(**create_sl_index_params) + def test_create_index_that_already_exists(self, pc, create_index_params): + pc.db.index.create(**create_index_params) with pytest.raises(PineconeApiException): - pc.db.index.create(**create_sl_index_params) + pc.db.index.create(**create_index_params) class TestCreateServerlessIndexWithTimeout: - def test_create_index_default_timeout(self, pc, create_sl_index_params): - create_sl_index_params["timeout"] = None - pc.db.index.create(**create_sl_index_params) + def test_create_index_default_timeout(self, pc, create_index_params): + create_index_params["timeout"] = None + pc.db.index.create(**create_index_params) # Waits infinitely for index to be ready - desc = pc.db.index.describe(create_sl_index_params["name"]) + desc = pc.db.index.describe(create_index_params["name"]) assert desc.status.ready == True - def test_create_index_when_timeout_set(self, pc, create_sl_index_params): - create_sl_index_params["timeout"] = ( + def test_create_index_when_timeout_set(self, pc, create_index_params): + create_index_params["timeout"] = ( 1000 # effectively infinite, but different code path from None ) - pc.db.index.create(**create_sl_index_params) - desc = pc.db.index.describe(name=create_sl_index_params["name"]) + pc.db.index.create(**create_index_params) + desc = pc.db.index.describe(name=create_index_params["name"]) assert desc.status.ready == True - def test_create_index_with_negative_timeout(self, pc, create_sl_index_params): - create_sl_index_params["timeout"] = -1 - pc.db.index.create(**create_sl_index_params) - desc = pc.db.index.describe(create_sl_index_params["name"]) + def test_create_index_with_negative_timeout(self, pc, create_index_params): + create_index_params["timeout"] = -1 + pc.db.index.create(**create_index_params) + desc = pc.db.index.describe(create_index_params["name"]) # Returns immediately without waiting for index to be ready assert desc.status.ready in [False, True] class TestCreateIndexTypeErrorCases: - def test_create_index_with_invalid_str_dimension(self, pc, create_sl_index_params): - create_sl_index_params["dimension"] = "10" + def test_create_index_with_invalid_str_dimension(self, pc, create_index_params): + create_index_params["dimension"] = "10" with pytest.raises(PineconeApiTypeError): - pc.db.index.create(**create_sl_index_params) + pc.db.index.create(**create_index_params) - def test_create_index_with_missing_dimension(self, pc, create_sl_index_params): 
- del create_sl_index_params["dimension"] + def test_create_index_with_missing_dimension(self, pc, create_index_params): + del create_index_params["dimension"] with pytest.raises(PineconeApiException): - pc.db.index.create(**create_sl_index_params) + pc.db.index.create(**create_index_params) - def test_create_index_w_incompatible_options(self, pc, create_sl_index_params): - create_sl_index_params["pod_type"] = "p1.x2" - create_sl_index_params["environment"] = "us-east1-gcp" - create_sl_index_params["replicas"] = 2 + def test_create_index_w_incompatible_options(self, pc, create_index_params): + create_index_params["pod_type"] = "p1.x2" + create_index_params["environment"] = "us-east1-gcp" + create_index_params["replicas"] = 2 with pytest.raises(TypeError): - pc.db.index.create(**create_sl_index_params) + pc.db.index.create(**create_index_params) @pytest.mark.parametrize("required_option", ["name", "spec", "dimension"]) - def test_create_with_missing_required_options( - self, pc, create_sl_index_params, required_option - ): - del create_sl_index_params[required_option] + def test_create_with_missing_required_options(self, pc, create_index_params, required_option): + del create_index_params[required_option] with pytest.raises(Exception) as e: - pc.db.index.create(**create_sl_index_params) + pc.db.index.create(**create_index_params) assert required_option.lower() in str(e.value).lower() diff --git a/tests/integration/control/index/test_delete.py b/tests/integration/control/resources/index/test_delete.py similarity index 100% rename from tests/integration/control/index/test_delete.py rename to tests/integration/control/resources/index/test_delete.py diff --git a/tests/integration/control/index/test_describe.py b/tests/integration/control/resources/index/test_describe.py similarity index 65% rename from tests/integration/control/index/test_describe.py rename to tests/integration/control/resources/index/test_describe.py index df7f5896c..276176bf1 100644 --- a/tests/integration/control/index/test_describe.py +++ b/tests/integration/control/resources/index/test_describe.py @@ -2,20 +2,20 @@ class TestDescribeIndex: - def test_describe_index_when_ready(self, pc, ready_sl_index, create_sl_index_params): + def test_describe_index_when_ready(self, pc, ready_sl_index, create_index_params): description = pc.db.index.describe(ready_sl_index) assert isinstance(description, IndexModel) assert description.name == ready_sl_index - assert description.dimension == create_sl_index_params["dimension"] - assert description.metric == create_sl_index_params["metric"] + assert description.dimension == create_index_params["dimension"] + assert description.metric == create_index_params["metric"] assert ( description.spec.serverless["cloud"] - == create_sl_index_params["spec"]["serverless"]["cloud"] + == create_index_params["spec"]["serverless"]["cloud"] ) assert ( description.spec.serverless["region"] - == create_sl_index_params["spec"]["serverless"]["region"] + == create_index_params["spec"]["serverless"]["region"] ) assert isinstance(description.host, str) @@ -25,20 +25,20 @@ def test_describe_index_when_ready(self, pc, ready_sl_index, create_sl_index_par assert description.status.state == "Ready" assert description.status.ready == True - def test_describe_index_when_not_ready(self, pc, notready_sl_index, create_sl_index_params): + def test_describe_index_when_not_ready(self, pc, notready_sl_index, create_index_params): description = pc.db.index.describe(notready_sl_index) assert isinstance(description, IndexModel) 
assert description.name == notready_sl_index - assert description.dimension == create_sl_index_params["dimension"] - assert description.metric == create_sl_index_params["metric"] + assert description.dimension == create_index_params["dimension"] + assert description.metric == create_index_params["metric"] assert ( description.spec.serverless["cloud"] - == create_sl_index_params["spec"]["serverless"]["cloud"] + == create_index_params["spec"]["serverless"]["cloud"] ) assert ( description.spec.serverless["region"] - == create_sl_index_params["spec"]["serverless"]["region"] + == create_index_params["spec"]["serverless"]["region"] ) assert isinstance(description.host, str) diff --git a/tests/integration/control/index/test_has.py b/tests/integration/control/resources/index/test_has.py similarity index 63% rename from tests/integration/control/index/test_has.py rename to tests/integration/control/resources/index/test_has.py index 1a356a996..8f55766f8 100644 --- a/tests/integration/control/index/test_has.py +++ b/tests/integration/control/resources/index/test_has.py @@ -1,10 +1,10 @@ -from tests.integration.helpers import random_string +from ....helpers import random_string class TestHasIndex: - def test_index_exists_success(self, pc, create_sl_index_params): - name = create_sl_index_params["name"] - pc.db.index.create(**create_sl_index_params) + def test_index_exists_success(self, pc, create_index_params): + name = create_index_params["name"] + pc.db.index.create(**create_index_params) has_index = pc.db.index.has(name) assert has_index == True diff --git a/tests/integration/control/index/test_list.py b/tests/integration/control/resources/index/test_list.py similarity index 84% rename from tests/integration/control/index/test_list.py rename to tests/integration/control/resources/index/test_list.py index 4e217ea55..e45d15b9b 100644 --- a/tests/integration/control/index/test_list.py +++ b/tests/integration/control/resources/index/test_list.py @@ -2,7 +2,7 @@ class TestListIndexes: - def test_list_indexes_includes_ready_indexes(self, pc, ready_sl_index, create_sl_index_params): + def test_list_indexes_includes_ready_indexes(self, pc, ready_sl_index, create_index_params): list_response = pc.db.index.list() assert len(list_response.indexes) != 0 assert isinstance(list_response.indexes[0], IndexModel) @@ -11,8 +11,8 @@ def test_list_indexes_includes_ready_indexes(self, pc, ready_sl_index, create_sl 0 ] assert created_index.name == ready_sl_index - assert created_index.dimension == create_sl_index_params["dimension"] - assert created_index.metric == create_sl_index_params["metric"] + assert created_index.dimension == create_index_params["dimension"] + assert created_index.metric == create_index_params["metric"] assert ready_sl_index in created_index.host def test_list_indexes_includes_not_ready_indexes(self, pc, notready_sl_index): diff --git a/tests/integration/control_asyncio/backup/__init__.py b/tests/integration/control/resources/restore_job/__init__.py similarity index 100% rename from tests/integration/control_asyncio/backup/__init__.py rename to tests/integration/control/resources/restore_job/__init__.py diff --git a/tests/integration/control/restore_job/test_describe.py b/tests/integration/control/resources/restore_job/test_describe.py similarity index 100% rename from tests/integration/control/restore_job/test_describe.py rename to tests/integration/control/resources/restore_job/test_describe.py diff --git a/tests/integration/control/restore_job/test_list.py 
b/tests/integration/control/resources/restore_job/test_list.py similarity index 100% rename from tests/integration/control/restore_job/test_list.py rename to tests/integration/control/resources/restore_job/test_list.py diff --git a/tests/integration/control/restore_job/conftest.py b/tests/integration/control/restore_job/conftest.py deleted file mode 100644 index 9798da273..000000000 --- a/tests/integration/control/restore_job/conftest.py +++ /dev/null @@ -1,168 +0,0 @@ -import pytest -import uuid -import time -import logging -import dotenv -from pinecone import Pinecone, NotFoundException, PineconeApiException -from ...helpers import generate_index_name, get_environment_var, index_tags as index_tags_helper - -dotenv.load_dotenv() - -logger = logging.getLogger(__name__) -""" @private """ - -# Generate a unique ID for the entire test run -RUN_ID = str(uuid.uuid4()) - - -@pytest.fixture() -def index_tags(request): - return index_tags_helper(request, RUN_ID) - - -@pytest.fixture() -def pc(): - api_key = get_environment_var("PINECONE_API_KEY") - return Pinecone( - api_key=api_key, additional_headers={"sdk-test-suite": "pinecone-python-client"} - ) - - -@pytest.fixture() -def serverless_cloud(): - return get_environment_var("SERVERLESS_CLOUD", "aws") - - -@pytest.fixture() -def serverless_region(): - return get_environment_var("SERVERLESS_REGION", "us-west-2") - - -@pytest.fixture() -def create_sl_index_params(index_name, serverless_cloud, serverless_region, index_tags): - spec = {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - return dict(name=index_name, dimension=10, metric="cosine", spec=spec, tags=index_tags) - - -@pytest.fixture() -def index_name(request): - test_name = request.node.name - return generate_index_name(test_name) - - -@pytest.fixture() -def ready_sl_index(pc, index_name, create_sl_index_params): - create_sl_index_params["timeout"] = None - pc.create_index(**create_sl_index_params) - yield index_name - pc.db.index.delete(name=index_name, timeout=-1) - - -def delete_with_retry(pc, index_name, retries=0, sleep_interval=5): - logger.debug( - "Deleting index " - + index_name - + ", retry " - + str(retries) - + ", next sleep interval " - + str(sleep_interval) - ) - try: - pc.db.index.delete(name=index_name, timeout=-1) - except NotFoundException: - pass - except PineconeApiException as e: - if e.error.code == "PRECONDITON_FAILED": - if retries > 5: - raise Exception("Unable to delete index " + index_name) - time.sleep(sleep_interval) - delete_with_retry(pc, index_name, retries + 1, sleep_interval * 2) - else: - logger.error(e.__class__) - logger.error(e) - raise Exception("Unable to delete index " + index_name) - except Exception as e: - logger.error(e.__class__) - logger.error(e) - raise Exception("Unable to delete index " + index_name) - - -@pytest.fixture(autouse=True) -def cleanup(pc, index_name): - yield - - try: - desc = pc.db.index.describe(name=index_name) - if desc.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {index_name}") - pc.db.index.configure(name=index_name, deletion_protection="disabled") - logger.debug("Attempting to delete index with name: " + index_name) - pc.db.index.delete(name=index_name, timeout=-1) - except Exception: - pass - - for backup in pc.db.backup.list(): - logger.debug(f"Deleting backup: {backup.name}") - try: - pc.db.backup.delete(backup_id=backup.backup_id) - except Exception as e: - logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}") - - -def 
pytest_sessionfinish(session, exitstatus): - """ - Hook that runs after all tests have completed. - This is a good place to clean up any resources that were created during the test session. - """ - logger.info("Running final cleanup after all tests...") - - try: - pc = Pinecone() - indexes = pc.db.index.list() - test_indexes = [ - idx for idx in indexes if idx.tags is not None and idx.tags.get("test-run") == RUN_ID - ] - - logger.info(f"Indexes to delete: {[idx.name for idx in test_indexes]}") - - for idx in test_indexes: - if idx.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {idx.name}") - pc.db.index.configure(name=idx.name, deletion_protection="disabled") - # Wait for index to be updated with status ready - logger.info(f"Waiting for index {idx.name} to be ready...") - timeout = 60 - while True and timeout > 0: - is_ready = pc.db.index.describe(name=idx.name).ready - if is_ready: - break - time.sleep(1) - timeout -= 1 - if timeout <= 0: - logger.warning(f"Index {idx.name} did not become ready in time") - else: - logger.info(f"Deletion protection is already disabled for index: {idx.name}") - - for idx in test_indexes: - try: - logger.info(f"Deleting index: {idx.name}") - pc.db.index.delete(name=idx.name, timeout=-1) - except Exception as e: - logger.warning(f"Failed to delete index {idx.name}: {str(e)}") - - backups = pc.db.backup.list() - if len(backups) > 0: - logger.info(f"Deleting {len(backups)} backups") - for backup in backups: - logger.debug(f"Deleting backup: {backup.name}") - try: - pc.db.backup.delete(backup_id=backup.backup_id) - except Exception as e: - logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}") - else: - logger.info("No backups to delete") - - except Exception as e: - logger.error(f"Error during final cleanup: {str(e)}") - - logger.info("Final cleanup completed") diff --git a/tests/integration/control_asyncio/backup/conftest.py b/tests/integration/control_asyncio/backup/conftest.py deleted file mode 100644 index 3a7a56078..000000000 --- a/tests/integration/control_asyncio/backup/conftest.py +++ /dev/null @@ -1,220 +0,0 @@ -import pytest -import time -import random -import asyncio -import uuid -from ...helpers import get_environment_var, generate_index_name, index_tags as index_tags_helper -import logging -from typing import Callable, Optional, Awaitable, Union - -from pinecone import ( - CloudProvider, - AwsRegion, - ServerlessSpec, - PineconeApiException, - NotFoundException, -) - -logger = logging.getLogger(__name__) -""" @private """ - -# Generate a unique ID for the entire test run -RUN_ID = str(uuid.uuid4()) - - -@pytest.fixture() -def index_tags(request): - return index_tags_helper(request, RUN_ID) - - -logger = logging.getLogger(__name__) - - -def build_client(): - from pinecone import PineconeAsyncio - - return PineconeAsyncio() - - -@pytest.fixture(scope="session") -def client(): - # This returns the sync client. Not for use in tests - # but can be used to help with cleanup after test runs - from pinecone import Pinecone - - return Pinecone() - - -async def poll_for_freshness(asyncio_idx, target_namespace, target_vector_count): - max_wait_time = 60 * 3 # 3 minutes - time_waited = 0 - wait_per_iteration = 5 - - while True: - stats = await asyncio_idx.describe_index_stats() - logger.debug( - "Polling for freshness on index %s. Current vector count: %s. 
Waiting for: %s", - asyncio_idx, - stats.total_vector_count, - target_vector_count, - ) - if target_namespace == "": - if stats.total_vector_count >= target_vector_count: - break - else: - if ( - target_namespace in stats.namespaces - and stats.namespaces[target_namespace].vector_count >= target_vector_count - ): - break - time_waited += wait_per_iteration - if time_waited >= max_wait_time: - raise TimeoutError( - "Timeout waiting for index to have expected vector count of {}".format( - target_vector_count - ) - ) - await asyncio.sleep(wait_per_iteration) - - return stats - - -async def wait_until( - condition: Union[Callable[[], bool], Callable[[], Awaitable[bool]]], - timeout: Optional[float] = 10.0, - interval: float = 0.1, -) -> None: - """ - Waits asynchronously until the given (async or sync) condition returns True or times out. - - Args: - condition: A callable that returns a boolean or an awaitable boolean, indicating if the wait is over. - timeout: Maximum time in seconds to wait for the condition to become True. If None, wait indefinitely. - interval: Time in seconds between checks of the condition. - - Raises: - asyncio.TimeoutError: If the condition is not met within the timeout period. - """ - start_time = asyncio.get_event_loop().time() - - while True: - result = await condition() if asyncio.iscoroutinefunction(condition) else condition() - if result: - return - - if timeout is not None and (asyncio.get_event_loop().time() - start_time) > timeout: - raise asyncio.TimeoutError("Condition not met within the timeout period.") - - remaining_time = ( - (start_time + timeout) - asyncio.get_event_loop().time() - if timeout is not None - else None - ) - logger.debug( - "Condition not met yet. Waiting for %.2f seconds. Timeout in %.2f seconds.", - interval, - remaining_time, - ) - await asyncio.sleep(interval) - - -@pytest.fixture() -def serverless_cloud(): - return get_environment_var("SERVERLESS_CLOUD", "aws") - - -@pytest.fixture() -def serverless_region(): - return get_environment_var("SERVERLESS_REGION", "us-west-2") - - -@pytest.fixture() -def spec1(serverless_cloud, serverless_region): - return {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - - -@pytest.fixture() -def spec2(): - return ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1) - - -@pytest.fixture() -def spec3(): - return {"serverless": {"cloud": CloudProvider.AWS, "region": AwsRegion.US_EAST_1}} - - -@pytest.fixture() -def create_sl_index_params(index_name, serverless_cloud, serverless_region): - spec = {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - return dict(name=index_name, dimension=10, metric="cosine", spec=spec) - - -@pytest.fixture() -def random_vector(): - return [random.uniform(0, 1) for _ in range(10)] - - -@pytest.fixture() -def index_name(request): - test_name = request.node.name - return generate_index_name(test_name) - - -@pytest.fixture() -def ready_sl_index(client, index_name, create_sl_index_params): - create_sl_index_params["timeout"] = None - client.create_index(**create_sl_index_params) - yield index_name - client.delete_index(index_name, -1) - - -@pytest.fixture() -def notready_sl_index(client, index_name, create_sl_index_params): - client.create_index(**create_sl_index_params, timeout=-1) - yield index_name - - -def delete_with_retry(client, index_name, retries=0, sleep_interval=5): - logger.info( - f"Deleting index {index_name}, retry {retries}, next sleep interval {sleep_interval}" - ) - try: - 
client.delete_index(index_name, -1) - except NotFoundException: - pass - except PineconeApiException as e: - if e.error.code == "PRECONDITON_FAILED": - if retries > 5: - raise "Unable to delete index " + index_name - time.sleep(sleep_interval) - delete_with_retry(client, index_name, retries + 1, sleep_interval * 2) - else: - print(e.__class__) - print(e) - raise "Unable to delete index " + index_name - except Exception as e: - logger.warning(f"Failed to delete index: {index_name}: {str(e)}") - raise "Unable to delete index " + index_name - - -@pytest.fixture(autouse=True) -async def cleanup(client, index_name): - yield - - try: - desc = client.index.describe(name=index_name) - if desc.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {index_name}") - client.index.configure(name=index_name, deletion_protection="disabled") - logger.debug("Attempting to delete index with name: " + index_name) - client.index.delete(name=index_name, timeout=-1) - except Exception as e: - logger.warning(f"Failed to delete index: {index_name}: {str(e)}") - pass - - for backup in client.db.backup.list(): - logger.debug(f"Deleting backup: {backup.name}") - try: - client.db.backup.delete(backup_id=backup.backup_id) - except Exception as e: - logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}") diff --git a/tests/integration/control_asyncio/conftest.py b/tests/integration/control_asyncio/conftest.py index 33c2b529d..acbcce0bb 100644 --- a/tests/integration/control_asyncio/conftest.py +++ b/tests/integration/control_asyncio/conftest.py @@ -1,10 +1,8 @@ import pytest import time import random -import asyncio from ..helpers import get_environment_var, generate_index_name import logging -from typing import Callable, Optional, Awaitable, Union from pinecone import ( CloudProvider, @@ -37,79 +35,6 @@ def build_pc(): return build_client -async def poll_for_freshness(asyncio_idx, target_namespace, target_vector_count): - max_wait_time = 60 * 3 # 3 minutes - time_waited = 0 - wait_per_iteration = 5 - - while True: - stats = await asyncio_idx.describe_index_stats() - logger.debug( - "Polling for freshness on index %s. Current vector count: %s. Waiting for: %s", - asyncio_idx, - stats.total_vector_count, - target_vector_count, - ) - if target_namespace == "": - if stats.total_vector_count >= target_vector_count: - break - else: - if ( - target_namespace in stats.namespaces - and stats.namespaces[target_namespace].vector_count >= target_vector_count - ): - break - time_waited += wait_per_iteration - if time_waited >= max_wait_time: - raise TimeoutError( - "Timeout waiting for index to have expected vector count of {}".format( - target_vector_count - ) - ) - await asyncio.sleep(wait_per_iteration) - - return stats - - -async def wait_until( - condition: Union[Callable[[], bool], Callable[[], Awaitable[bool]]], - timeout: Optional[float] = 10.0, - interval: float = 0.1, -) -> None: - """ - Waits asynchronously until the given (async or sync) condition returns True or times out. - - Args: - condition: A callable that returns a boolean or an awaitable boolean, indicating if the wait is over. - timeout: Maximum time in seconds to wait for the condition to become True. If None, wait indefinitely. - interval: Time in seconds between checks of the condition. - - Raises: - asyncio.TimeoutError: If the condition is not met within the timeout period. 
- """ - start_time = asyncio.get_event_loop().time() - - while True: - result = await condition() if asyncio.iscoroutinefunction(condition) else condition() - if result: - return - - if timeout is not None and (asyncio.get_event_loop().time() - start_time) > timeout: - raise asyncio.TimeoutError("Condition not met within the timeout period.") - - remaining_time = ( - (start_time + timeout) - asyncio.get_event_loop().time() - if timeout is not None - else None - ) - logger.debug( - "Condition not met yet. Waiting for %.2f seconds. Timeout in %.2f seconds.", - interval, - remaining_time, - ) - await asyncio.sleep(interval) - - @pytest.fixture() def serverless_cloud(): return get_environment_var("SERVERLESS_CLOUD", "aws") diff --git a/tests/integration/control_asyncio/index/conftest.py b/tests/integration/control_asyncio/index/conftest.py deleted file mode 100644 index ea17bc588..000000000 --- a/tests/integration/control_asyncio/index/conftest.py +++ /dev/null @@ -1,206 +0,0 @@ -import pytest -import time -import random -import asyncio -from ...helpers import get_environment_var, generate_index_name -import logging -from typing import Callable, Optional, Awaitable, Union - -from pinecone import ( - CloudProvider, - AwsRegion, - ServerlessSpec, - PineconeApiException, - NotFoundException, -) - -logger = logging.getLogger(__name__) - - -def build_client(): - from pinecone import PineconeAsyncio - - return PineconeAsyncio() - - -@pytest.fixture(scope="session") -def client(): - # This returns the sync client. Not for use in tests - # but can be used to help with cleanup after test runs - from pinecone import Pinecone - - return Pinecone() - - -@pytest.fixture(scope="session") -def build_pc(): - return build_client - - -async def poll_for_freshness(asyncio_idx, target_namespace, target_vector_count): - max_wait_time = 60 * 3 # 3 minutes - time_waited = 0 - wait_per_iteration = 5 - - while True: - stats = await asyncio_idx.describe_index_stats() - logger.debug( - "Polling for freshness on index %s. Current vector count: %s. Waiting for: %s", - asyncio_idx, - stats.total_vector_count, - target_vector_count, - ) - if target_namespace == "": - if stats.total_vector_count >= target_vector_count: - break - else: - if ( - target_namespace in stats.namespaces - and stats.namespaces[target_namespace].vector_count >= target_vector_count - ): - break - time_waited += wait_per_iteration - if time_waited >= max_wait_time: - raise TimeoutError( - "Timeout waiting for index to have expected vector count of {}".format( - target_vector_count - ) - ) - await asyncio.sleep(wait_per_iteration) - - return stats - - -async def wait_until( - condition: Union[Callable[[], bool], Callable[[], Awaitable[bool]]], - timeout: Optional[float] = 10.0, - interval: float = 0.1, -) -> None: - """ - Waits asynchronously until the given (async or sync) condition returns True or times out. - - Args: - condition: A callable that returns a boolean or an awaitable boolean, indicating if the wait is over. - timeout: Maximum time in seconds to wait for the condition to become True. If None, wait indefinitely. - interval: Time in seconds between checks of the condition. - - Raises: - asyncio.TimeoutError: If the condition is not met within the timeout period. 
- """ - start_time = asyncio.get_event_loop().time() - - while True: - result = await condition() if asyncio.iscoroutinefunction(condition) else condition() - if result: - return - - if timeout is not None and (asyncio.get_event_loop().time() - start_time) > timeout: - raise asyncio.TimeoutError("Condition not met within the timeout period.") - - remaining_time = ( - (start_time + timeout) - asyncio.get_event_loop().time() - if timeout is not None - else None - ) - logger.debug( - "Condition not met yet. Waiting for %.2f seconds. Timeout in %.2f seconds.", - interval, - remaining_time, - ) - await asyncio.sleep(interval) - - -@pytest.fixture() -def serverless_cloud(): - return get_environment_var("SERVERLESS_CLOUD", "aws") - - -@pytest.fixture() -def serverless_region(): - return get_environment_var("SERVERLESS_REGION", "us-west-2") - - -@pytest.fixture() -def spec1(serverless_cloud, serverless_region): - return {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - - -@pytest.fixture() -def spec2(): - return ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1) - - -@pytest.fixture() -def spec3(): - return {"serverless": {"cloud": CloudProvider.AWS, "region": AwsRegion.US_EAST_1}} - - -@pytest.fixture() -def create_sl_index_params(index_name, serverless_cloud, serverless_region): - spec = {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - return dict(name=index_name, dimension=10, metric="cosine", spec=spec) - - -@pytest.fixture() -def random_vector(): - return [random.uniform(0, 1) for _ in range(10)] - - -@pytest.fixture() -def index_name(request): - test_name = request.node.name - return generate_index_name(test_name) - - -@pytest.fixture() -def ready_sl_index(client, index_name, create_sl_index_params): - create_sl_index_params["timeout"] = None - client.create_index(**create_sl_index_params) - yield index_name - client.delete_index(index_name, -1) - - -@pytest.fixture() -def notready_sl_index(client, index_name, create_sl_index_params): - client.create_index(**create_sl_index_params, timeout=-1) - yield index_name - - -def delete_with_retry(client, index_name, retries=0, sleep_interval=5): - print( - "Deleting index " - + index_name - + ", retry " - + str(retries) - + ", next sleep interval " - + str(sleep_interval) - ) - try: - client.delete_index(index_name, -1) - except NotFoundException: - pass - except PineconeApiException as e: - if e.error.code == "PRECONDITON_FAILED": - if retries > 5: - raise "Unable to delete index " + index_name - time.sleep(sleep_interval) - delete_with_retry(client, index_name, retries + 1, sleep_interval * 2) - else: - print(e.__class__) - print(e) - raise "Unable to delete index " + index_name - except Exception as e: - print(e.__class__) - print(e) - raise "Unable to delete index " + index_name - - -@pytest.fixture(autouse=True) -async def cleanup(client, index_name): - yield - - try: - logger.debug("Attempting to delete index with name: " + index_name) - client.index.delete(name=index_name, timeout=-1) - except Exception: - pass diff --git a/tests/integration/control_asyncio/index/__init__.py b/tests/integration/control_asyncio/resources/__init__.py similarity index 100% rename from tests/integration/control_asyncio/index/__init__.py rename to tests/integration/control_asyncio/resources/__init__.py diff --git a/tests/integration/control_asyncio/restore_job/__init__.py b/tests/integration/control_asyncio/resources/backup/__init__.py similarity index 100% rename from 
rename from tests/integration/control_asyncio/restore_job/__init__.py
rename to tests/integration/control_asyncio/resources/backup/__init__.py
diff --git a/tests/integration/control_asyncio/backup/test_backup.py b/tests/integration/control_asyncio/resources/backup/test_backup.py
similarity index 97%
rename from tests/integration/control_asyncio/backup/test_backup.py
rename to tests/integration/control_asyncio/resources/backup/test_backup.py
index 47a67c546..76b720f45 100644
--- a/tests/integration/control_asyncio/backup/test_backup.py
+++ b/tests/integration/control_asyncio/resources/backup/test_backup.py
@@ -1,7 +1,7 @@
 import pytest
 import random
 import asyncio
-from ...helpers import random_string
+from ....helpers import random_string
 import logging
 
 from pinecone import PineconeAsyncio
@@ -103,10 +103,6 @@ async def test_create_backup(self, ready_sl_index, index_tags):
         with pytest.raises(Exception):
             await pc.db.backup.describe(backup_id=backup.backup_id)
 
-        # Verify that the new index is deleted
-        backup_list = await pc.db.backup.list()
-        assert len(backup_list) == 0
-
     async def test_create_backup_legacy_syntax(self, ready_sl_index, index_tags):
         async with PineconeAsyncio() as pc:
             desc = await pc.describe_index(name=ready_sl_index)
diff --git a/tests/integration/control_asyncio/resources/conftest.py b/tests/integration/control_asyncio/resources/conftest.py
new file mode 100644
index 000000000..f7135575f
--- /dev/null
+++ b/tests/integration/control_asyncio/resources/conftest.py
@@ -0,0 +1,66 @@
+import pytest
+import uuid
+import logging
+import dotenv
+import os
+from pinecone import Pinecone, PodIndexEnvironment
+from ...helpers import delete_indexes_from_run, delete_backups_from_run, default_create_index_params
+
+dotenv.load_dotenv()
+
+logger = logging.getLogger(__name__)
+""" @private """
+
+# Generate a unique ID for the entire test run
+RUN_ID = str(uuid.uuid4())
+
+
+@pytest.fixture()
+def pc():
+    return Pinecone()
+
+
+@pytest.fixture
+def pod_environment():
+    return os.getenv("POD_ENVIRONMENT", PodIndexEnvironment.US_EAST1_AWS.value)
+
+
+@pytest.fixture()
+def create_index_params(request):
+    return default_create_index_params(request, RUN_ID)
+
+
+@pytest.fixture()
+def index_name(create_index_params):
+    return create_index_params["name"]
+
+
+@pytest.fixture()
+def index_tags(create_index_params):
+    return create_index_params["tags"]
+
+
+@pytest.fixture()
+def ready_sl_index(pc, index_name, create_index_params):
+    create_index_params["timeout"] = None
+    pc.create_index(**create_index_params)
+    yield index_name
+    pc.db.index.delete(name=index_name, timeout=-1)
+
+
+@pytest.fixture()
+def notready_sl_index(pc, index_name, create_index_params):
+    pc.create_index(**create_index_params, timeout=-1)
+    yield index_name
+
+
+def pytest_sessionfinish(session, exitstatus):
+    """
+    Hook that runs after all tests have completed.
+    This is a good place to clean up any resources that were created during the test session.
+    """
+    logger.info("Running final cleanup after all tests...")
+
+    pc = Pinecone()
+    delete_indexes_from_run(pc, RUN_ID)
+    delete_backups_from_run(pc, RUN_ID)
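Reviewer note: the new resources/conftest.py centralizes index naming, tagging, and teardown, so individual resource tests only declare the fixtures they need. A minimal sketch of a test consuming these fixtures (the test body is illustrative and not part of this diff; the explicit @pytest.mark.asyncio marker is an assumption, since the suite may configure asyncio mode globally):

import pytest
from pinecone import PineconeAsyncio


@pytest.mark.asyncio
async def test_describe_ready_index(ready_sl_index, index_tags):
    # ready_sl_index yields the name of an index the conftest has already
    # created (and will delete); index_tags carries the test-run tag that
    # pytest_sessionfinish uses to sweep up anything that leaks.
    async with PineconeAsyncio() as pc:
        desc = await pc.db.index.describe(name=ready_sl_index)
        assert desc.name == ready_sl_index
        assert desc.tags.to_dict() == index_tags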
+ """ + logger.info("Running final cleanup after all tests...") + + pc = Pinecone() + delete_indexes_from_run(pc, RUN_ID) + delete_backups_from_run(pc, RUN_ID) diff --git a/tests/integration/control_asyncio/resources/index/__init__.py b/tests/integration/control_asyncio/resources/index/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/integration/control_asyncio/resources/index/conftest.py b/tests/integration/control_asyncio/resources/index/conftest.py new file mode 100644 index 000000000..de50f077a --- /dev/null +++ b/tests/integration/control_asyncio/resources/index/conftest.py @@ -0,0 +1,18 @@ +import pytest + +from pinecone import CloudProvider, AwsRegion, ServerlessSpec + + +@pytest.fixture() +def spec1(serverless_cloud, serverless_region): + return {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} + + +@pytest.fixture() +def spec2(): + return ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1) + + +@pytest.fixture() +def spec3(): + return {"serverless": {"cloud": CloudProvider.AWS, "region": AwsRegion.US_EAST_1}} diff --git a/tests/integration/control_asyncio/index/test_create.py b/tests/integration/control_asyncio/resources/index/test_create.py similarity index 74% rename from tests/integration/control_asyncio/index/test_create.py rename to tests/integration/control_asyncio/resources/index/test_create.py index b85cfebcf..5a64c3549 100644 --- a/tests/integration/control_asyncio/index/test_create.py +++ b/tests/integration/control_asyncio/resources/index/test_create.py @@ -49,11 +49,13 @@ async def test_create_infinite_wait(self, index_name, spec1): assert resp.metric == "cosine" @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"]) - async def test_create_default_index_with_metric(self, index_name, metric, spec1): + async def test_create_default_index_with_metric(self, index_name, metric, spec1, index_tags): pc = PineconeAsyncio() - await pc.db.index.create(name=index_name, dimension=10, spec=spec1, metric=metric) - desc = await pc.db.index.describe(index_name) + await pc.db.index.create( + name=index_name, dimension=10, spec=spec1, metric=metric, tags=index_tags + ) + desc = await pc.db.index.describe(name=index_name) if isinstance(metric, str): assert desc.metric == metric else: @@ -62,15 +64,15 @@ async def test_create_default_index_with_metric(self, index_name, metric, spec1) await pc.close() @pytest.mark.parametrize( - "metric_enum,vector_type_enum,dim,tags", + "metric_enum,vector_type_enum,dim", [ - (Metric.COSINE, VectorType.DENSE, 10, None), - (Metric.EUCLIDEAN, VectorType.DENSE, 10, {"env": "prod"}), - (Metric.DOTPRODUCT, VectorType.SPARSE, None, {"env": "dev"}), + (Metric.COSINE, VectorType.DENSE, 10), + (Metric.EUCLIDEAN, VectorType.DENSE, 10), + (Metric.DOTPRODUCT, VectorType.SPARSE, None), ], ) async def test_create_with_enum_values_and_tags( - self, index_name, metric_enum, vector_type_enum, dim, tags + self, index_name, metric_enum, vector_type_enum, dim, index_tags ): pc = PineconeAsyncio() args = { @@ -79,14 +81,14 @@ async def test_create_with_enum_values_and_tags( "vector_type": vector_type_enum, "deletion_protection": DeletionProtection.DISABLED, "spec": ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1), - "tags": tags, + "tags": index_tags, } if dim is not None: args["dimension"] = dim await pc.db.index.create(**args) - desc = await pc.db.index.describe(index_name) + desc = await pc.db.index.describe(name=index_name) assert desc.metric == metric_enum.value 
diff --git a/tests/integration/control_asyncio/index/test_create.py b/tests/integration/control_asyncio/resources/index/test_create.py
similarity index 74%
rename from tests/integration/control_asyncio/index/test_create.py
rename to tests/integration/control_asyncio/resources/index/test_create.py
index b85cfebcf..5a64c3549 100644
--- a/tests/integration/control_asyncio/index/test_create.py
+++ b/tests/integration/control_asyncio/resources/index/test_create.py
@@ -49,11 +49,13 @@ async def test_create_infinite_wait(self, index_name, spec1):
         assert resp.metric == "cosine"
 
     @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"])
-    async def test_create_default_index_with_metric(self, index_name, metric, spec1):
+    async def test_create_default_index_with_metric(self, index_name, metric, spec1, index_tags):
         pc = PineconeAsyncio()
 
-        await pc.db.index.create(name=index_name, dimension=10, spec=spec1, metric=metric)
-        desc = await pc.db.index.describe(index_name)
+        await pc.db.index.create(
+            name=index_name, dimension=10, spec=spec1, metric=metric, tags=index_tags
+        )
+        desc = await pc.db.index.describe(name=index_name)
         if isinstance(metric, str):
             assert desc.metric == metric
         else:
@@ -62,15 +64,15 @@ async def test_create_default_index_with_metric(self, index_name, metric, spec1)
         await pc.close()
 
     @pytest.mark.parametrize(
-        "metric_enum,vector_type_enum,dim,tags",
+        "metric_enum,vector_type_enum,dim",
         [
-            (Metric.COSINE, VectorType.DENSE, 10, None),
-            (Metric.EUCLIDEAN, VectorType.DENSE, 10, {"env": "prod"}),
-            (Metric.DOTPRODUCT, VectorType.SPARSE, None, {"env": "dev"}),
+            (Metric.COSINE, VectorType.DENSE, 10),
+            (Metric.EUCLIDEAN, VectorType.DENSE, 10),
+            (Metric.DOTPRODUCT, VectorType.SPARSE, None),
         ],
     )
     async def test_create_with_enum_values_and_tags(
-        self, index_name, metric_enum, vector_type_enum, dim, tags
+        self, index_name, metric_enum, vector_type_enum, dim, index_tags
     ):
         pc = PineconeAsyncio()
         args = {
@@ -79,14 +81,14 @@ async def test_create_with_enum_values_and_tags(
             "vector_type": vector_type_enum,
             "deletion_protection": DeletionProtection.DISABLED,
             "spec": ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1),
-            "tags": tags,
+            "tags": index_tags,
         }
         if dim is not None:
             args["dimension"] = dim
 
         await pc.db.index.create(**args)
-        desc = await pc.db.index.describe(index_name)
+        desc = await pc.db.index.describe(name=index_name)
 
         assert desc.metric == metric_enum.value
         assert desc.vector_type == vector_type_enum.value
         assert desc.dimension == dim
@@ -94,48 +96,57 @@ async def test_create_with_enum_values_and_tags(
         assert desc.name == index_name
         assert desc.spec.serverless.cloud == "aws"
         assert desc.spec.serverless.region == "us-east-1"
-        if tags:
-            assert desc.tags.to_dict() == tags
+        assert desc.tags.to_dict() == index_tags
 
+        await pc.db.index.delete(name=index_name)
         await pc.close()
 
     @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"])
-    async def test_create_dense_index_with_metric(self, index_name, spec1, metric):
+    async def test_create_dense_index_with_metric(self, index_name, spec1, metric, index_tags):
         pc = PineconeAsyncio()
 
         await pc.create_index(
-            name=index_name, dimension=10, spec=spec1, metric=metric, vector_type=VectorType.DENSE
+            name=index_name,
+            dimension=10,
+            spec=spec1,
+            metric=metric,
+            vector_type=VectorType.DENSE,
+            tags=index_tags,
         )
 
-        desc = await pc.db.index.describe(index_name)
+        desc = await pc.db.index.describe(name=index_name)
         assert desc.metric == metric
         assert desc.vector_type == "dense"
 
         await pc.close()
 
-    async def test_create_with_optional_tags(self, index_name, spec1):
+    async def test_create_with_optional_tags(self, index_name, spec1, index_tags):
         pc = PineconeAsyncio()
-        tags = {"foo": "FOO", "bar": "BAR"}
 
-        await pc.create_index(name=index_name, dimension=10, spec=spec1, tags=tags)
+        await pc.create_index(name=index_name, dimension=10, spec=spec1, tags=index_tags)
 
-        desc = await pc.db.index.describe(index_name)
-        assert desc.tags.to_dict() == tags
+        desc = await pc.db.index.describe(name=index_name)
+        assert desc.tags.to_dict() == index_tags
 
+        await pc.db.index.delete(name=index_name)
         await pc.close()
 
-    async def test_create_sparse_index(self, index_name, spec1):
+    async def test_create_sparse_index(self, index_name, spec1, index_tags):
         pc = PineconeAsyncio()
 
         await pc.create_index(
-            name=index_name, spec=spec1, metric=Metric.DOTPRODUCT, vector_type=VectorType.SPARSE
+            name=index_name,
+            spec=spec1,
+            metric=Metric.DOTPRODUCT,
+            vector_type=VectorType.SPARSE,
+            tags=index_tags,
        )
 
-        desc = await pc.db.index.describe(index_name)
+        desc = await pc.db.index.describe(name=index_name)
         assert desc.vector_type == "sparse"
         assert desc.dimension is None
-        assert desc.vector_type == "sparse"
         assert desc.metric == "dotproduct"
 
+        await pc.db.index.delete(name=index_name)
         await pc.close()
 
-    async def test_create_with_deletion_protection(self, index_name, spec1):
+    async def test_create_with_deletion_protection(self, index_name, spec1, index_tags):
         pc = PineconeAsyncio()
 
         await pc.create_index(
@@ -144,9 +155,10 @@ async def test_create_with_deletion_protection(self, index_name, spec1):
             metric=Metric.DOTPRODUCT,
             vector_type=VectorType.SPARSE,
             deletion_protection=DeletionProtection.ENABLED,
+            tags=index_tags,
         )
 
-        desc = await pc.db.index.describe(index_name)
+        desc = await pc.db.index.describe(name=index_name)
         assert desc.deletion_protection == "enabled"
         assert desc.metric == "dotproduct"
         assert desc.vector_type == "sparse"
@@ -157,6 +169,6 @@ async def test_create_with_deletion_protection(self, index_name, spec1):
 
         await pc.configure_index(index_name, deletion_protection=DeletionProtection.DISABLED)
 
-        desc2 = await pc.db.index.describe(index_name)
+        desc2 = await pc.db.index.describe(name=index_name)
         assert desc2.deletion_protection == "disabled"
         await pc.close()
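These tests open a client per test and close it explicitly with await pc.close(). The equivalent context-manager form, which test_backup.py above already uses, releases the client's HTTP session even when an assertion fails mid-test; a sketch under the same fixtures (the test itself is illustrative, and the marker is shown only to keep the example self-contained):

import pytest
from pinecone import PineconeAsyncio


@pytest.mark.asyncio
async def test_create_and_describe(index_name, spec1, index_tags):
    # async-with guarantees the client is closed even if an assert raises.
    async with PineconeAsyncio() as pc:
        await pc.create_index(name=index_name, dimension=10, spec=spec1, tags=index_tags)
        desc = await pc.db.index.describe(name=index_name)
        assert desc.tags.to_dict() == index_tags
        # Cleanup mirrors the pattern the suite uses after tag assertions.
        await pc.db.index.delete(name=index_name)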
diff --git a/tests/integration/control_asyncio/resources/restore_job/__init__.py b/tests/integration/control_asyncio/resources/restore_job/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/integration/control_asyncio/restore_job/test_describe.py b/tests/integration/control_asyncio/resources/restore_job/test_describe.py
similarity index 100%
rename from tests/integration/control_asyncio/restore_job/test_describe.py
rename to tests/integration/control_asyncio/resources/restore_job/test_describe.py
diff --git a/tests/integration/control_asyncio/restore_job/test_list.py b/tests/integration/control_asyncio/resources/restore_job/test_list.py
similarity index 100%
rename from tests/integration/control_asyncio/restore_job/test_list.py
rename to tests/integration/control_asyncio/resources/restore_job/test_list.py
diff --git a/tests/integration/control_asyncio/restore_job/conftest.py b/tests/integration/control_asyncio/restore_job/conftest.py
deleted file mode 100644
index 3a7a56078..000000000
--- a/tests/integration/control_asyncio/restore_job/conftest.py
+++ /dev/null
@@ -1,220 +0,0 @@
-import pytest
-import time
-import random
-import asyncio
-import uuid
-from ...helpers import get_environment_var, generate_index_name, index_tags as index_tags_helper
-import logging
-from typing import Callable, Optional, Awaitable, Union
-
-from pinecone import (
-    CloudProvider,
-    AwsRegion,
-    ServerlessSpec,
-    PineconeApiException,
-    NotFoundException,
-)
-
-logger = logging.getLogger(__name__)
-""" @private """
-
-# Generate a unique ID for the entire test run
-RUN_ID = str(uuid.uuid4())
-
-
-@pytest.fixture()
-def index_tags(request):
-    return index_tags_helper(request, RUN_ID)
-
-
-logger = logging.getLogger(__name__)
-
-
-def build_client():
-    from pinecone import PineconeAsyncio
-
-    return PineconeAsyncio()
-
-
-@pytest.fixture(scope="session")
-def client():
-    # This returns the sync client. Not for use in tests
-    # but can be used to help with cleanup after test runs
-    from pinecone import Pinecone
-
-    return Pinecone()
-
-
-async def poll_for_freshness(asyncio_idx, target_namespace, target_vector_count):
-    max_wait_time = 60 * 3  # 3 minutes
-    time_waited = 0
-    wait_per_iteration = 5
-
-    while True:
-        stats = await asyncio_idx.describe_index_stats()
-        logger.debug(
-            "Polling for freshness on index %s. Current vector count: %s. Waiting for: %s",
-            asyncio_idx,
-            stats.total_vector_count,
-            target_vector_count,
-        )
-        if target_namespace == "":
-            if stats.total_vector_count >= target_vector_count:
-                break
-        else:
-            if (
-                target_namespace in stats.namespaces
-                and stats.namespaces[target_namespace].vector_count >= target_vector_count
-            ):
-                break
-        time_waited += wait_per_iteration
-        if time_waited >= max_wait_time:
-            raise TimeoutError(
-                "Timeout waiting for index to have expected vector count of {}".format(
-                    target_vector_count
-                )
-            )
-        await asyncio.sleep(wait_per_iteration)
-
-    return stats
-
-
-async def wait_until(
-    condition: Union[Callable[[], bool], Callable[[], Awaitable[bool]]],
-    timeout: Optional[float] = 10.0,
-    interval: float = 0.1,
-) -> None:
-    """
-    Waits asynchronously until the given (async or sync) condition returns True or times out.
-
-    Args:
-        condition: A callable that returns a boolean or an awaitable boolean, indicating if the wait is over.
-        timeout: Maximum time in seconds to wait for the condition to become True. If None, wait indefinitely.
-        interval: Time in seconds between checks of the condition.
-
-    Raises:
-        asyncio.TimeoutError: If the condition is not met within the timeout period.
- """ - start_time = asyncio.get_event_loop().time() - - while True: - result = await condition() if asyncio.iscoroutinefunction(condition) else condition() - if result: - return - - if timeout is not None and (asyncio.get_event_loop().time() - start_time) > timeout: - raise asyncio.TimeoutError("Condition not met within the timeout period.") - - remaining_time = ( - (start_time + timeout) - asyncio.get_event_loop().time() - if timeout is not None - else None - ) - logger.debug( - "Condition not met yet. Waiting for %.2f seconds. Timeout in %.2f seconds.", - interval, - remaining_time, - ) - await asyncio.sleep(interval) - - -@pytest.fixture() -def serverless_cloud(): - return get_environment_var("SERVERLESS_CLOUD", "aws") - - -@pytest.fixture() -def serverless_region(): - return get_environment_var("SERVERLESS_REGION", "us-west-2") - - -@pytest.fixture() -def spec1(serverless_cloud, serverless_region): - return {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - - -@pytest.fixture() -def spec2(): - return ServerlessSpec(cloud=CloudProvider.AWS, region=AwsRegion.US_EAST_1) - - -@pytest.fixture() -def spec3(): - return {"serverless": {"cloud": CloudProvider.AWS, "region": AwsRegion.US_EAST_1}} - - -@pytest.fixture() -def create_sl_index_params(index_name, serverless_cloud, serverless_region): - spec = {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} - return dict(name=index_name, dimension=10, metric="cosine", spec=spec) - - -@pytest.fixture() -def random_vector(): - return [random.uniform(0, 1) for _ in range(10)] - - -@pytest.fixture() -def index_name(request): - test_name = request.node.name - return generate_index_name(test_name) - - -@pytest.fixture() -def ready_sl_index(client, index_name, create_sl_index_params): - create_sl_index_params["timeout"] = None - client.create_index(**create_sl_index_params) - yield index_name - client.delete_index(index_name, -1) - - -@pytest.fixture() -def notready_sl_index(client, index_name, create_sl_index_params): - client.create_index(**create_sl_index_params, timeout=-1) - yield index_name - - -def delete_with_retry(client, index_name, retries=0, sleep_interval=5): - logger.info( - f"Deleting index {index_name}, retry {retries}, next sleep interval {sleep_interval}" - ) - try: - client.delete_index(index_name, -1) - except NotFoundException: - pass - except PineconeApiException as e: - if e.error.code == "PRECONDITON_FAILED": - if retries > 5: - raise "Unable to delete index " + index_name - time.sleep(sleep_interval) - delete_with_retry(client, index_name, retries + 1, sleep_interval * 2) - else: - print(e.__class__) - print(e) - raise "Unable to delete index " + index_name - except Exception as e: - logger.warning(f"Failed to delete index: {index_name}: {str(e)}") - raise "Unable to delete index " + index_name - - -@pytest.fixture(autouse=True) -async def cleanup(client, index_name): - yield - - try: - desc = client.index.describe(name=index_name) - if desc.deletion_protection == "enabled": - logger.info(f"Disabling deletion protection for index: {index_name}") - client.index.configure(name=index_name, deletion_protection="disabled") - logger.debug("Attempting to delete index with name: " + index_name) - client.index.delete(name=index_name, timeout=-1) - except Exception as e: - logger.warning(f"Failed to delete index: {index_name}: {str(e)}") - pass - - for backup in client.db.backup.list(): - logger.debug(f"Deleting backup: {backup.name}") - try: - 
-            client.db.backup.delete(backup_id=backup.backup_id)
-        except Exception as e:
-            logger.warning(f"Failed to delete backup: {backup.name}: {str(e)}")
diff --git a/tests/integration/helpers/__init__.py b/tests/integration/helpers/__init__.py
index 3b680b3dd..afe12395a 100644
--- a/tests/integration/helpers/__init__.py
+++ b/tests/integration/helpers/__init__.py
@@ -9,4 +9,7 @@
     embedding_values,
     jsonprint,
     index_tags,
+    delete_backups_from_run,
+    delete_indexes_from_run,
+    default_create_index_params,
 )
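With these re-exports in place, suite conftest files can pull the shared cleanup utilities from the helpers package with a single relative import, as resources/conftest.py above does:

# Relative import depth depends on where the conftest.py sits under
# tests/integration; three package levels up in this example.
from ...helpers import (
    default_create_index_params,
    delete_backups_from_run,
    delete_indexes_from_run,
)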
Skipping.") + + for index_name in indexes_to_delete: + delete_index_with_retry(client=pc, index_name=index_name, retries=3, sleep_interval=10) + + +def delete_index_with_retry( + client: Pinecone, index_name: str, retries: int = 0, sleep_interval: int = 5 +): + logger.info( + f"Deleting index {index_name}, retry {retries}, next sleep interval {sleep_interval}" + ) + try: + client.delete_index(index_name, -1) + except NotFoundException: + pass + except PineconeApiException as e: + if e.error.code == "PRECONDITON_FAILED": + if retries > 5: + raise "Unable to delete index " + index_name + time.sleep(sleep_interval) + delete_index_with_retry(client, index_name, retries + 1, sleep_interval * 2) + else: + print(e.__class__) + print(e) + raise "Unable to delete index " + index_name + except Exception as e: + logger.warning(f"Failed to delete index: {index_name}: {str(e)}") + raise "Unable to delete index " + index_name + + +async def asyncio_poll_for_freshness(asyncio_idx, target_namespace, target_vector_count): + max_wait_time = 60 * 3 # 3 minutes + time_waited = 0 + wait_per_iteration = 5 + + while True: + stats = await asyncio_idx.describe_index_stats() + logger.debug( + "Polling for freshness on index %s. Current vector count: %s. Waiting for: %s", + asyncio_idx, + stats.total_vector_count, + target_vector_count, + ) + if target_namespace == "": + if stats.total_vector_count >= target_vector_count: + break + else: + if ( + target_namespace in stats.namespaces + and stats.namespaces[target_namespace].vector_count >= target_vector_count + ): + break + time_waited += wait_per_iteration + if time_waited >= max_wait_time: + raise TimeoutError( + "Timeout waiting for index to have expected vector count of {}".format( + target_vector_count + ) + ) + await asyncio.sleep(wait_per_iteration) + + return stats + + +async def asyncio_wait_until( + condition: Union[Callable[[], bool], Callable[[], Awaitable[bool]]], + timeout: Optional[float] = 10.0, + interval: float = 0.1, +) -> None: + """ + Waits asynchronously until the given (async or sync) condition returns True or times out. + + Args: + condition: A callable that returns a boolean or an awaitable boolean, indicating if the wait is over. + timeout: Maximum time in seconds to wait for the condition to become True. If None, wait indefinitely. + interval: Time in seconds between checks of the condition. + + Raises: + asyncio.TimeoutError: If the condition is not met within the timeout period. + """ + start_time = asyncio.get_event_loop().time() + + while True: + result = await condition() if asyncio.iscoroutinefunction(condition) else condition() + if result: + return + + if timeout is not None and (asyncio.get_event_loop().time() - start_time) > timeout: + raise asyncio.TimeoutError("Condition not met within the timeout period.") + + remaining_time = ( + (start_time + timeout) - asyncio.get_event_loop().time() + if timeout is not None + else None + ) + logger.debug( + "Condition not met yet. Waiting for %.2f seconds. 
+            interval,
+            remaining_time,
+        )
+        await asyncio.sleep(interval)
+
+
+def default_create_index_params(request, run_id):
+    github_actor = os.getenv("GITHUB_ACTOR", None)
+    user = os.getenv("USER", None)
+    index_owner = github_actor or user or "unknown"
+
+    index_name = f"{index_owner}-{str(uuid.uuid4())}"
+    tags = index_tags(request, run_id)
+    cloud = get_environment_var("SERVERLESS_CLOUD", "aws")
+    region = get_environment_var("SERVERLESS_REGION", "us-west-2")
+
+    spec = {"serverless": {"cloud": cloud, "region": region}}
+    return {"name": index_name, "dimension": 10, "metric": "cosine", "spec": spec, "tags": tags}
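asyncio_wait_until accepts either a plain callable or an async callable as its condition, so readiness checks can await SDK calls directly. A usage sketch; the helper function name, import path, and ready-status check here are illustrative assumptions rather than part of this diff:

from pinecone import PineconeAsyncio
from tests.integration.helpers.helpers import asyncio_wait_until  # assumed import path


async def wait_for_index_ready(index_name: str) -> None:
    async with PineconeAsyncio() as pc:

        async def index_is_ready() -> bool:
            desc = await pc.describe_index(name=index_name)
            return bool(desc.status.ready)

        # Checks every 2 seconds; raises asyncio.TimeoutError after 120 seconds.
        await asyncio_wait_until(index_is_ready, timeout=120.0, interval=2.0)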