diff --git a/.github/actions/setup-poetry/action.yml b/.github/actions/setup-poetry/action.yml index 2e76676f37..a15eb25327 100644 --- a/.github/actions/setup-poetry/action.yml +++ b/.github/actions/setup-poetry/action.yml @@ -8,7 +8,7 @@ inputs: poetry-version: description: Poetry version to set up required: true - default: '1.8.4' + default: '2.0.1' poetry-lockfile: description: Path to the Poetry lockfile to restore cache from required: true diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml index fd98db24b9..98075c97cd 100644 --- a/.github/workflows/api-tests.yml +++ b/.github/workflows/api-tests.yml @@ -42,25 +42,23 @@ jobs: run: poetry install -C api --with dev - name: Check dependencies in pyproject.toml - run: poetry run -C api bash dev/pytest/pytest_artifacts.sh + run: poetry run -P api bash dev/pytest/pytest_artifacts.sh - name: Run Unit tests - run: poetry run -C api bash dev/pytest/pytest_unit_tests.sh + run: poetry run -P api bash dev/pytest/pytest_unit_tests.sh - name: Run ModelRuntime - run: poetry run -C api bash dev/pytest/pytest_model_runtime.sh + run: poetry run -P api bash dev/pytest/pytest_model_runtime.sh - name: Run dify config tests - run: poetry run -C api python dev/pytest/pytest_config_tests.py + run: poetry run -P api python dev/pytest/pytest_config_tests.py - name: Run Tool - run: poetry run -C api bash dev/pytest/pytest_tools.sh + run: poetry run -P api bash dev/pytest/pytest_tools.sh - name: Run mypy run: | - pushd api - poetry run python -m mypy --install-types --non-interactive . - popd + poetry run -C api python -m mypy --install-types --non-interactive . - name: Set up dotenvs run: | @@ -80,4 +78,4 @@ jobs: ssrf_proxy - name: Run Workflow - run: poetry run -C api bash dev/pytest/pytest_workflow.sh + run: poetry run -P api bash dev/pytest/pytest_workflow.sh diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index 12213380bd..c80037195a 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -38,12 +38,12 @@ jobs: if: steps.changed-files.outputs.any_changed == 'true' run: | poetry run -C api ruff --version - poetry run -C api ruff check ./api - poetry run -C api ruff format --check ./api + poetry run -C api ruff check ./ + poetry run -C api ruff format --check ./ - name: Dotenv check if: steps.changed-files.outputs.any_changed == 'true' - run: poetry run -C api dotenv-linter ./api/.env.example ./web/.env.example + run: poetry run -P api dotenv-linter ./api/.env.example ./web/.env.example - name: Lint hints if: failure() diff --git a/.github/workflows/vdb-tests.yml b/.github/workflows/vdb-tests.yml index 146bee95f2..fab0b8c426 100644 --- a/.github/workflows/vdb-tests.yml +++ b/.github/workflows/vdb-tests.yml @@ -70,4 +70,4 @@ jobs: tidb - name: Test Vector Stores - run: poetry run -C api bash dev/pytest/pytest_vdb.sh + run: poetry run -P api bash dev/pytest/pytest_vdb.sh diff --git a/api/.ruff.toml b/api/.ruff.toml index 89a2da35d6..800c592f1d 100644 --- a/api/.ruff.toml +++ b/api/.ruff.toml @@ -53,10 +53,12 @@ ignore = [ "FURB152", # math-constant "UP007", # non-pep604-annotation "UP032", # f-string + "UP045", # non-pep604-annotation-optional "B005", # strip-with-multi-characters "B006", # mutable-argument-default "B007", # unused-loop-control-variable "B026", # star-arg-unpacking-after-keyword-arg + "B903", # class-as-data-structure "B904", # raise-without-from-inside-except "B905", # zip-without-explicit-strict "N806", # non-lowercase-variable-in-function diff --git a/api/Dockerfile 
b/api/Dockerfile index df676f1926..fd3532e32d 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -4,7 +4,7 @@ FROM python:3.12-slim-bookworm AS base WORKDIR /app/api # Install Poetry -ENV POETRY_VERSION=1.8.4 +ENV POETRY_VERSION=2.0.1 # if you located in China, you can use aliyun mirror to speed up # RUN pip install --no-cache-dir poetry==${POETRY_VERSION} -i https://mirrors.aliyun.com/pypi/simple/ diff --git a/api/README.md b/api/README.md index 461dac4759..6e9f2e8fbb 100644 --- a/api/README.md +++ b/api/README.md @@ -79,5 +79,5 @@ 2. Run the tests locally with mocked system environment variables in `tool.pytest_env` section in `pyproject.toml` ```bash - poetry run -C api bash dev/pytest/pytest_all_tests.sh + poetry run -P api bash dev/pytest/pytest_all_tests.sh ``` diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index 59309fd25c..9e2ba41780 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -146,7 +146,7 @@ class EndpointConfig(BaseSettings): ) CONSOLE_WEB_URL: str = Field( - description="Base URL for the console web interface," "used for frontend references and CORS configuration", + description="Base URL for the console web interface,used for frontend references and CORS configuration", default="", ) diff --git a/api/configs/feature/hosted_service/__init__.py b/api/configs/feature/hosted_service/__init__.py index 7f103be8f4..7dd47e3658 100644 --- a/api/configs/feature/hosted_service/__init__.py +++ b/api/configs/feature/hosted_service/__init__.py @@ -181,7 +181,7 @@ class HostedFetchAppTemplateConfig(BaseSettings): """ HOSTED_FETCH_APP_TEMPLATES_MODE: str = Field( - description="Mode for fetching app templates: remote, db, or builtin" " default to remote,", + description="Mode for fetching app templates: remote, db, or builtin default to remote,", default="remote", ) diff --git a/api/configs/packaging/__init__.py b/api/configs/packaging/__init__.py index 278b1d3b8f..a54c5bf5ee 100644 --- a/api/configs/packaging/__init__.py +++ b/api/configs/packaging/__init__.py @@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings): CURRENT_VERSION: str = Field( description="Dify version", - default="0.15.0", + default="0.15.1", ) COMMIT_SHA: str = Field( diff --git a/api/controllers/console/admin.py b/api/controllers/console/admin.py index 52e0bb6c56..1286188f7f 100644 --- a/api/controllers/console/admin.py +++ b/api/controllers/console/admin.py @@ -56,7 +56,7 @@ class InsertExploreAppListApi(Resource): app = App.query.filter(App.id == args["app_id"]).first() if not app: - raise NotFound(f'App \'{args["app_id"]}\' is not found') + raise NotFound(f"App '{args['app_id']}' is not found") site = app.site if not site: diff --git a/api/controllers/console/app/audio.py b/api/controllers/console/app/audio.py index 9d26af276d..12d9157dda 100644 --- a/api/controllers/console/app/audio.py +++ b/api/controllers/console/app/audio.py @@ -22,7 +22,7 @@ from controllers.console.wraps import account_initialization_required, setup_req from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError from core.model_runtime.errors.invoke import InvokeError from libs.login import login_required -from models.model import AppMode +from models import App, AppMode from services.audio_service import AudioService from services.errors.audio import ( AudioTooLargeServiceError, @@ -79,7 +79,7 @@ class ChatMessageTextApi(Resource): @login_required @account_initialization_required @get_app_model - def post(self, app_model): + def 
post(self, app_model: App): from werkzeug.exceptions import InternalServerError try: @@ -98,9 +98,13 @@ class ChatMessageTextApi(Resource): and app_model.workflow.features_dict ): text_to_speech = app_model.workflow.features_dict.get("text_to_speech") + if text_to_speech is None: + raise ValueError("TTS is not enabled") voice = args.get("voice") or text_to_speech.get("voice") else: try: + if app_model.app_model_config is None: + raise ValueError("AppModelConfig not found") voice = args.get("voice") or app_model.app_model_config.text_to_speech_dict.get("voice") except Exception: voice = None diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index 45c38dba3e..abb817b244 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -10,12 +10,7 @@ from controllers.console import api from controllers.console.apikey import api_key_fields, api_key_list from controllers.console.app.error import ProviderNotInitializeError from controllers.console.datasets.error import DatasetInUseError, DatasetNameDuplicateError, IndexingEstimateError -from controllers.console.wraps import ( - account_initialization_required, - cloud_edition_billing_rate_limit_check, - enterprise_license_required, - setup_required, -) +from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError from core.indexing_runner import IndexingRunner from core.model_runtime.entities.model_entities import ModelType @@ -98,7 +93,6 @@ class DatasetListApi(Resource): @setup_required @login_required @account_initialization_required - @cloud_edition_billing_rate_limit_check("knowledge") def post(self): parser = reqparse.RequestParser() parser.add_argument( @@ -213,7 +207,6 @@ class DatasetApi(Resource): @setup_required @login_required @account_initialization_required - @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id): dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) @@ -317,7 +310,6 @@ class DatasetApi(Resource): @setup_required @login_required @account_initialization_required - @cloud_edition_billing_rate_limit_check("knowledge") def delete(self, dataset_id): dataset_id_str = str(dataset_id) @@ -465,7 +457,7 @@ class DatasetIndexingEstimateApi(Resource): ) except LLMBadRequestError: raise ProviderNotInitializeError( - "No Embedding Model available. Please configure a valid provider " "in the Settings -> Model Provider." + "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider." 
) except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -627,8 +619,7 @@ class DatasetRetrievalSettingApi(Resource): vector_type = dify_config.VECTOR_STORE match vector_type: case ( - VectorType.MILVUS - | VectorType.RELYT + VectorType.RELYT | VectorType.PGVECTOR | VectorType.TIDB_VECTOR | VectorType.CHROMA @@ -653,6 +644,7 @@ class DatasetRetrievalSettingApi(Resource): | VectorType.TIDB_ON_QDRANT | VectorType.LINDORM | VectorType.COUCHBASE + | VectorType.MILVUS ): return { "retrieval_method": [ diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index c625b640a7..c95214e9fb 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -27,7 +27,6 @@ from controllers.console.datasets.error import ( ) from controllers.console.wraps import ( account_initialization_required, - cloud_edition_billing_rate_limit_check, cloud_edition_billing_resource_check, setup_required, ) @@ -231,7 +230,6 @@ class DatasetDocumentListApi(Resource): @account_initialization_required @marshal_with(documents_and_batch_fields) @cloud_edition_billing_resource_check("vector_space") - @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id): dataset_id = str(dataset_id) @@ -287,7 +285,6 @@ class DatasetDocumentListApi(Resource): @setup_required @login_required @account_initialization_required - @cloud_edition_billing_rate_limit_check("knowledge") def delete(self, dataset_id): dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -311,7 +308,6 @@ class DatasetInitApi(Resource): @account_initialization_required @marshal_with(dataset_and_document_fields) @cloud_edition_billing_resource_check("vector_space") - @cloud_edition_billing_rate_limit_check("knowledge") def post(self): # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: @@ -354,8 +350,7 @@ class DatasetInitApi(Resource): ) except InvokeAuthorizationError: raise ProviderNotInitializeError( - "No Embedding Model available. Please configure a valid provider " - "in the Settings -> Model Provider." + "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider." ) except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -530,8 +525,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource): return response.model_dump(), 200 except LLMBadRequestError: raise ProviderNotInitializeError( - "No Embedding Model available. Please configure a valid provider " - "in the Settings -> Model Provider." + "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider." 
) except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -684,7 +678,6 @@ class DocumentProcessingApi(DocumentResource): @setup_required @login_required @account_initialization_required - @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, action): dataset_id = str(dataset_id) document_id = str(document_id) @@ -721,7 +714,6 @@ class DocumentDeleteApi(DocumentResource): @setup_required @login_required @account_initialization_required - @cloud_edition_billing_rate_limit_check("knowledge") def delete(self, dataset_id, document_id): dataset_id = str(dataset_id) document_id = str(document_id) @@ -790,7 +782,6 @@ class DocumentStatusApi(DocumentResource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("vector_space") - @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, action): dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -886,7 +877,6 @@ class DocumentPauseApi(DocumentResource): @setup_required @login_required @account_initialization_required - @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id): """pause document.""" dataset_id = str(dataset_id) @@ -919,7 +909,6 @@ class DocumentRecoverApi(DocumentResource): @setup_required @login_required @account_initialization_required - @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id): """recover document.""" dataset_id = str(dataset_id) @@ -949,7 +938,6 @@ class DocumentRetryApi(DocumentResource): @setup_required @login_required @account_initialization_required - @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id): """retry document.""" diff --git a/api/controllers/console/datasets/datasets_segments.py b/api/controllers/console/datasets/datasets_segments.py index 2dd86a1b32..d2c94045ad 100644 --- a/api/controllers/console/datasets/datasets_segments.py +++ b/api/controllers/console/datasets/datasets_segments.py @@ -19,7 +19,6 @@ from controllers.console.datasets.error import ( from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_knowledge_limit_check, - cloud_edition_billing_rate_limit_check, cloud_edition_billing_resource_check, setup_required, ) @@ -107,7 +106,6 @@ class DatasetDocumentSegmentListApi(Resource): @setup_required @login_required @account_initialization_required - @cloud_edition_billing_rate_limit_check("knowledge") def delete(self, dataset_id, document_id): # check dataset dataset_id = str(dataset_id) @@ -139,7 +137,6 @@ class DatasetDocumentSegmentApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("vector_space") - @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, action): dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -171,8 +168,7 @@ class DatasetDocumentSegmentApi(Resource): ) except LLMBadRequestError: raise ProviderNotInitializeError( - "No Embedding Model available. Please configure a valid provider " - "in the Settings -> Model Provider." + "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider." 
) except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -195,7 +191,6 @@ class DatasetDocumentSegmentAddApi(Resource): @account_initialization_required @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_knowledge_limit_check("add_segment") - @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id, document_id): # check dataset dataset_id = str(dataset_id) @@ -221,8 +216,7 @@ class DatasetDocumentSegmentAddApi(Resource): ) except LLMBadRequestError: raise ProviderNotInitializeError( - "No Embedding Model available. Please configure a valid provider " - "in the Settings -> Model Provider." + "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider." ) except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -246,7 +240,6 @@ class DatasetDocumentSegmentUpdateApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("vector_space") - @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, segment_id): # check dataset dataset_id = str(dataset_id) @@ -272,8 +265,7 @@ class DatasetDocumentSegmentUpdateApi(Resource): ) except LLMBadRequestError: raise ProviderNotInitializeError( - "No Embedding Model available. Please configure a valid provider " - "in the Settings -> Model Provider." + "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider." ) except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -307,7 +299,6 @@ class DatasetDocumentSegmentUpdateApi(Resource): @setup_required @login_required @account_initialization_required - @cloud_edition_billing_rate_limit_check("knowledge") def delete(self, dataset_id, document_id, segment_id): # check dataset dataset_id = str(dataset_id) @@ -345,7 +336,6 @@ class DatasetDocumentSegmentBatchImportApi(Resource): @account_initialization_required @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_knowledge_limit_check("add_segment") - @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id, document_id): # check dataset dataset_id = str(dataset_id) @@ -412,7 +402,6 @@ class ChildChunkAddApi(Resource): @account_initialization_required @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_knowledge_limit_check("add_segment") - @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id, document_id, segment_id): # check dataset dataset_id = str(dataset_id) @@ -445,8 +434,7 @@ class ChildChunkAddApi(Resource): ) except LLMBadRequestError: raise ProviderNotInitializeError( - "No Embedding Model available. Please configure a valid provider " - "in the Settings -> Model Provider." + "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider." 
) except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -511,7 +499,6 @@ class ChildChunkAddApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("vector_space") - @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, segment_id): # check dataset dataset_id = str(dataset_id) @@ -555,7 +542,6 @@ class ChildChunkUpdateApi(Resource): @setup_required @login_required @account_initialization_required - @cloud_edition_billing_rate_limit_check("knowledge") def delete(self, dataset_id, document_id, segment_id, child_chunk_id): # check dataset dataset_id = str(dataset_id) @@ -600,7 +586,6 @@ class ChildChunkUpdateApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("vector_space") - @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, segment_id, child_chunk_id): # check dataset dataset_id = str(dataset_id) diff --git a/api/controllers/console/datasets/hit_testing.py b/api/controllers/console/datasets/hit_testing.py index d344e9d126..18b746f547 100644 --- a/api/controllers/console/datasets/hit_testing.py +++ b/api/controllers/console/datasets/hit_testing.py @@ -2,11 +2,7 @@ from flask_restful import Resource # type: ignore from controllers.console import api from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase -from controllers.console.wraps import ( - account_initialization_required, - cloud_edition_billing_rate_limit_check, - setup_required, -) +from controllers.console.wraps import account_initialization_required, setup_required from libs.login import login_required @@ -14,7 +10,6 @@ class HitTestingApi(Resource, DatasetsHitTestingBase): @setup_required @login_required @account_initialization_required - @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id): dataset_id_str = str(dataset_id) diff --git a/api/controllers/console/wraps.py b/api/controllers/console/wraps.py index e92c0ae952..111db7ccf2 100644 --- a/api/controllers/console/wraps.py +++ b/api/controllers/console/wraps.py @@ -1,6 +1,5 @@ import json import os -import time from functools import wraps from flask import abort, request @@ -8,7 +7,6 @@ from flask_login import current_user # type: ignore from configs import dify_config from controllers.console.workspace.error import AccountNotInitializedError -from extensions.ext_redis import redis_client from models.model import DifySetup from services.feature_service import FeatureService, LicenseStatus from services.operation_service import OperationService @@ -68,9 +66,7 @@ def cloud_edition_billing_resource_check(resource: str): elif resource == "apps" and 0 < apps.limit <= apps.size: abort(403, "The number of apps has reached the limit of your subscription.") elif resource == "vector_space" and 0 < vector_space.limit <= vector_space.size: - abort( - 403, "The capacity of the knowledge storage space has reached the limit of your subscription." 
- ) + abort(403, "The capacity of the vector space has reached the limit of your subscription.") elif resource == "documents" and 0 < documents_upload_quota.limit <= documents_upload_quota.size: # The api of file upload is used in the multiple places, # so we need to check the source of the request from datasets @@ -115,33 +111,6 @@ def cloud_edition_billing_knowledge_limit_check(resource: str): return interceptor -def cloud_edition_billing_rate_limit_check(resource: str): - def interceptor(view): - @wraps(view) - def decorated(*args, **kwargs): - if resource == "knowledge": - knowledge_rate_limit = FeatureService.get_knowledge_rate_limit(current_user.current_tenant_id) - if knowledge_rate_limit.enabled: - current_time = int(time.time() * 1000) - key = f"rate_limit_{current_user.current_tenant_id}" - - redis_client.zadd(key, {current_time: current_time}) - - redis_client.zremrangebyscore(key, 0, current_time - 60000) - - request_count = redis_client.zcard(key) - - if request_count > knowledge_rate_limit.limit: - abort( - 403, "Sorry, you have reached the knowledge base request rate limit of your subscription." - ) - return view(*args, **kwargs) - - return decorated - - return interceptor - - def cloud_utm_record(view): @wraps(view) def decorated(*args, **kwargs): diff --git a/api/controllers/service_api/dataset/segment.py b/api/controllers/service_api/dataset/segment.py index 1c500f51bf..25ae43f2ad 100644 --- a/api/controllers/service_api/dataset/segment.py +++ b/api/controllers/service_api/dataset/segment.py @@ -53,8 +53,7 @@ class SegmentApi(DatasetApiResource): ) except LLMBadRequestError: raise ProviderNotInitializeError( - "No Embedding Model available. Please configure a valid provider " - "in the Settings -> Model Provider." + "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider." ) except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -95,8 +94,7 @@ class SegmentApi(DatasetApiResource): ) except LLMBadRequestError: raise ProviderNotInitializeError( - "No Embedding Model available. Please configure a valid provider " - "in the Settings -> Model Provider." + "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider." ) except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -175,8 +173,7 @@ class DatasetSegmentApi(DatasetApiResource): ) except LLMBadRequestError: raise ProviderNotInitializeError( - "No Embedding Model available. Please configure a valid provider " - "in the Settings -> Model Provider." + "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider." 
) except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) diff --git a/api/controllers/service_api/wraps.py b/api/controllers/service_api/wraps.py index 43f718306b..c746944be1 100644 --- a/api/controllers/service_api/wraps.py +++ b/api/controllers/service_api/wraps.py @@ -1,4 +1,3 @@ -import time from collections.abc import Callable from datetime import UTC, datetime, timedelta from enum import Enum @@ -14,7 +13,6 @@ from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden, Unauthorized from extensions.ext_database import db -from extensions.ext_redis import redis_client from libs.login import _get_user from models.account import Account, Tenant, TenantAccountJoin, TenantStatus from models.model import ApiToken, App, EndUser @@ -141,35 +139,6 @@ def cloud_edition_billing_knowledge_limit_check(resource: str, api_token_type: s return interceptor -def cloud_edition_billing_rate_limit_check(resource: str, api_token_type: str): - def interceptor(view): - @wraps(view) - def decorated(*args, **kwargs): - api_token = validate_and_get_api_token(api_token_type) - - if resource == "knowledge": - knowledge_rate_limit = FeatureService.get_knowledge_rate_limit(api_token.tenant_id) - if knowledge_rate_limit.enabled: - current_time = int(time.time() * 1000) - key = f"rate_limit_{api_token.tenant_id}" - - redis_client.zadd(key, {current_time: current_time}) - - redis_client.zremrangebyscore(key, 0, current_time - 60000) - - request_count = redis_client.zcard(key) - - if request_count > knowledge_rate_limit.limit: - raise Forbidden( - "Sorry, you have reached the knowledge base request rate limit of your subscription." - ) - return view(*args, **kwargs) - - return decorated - - return interceptor - - def validate_dataset_token(view=None): def decorator(view): @wraps(view) @@ -226,7 +195,11 @@ def validate_and_get_api_token(scope: str | None = None): with Session(db.engine, expire_on_commit=False) as session: update_stmt = ( update(ApiToken) - .where(ApiToken.token == auth_token, ApiToken.last_used_at < cutoff_time, ApiToken.type == scope) + .where( + ApiToken.token == auth_token, + (ApiToken.last_used_at.is_(None) | (ApiToken.last_used_at < cutoff_time)), + ApiToken.type == scope, + ) .values(last_used_at=current_time) .returning(ApiToken) ) diff --git a/api/core/agent/cot_agent_runner.py b/api/core/agent/cot_agent_runner.py index e936acb605..bbe1865daf 100644 --- a/api/core/agent/cot_agent_runner.py +++ b/api/core/agent/cot_agent_runner.py @@ -172,7 +172,7 @@ class CotAgentRunner(BaseAgentRunner, ABC): self.save_agent_thought( agent_thought=agent_thought, - tool_name=scratchpad.action.action_name if scratchpad.action else "", + tool_name=(scratchpad.action.action_name if scratchpad.action and not scratchpad.is_final() else ""), tool_input={scratchpad.action.action_name: scratchpad.action.action_input} if scratchpad.action else {}, tool_invoke_meta={}, thought=scratchpad.thought or "", diff --git a/api/core/app/apps/base_app_queue_manager.py b/api/core/app/apps/base_app_queue_manager.py index 1842fc4303..ce2222a14e 100644 --- a/api/core/app/apps/base_app_queue_manager.py +++ b/api/core/app/apps/base_app_queue_manager.py @@ -167,8 +167,7 @@ class AppQueueManager: else: if isinstance(data, DeclarativeMeta) or hasattr(data, "_sa_instance_state"): raise TypeError( - "Critical Error: Passing SQLAlchemy Model instances " - "that cause thread safety issues is not allowed." 
+ "Critical Error: Passing SQLAlchemy Model instances that cause thread safety issues is not allowed." ) diff --git a/api/core/app/apps/message_based_app_generator.py b/api/core/app/apps/message_based_app_generator.py index 4e3aa840ce..cccd62cd5b 100644 --- a/api/core/app/apps/message_based_app_generator.py +++ b/api/core/app/apps/message_based_app_generator.py @@ -89,6 +89,7 @@ class MessageBasedAppGenerator(BaseAppGenerator): Conversation.id == conversation_id, Conversation.app_id == app_model.id, Conversation.status == "normal", + Conversation.is_deleted.is_(False), ] if isinstance(user, Account): diff --git a/api/core/app/task_pipeline/message_cycle_manage.py b/api/core/app/task_pipeline/message_cycle_manage.py index 6a4ab259ba..ef3a52442f 100644 --- a/api/core/app/task_pipeline/message_cycle_manage.py +++ b/api/core/app/task_pipeline/message_cycle_manage.py @@ -145,7 +145,7 @@ class MessageCycleManage: # get extension if "." in message_file.url: - extension = f'.{message_file.url.split(".")[-1]}' + extension = f".{message_file.url.split('.')[-1]}" if len(extension) > 10: extension = ".bin" else: diff --git a/api/core/external_data_tool/api/api.py b/api/core/external_data_tool/api/api.py index 9989c8a090..53acdf075f 100644 --- a/api/core/external_data_tool/api/api.py +++ b/api/core/external_data_tool/api/api.py @@ -62,8 +62,9 @@ class ApiExternalDataTool(ExternalDataTool): if not api_based_extension: raise ValueError( - "[External data tool] API query failed, variable: {}, " - "error: api_based_extension_id is invalid".format(self.variable) + "[External data tool] API query failed, variable: {}, error: api_based_extension_id is invalid".format( + self.variable + ) ) # decrypt api_key diff --git a/api/core/file/models.py b/api/core/file/models.py index 4b4674da09..0de0089430 100644 --- a/api/core/file/models.py +++ b/api/core/file/models.py @@ -90,7 +90,7 @@ class File(BaseModel): def markdown(self) -> str: url = self.generate_url() if self.type == FileType.IMAGE: - text = f'![{self.filename or ""}]({url})' + text = f"![{self.filename or ''}]({url})" else: text = f"[{self.filename or url}]({url})" diff --git a/api/core/llm_generator/prompts.py b/api/core/llm_generator/prompts.py index 7c0f247052..f9411e9ec7 100644 --- a/api/core/llm_generator/prompts.py +++ b/api/core/llm_generator/prompts.py @@ -131,7 +131,7 @@ JAVASCRIPT_CODE_GENERATOR_PROMPT_TEMPLATE = ( SUGGESTED_QUESTIONS_AFTER_ANSWER_INSTRUCTION_PROMPT = ( "Please help me predict the three most likely questions that human would ask, " "and keeping each question under 20 characters.\n" - "MAKE SURE your output is the SAME language as the Assistant's latest response" + "MAKE SURE your output is the SAME language as the Assistant's latest response. 
" "The output must be an array in JSON format following the specified schema:\n" '["question1","question2","question3"]\n' ) diff --git a/api/core/model_runtime/model_providers/azure_openai/llm/llm.py b/api/core/model_runtime/model_providers/azure_openai/llm/llm.py index 03818741f6..be6f90bc07 100644 --- a/api/core/model_runtime/model_providers/azure_openai/llm/llm.py +++ b/api/core/model_runtime/model_providers/azure_openai/llm/llm.py @@ -108,7 +108,7 @@ class AzureOpenAILargeLanguageModel(_CommonAzureOpenAI, LargeLanguageModel): ai_model_entity = self._get_ai_model_entity(base_model_name=base_model_name, model=model) if not ai_model_entity: - raise CredentialsValidateFailedError(f'Base Model Name {credentials["base_model_name"]} is invalid') + raise CredentialsValidateFailedError(f"Base Model Name {credentials['base_model_name']} is invalid") try: client = AzureOpenAI(**self._to_credential_kwargs(credentials)) diff --git a/api/core/model_runtime/model_providers/azure_openai/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/azure_openai/text_embedding/text_embedding.py index 69d2cfaded..05f1b5b557 100644 --- a/api/core/model_runtime/model_providers/azure_openai/text_embedding/text_embedding.py +++ b/api/core/model_runtime/model_providers/azure_openai/text_embedding/text_embedding.py @@ -130,7 +130,7 @@ class AzureOpenAITextEmbeddingModel(_CommonAzureOpenAI, TextEmbeddingModel): raise CredentialsValidateFailedError("Base Model Name is required") if not self._get_ai_model_entity(credentials["base_model_name"], model): - raise CredentialsValidateFailedError(f'Base Model Name {credentials["base_model_name"]} is invalid') + raise CredentialsValidateFailedError(f"Base Model Name {credentials['base_model_name']} is invalid") try: credentials_kwargs = self._to_credential_kwargs(credentials) diff --git a/api/core/model_runtime/model_providers/bedrock/rerank/rerank.py b/api/core/model_runtime/model_providers/bedrock/rerank/rerank.py index 9da23ba1b0..fc08852c1a 100644 --- a/api/core/model_runtime/model_providers/bedrock/rerank/rerank.py +++ b/api/core/model_runtime/model_providers/bedrock/rerank/rerank.py @@ -70,7 +70,7 @@ class BedrockRerankModel(RerankModel): rerankingConfiguration = { "type": "BEDROCK_RERANKING_MODEL", "bedrockRerankingConfiguration": { - "numberOfResults": top_n, + "numberOfResults": min(top_n, len(text_sources)), "modelConfiguration": { "modelArn": model_package_arn, }, diff --git a/api/core/model_runtime/model_providers/deepseek/llm/_position.yaml b/api/core/model_runtime/model_providers/deepseek/llm/_position.yaml index 43d03f2ee9..eeac8aa405 100644 --- a/api/core/model_runtime/model_providers/deepseek/llm/_position.yaml +++ b/api/core/model_runtime/model_providers/deepseek/llm/_position.yaml @@ -1,2 +1,3 @@ - deepseek-chat - deepseek-coder +- deepseek-reasoner diff --git a/api/core/model_runtime/model_providers/deepseek/llm/deepseek-chat.yaml b/api/core/model_runtime/model_providers/deepseek/llm/deepseek-chat.yaml index 0bbd27ad74..a87f964a9e 100644 --- a/api/core/model_runtime/model_providers/deepseek/llm/deepseek-chat.yaml +++ b/api/core/model_runtime/model_providers/deepseek/llm/deepseek-chat.yaml @@ -10,7 +10,7 @@ features: - stream-tool-call model_properties: mode: chat - context_size: 128000 + context_size: 64000 parameter_rules: - name: temperature use_template: temperature diff --git a/api/core/model_runtime/model_providers/deepseek/llm/deepseek-coder.yaml b/api/core/model_runtime/model_providers/deepseek/llm/deepseek-coder.yaml index 
97310e76b9..ca9ba8cbf1 100644 --- a/api/core/model_runtime/model_providers/deepseek/llm/deepseek-coder.yaml +++ b/api/core/model_runtime/model_providers/deepseek/llm/deepseek-coder.yaml @@ -10,7 +10,7 @@ features: - stream-tool-call model_properties: mode: chat - context_size: 128000 + context_size: 64000 parameter_rules: - name: temperature use_template: temperature diff --git a/api/core/model_runtime/model_providers/deepseek/llm/deepseek-reasoner.yaml b/api/core/model_runtime/model_providers/deepseek/llm/deepseek-reasoner.yaml new file mode 100644 index 0000000000..a62bd7e7aa --- /dev/null +++ b/api/core/model_runtime/model_providers/deepseek/llm/deepseek-reasoner.yaml @@ -0,0 +1,21 @@ +model: deepseek-reasoner +label: + zh_Hans: deepseek-reasoner + en_US: deepseek-reasoner +model_type: llm +features: + - agent-thought +model_properties: + mode: chat + context_size: 64000 +parameter_rules: + - name: max_tokens + use_template: max_tokens + min: 1 + max: 8192 + default: 4096 +pricing: + input: "4" + output: "16" + unit: "0.000001" + currency: RMB diff --git a/api/core/model_runtime/model_providers/deepseek/llm/llm.py b/api/core/model_runtime/model_providers/deepseek/llm/llm.py index 0a81f0c094..610dc7b458 100644 --- a/api/core/model_runtime/model_providers/deepseek/llm/llm.py +++ b/api/core/model_runtime/model_providers/deepseek/llm/llm.py @@ -24,9 +24,6 @@ class DeepseekLargeLanguageModel(OAIAPICompatLargeLanguageModel): user: Optional[str] = None, ) -> Union[LLMResult, Generator]: self._add_custom_parameters(credentials) - # {"response_format": "xx"} need convert to {"response_format": {"type": "xx"}} - if "response_format" in model_parameters: - model_parameters["response_format"] = {"type": model_parameters.get("response_format")} return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream) def validate_credentials(self, model: str, credentials: dict) -> None: diff --git a/api/core/model_runtime/model_providers/google/llm/_position.yaml b/api/core/model_runtime/model_providers/google/llm/_position.yaml index 4ad0670e11..c65831c8f9 100644 --- a/api/core/model_runtime/model_providers/google/llm/_position.yaml +++ b/api/core/model_runtime/model_providers/google/llm/_position.yaml @@ -1,5 +1,6 @@ - gemini-2.0-flash-exp - gemini-2.0-flash-thinking-exp-1219 +- gemini-2.0-flash-thinking-exp-01-21 - gemini-1.5-pro - gemini-1.5-pro-latest - gemini-1.5-pro-001 diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-2.0-flash-thinking-exp-01-21.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-2.0-flash-thinking-exp-01-21.yaml new file mode 100644 index 0000000000..7167626494 --- /dev/null +++ b/api/core/model_runtime/model_providers/google/llm/gemini-2.0-flash-thinking-exp-01-21.yaml @@ -0,0 +1,39 @@ +model: gemini-2.0-flash-thinking-exp-01-21 +label: + en_US: Gemini 2.0 Flash Thinking Exp 01-21 +model_type: llm +features: + - agent-thought + - vision + - document + - video + - audio +model_properties: + mode: chat + context_size: 32767 +parameter_rules: + - name: temperature + use_template: temperature + - name: top_p + use_template: top_p + - name: top_k + label: + zh_Hans: 取样数量 + en_US: Top k + type: int + help: + zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 + en_US: Only sample from the top K options for each subsequent token. 
+ required: false + - name: max_output_tokens + use_template: max_tokens + default: 8192 + min: 1 + max: 8192 + - name: json_schema + use_template: json_schema +pricing: + input: '0.00' + output: '0.00' + unit: '0.000001' + currency: USD diff --git a/api/core/model_runtime/model_providers/huggingface_hub/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/huggingface_hub/text_embedding/text_embedding.py index 4ca5379405..93a1b40f73 100644 --- a/api/core/model_runtime/model_providers/huggingface_hub/text_embedding/text_embedding.py +++ b/api/core/model_runtime/model_providers/huggingface_hub/text_embedding/text_embedding.py @@ -162,9 +162,9 @@ class HuggingfaceHubTextEmbeddingModel(_CommonHuggingfaceHub, TextEmbeddingModel @staticmethod def _check_endpoint_url_model_repository_name(credentials: dict, model_name: str): try: - url = f'{HUGGINGFACE_ENDPOINT_API}{credentials["huggingface_namespace"]}' + url = f"{HUGGINGFACE_ENDPOINT_API}{credentials['huggingface_namespace']}" headers = { - "Authorization": f'Bearer {credentials["huggingfacehub_api_token"]}', + "Authorization": f"Bearer {credentials['huggingfacehub_api_token']}", "Content-Type": "application/json", } diff --git a/api/core/model_runtime/model_providers/minimax/llm/llm.py b/api/core/model_runtime/model_providers/minimax/llm/llm.py index ca9b243c92..6ca32d8b24 100644 --- a/api/core/model_runtime/model_providers/minimax/llm/llm.py +++ b/api/core/model_runtime/model_providers/minimax/llm/llm.py @@ -34,6 +34,7 @@ from core.model_runtime.model_providers.minimax.llm.types import MinimaxMessage class MinimaxLargeLanguageModel(LargeLanguageModel): model_apis = { + "minimax-text-01": MinimaxChatCompletionPro, "abab7-chat-preview": MinimaxChatCompletionPro, "abab6.5t-chat": MinimaxChatCompletionPro, "abab6.5s-chat": MinimaxChatCompletionPro, diff --git a/api/core/model_runtime/model_providers/minimax/llm/minimax-text-01.yaml b/api/core/model_runtime/model_providers/minimax/llm/minimax-text-01.yaml new file mode 100644 index 0000000000..8f31aa872e --- /dev/null +++ b/api/core/model_runtime/model_providers/minimax/llm/minimax-text-01.yaml @@ -0,0 +1,46 @@ +model: minimax-text-01 +label: + en_US: Minimax-Text-01 +model_type: llm +features: + - agent-thought + - tool-call + - stream-tool-call +model_properties: + mode: chat + context_size: 1000192 +parameter_rules: + - name: temperature + use_template: temperature + min: 0.01 + max: 1 + default: 0.1 + - name: top_p + use_template: top_p + min: 0.01 + max: 1 + default: 0.95 + - name: max_tokens + use_template: max_tokens + required: true + default: 2048 + min: 1 + max: 1000192 + - name: mask_sensitive_info + type: boolean + default: true + label: + zh_Hans: 隐私保护 + en_US: Moderate + help: + zh_Hans: 对输出中易涉及隐私问题的文本信息进行打码,目前包括但不限于邮箱、域名、链接、证件号、家庭住址等,默认true,即开启打码 + en_US: Mask the sensitive info of the generated content, such as email/domain/link/address/phone/id.. 
+ - name: presence_penalty + use_template: presence_penalty + - name: frequency_penalty + use_template: frequency_penalty +pricing: + input: '0.001' + output: '0.008' + unit: '0.001' + currency: RMB diff --git a/api/core/model_runtime/model_providers/moonshot/llm/llm.py b/api/core/model_runtime/model_providers/moonshot/llm/llm.py index cfee0b91e7..33fa02f0bd 100644 --- a/api/core/model_runtime/model_providers/moonshot/llm/llm.py +++ b/api/core/model_runtime/model_providers/moonshot/llm/llm.py @@ -44,9 +44,6 @@ class MoonshotLargeLanguageModel(OAIAPICompatLargeLanguageModel): self._add_custom_parameters(credentials) self._add_function_call(model, credentials) user = user[:32] if user else None - # {"response_format": "json_object"} need convert to {"response_format": {"type": "json_object"}} - if "response_format" in model_parameters: - model_parameters["response_format"] = {"type": model_parameters.get("response_format")} return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) def validate_credentials(self, model: str, credentials: dict) -> None: diff --git a/api/core/model_runtime/model_providers/openai/llm/llm.py b/api/core/model_runtime/model_providers/openai/llm/llm.py index 86042de6f4..634dbc5535 100644 --- a/api/core/model_runtime/model_providers/openai/llm/llm.py +++ b/api/core/model_runtime/model_providers/openai/llm/llm.py @@ -1,5 +1,6 @@ import json import logging +import re from collections.abc import Generator from typing import Any, Optional, Union, cast @@ -621,11 +622,19 @@ class OpenAILargeLanguageModel(_CommonOpenAI, LargeLanguageModel): prompt_messages = self._clear_illegal_prompt_messages(model, prompt_messages) # o1 compatibility + block_as_stream = False if model.startswith("o1"): if "max_tokens" in model_parameters: model_parameters["max_completion_tokens"] = model_parameters["max_tokens"] del model_parameters["max_tokens"] + if re.match(r"^o1(-\d{4}-\d{2}-\d{2})?$", model): + if stream: + block_as_stream = True + stream = False + if "stream_options" in extra_model_kwargs: + del extra_model_kwargs["stream_options"] + if "stop" in extra_model_kwargs: del extra_model_kwargs["stop"] @@ -642,7 +651,45 @@ class OpenAILargeLanguageModel(_CommonOpenAI, LargeLanguageModel): if stream: return self._handle_chat_generate_stream_response(model, credentials, response, prompt_messages, tools) - return self._handle_chat_generate_response(model, credentials, response, prompt_messages, tools) + block_result = self._handle_chat_generate_response(model, credentials, response, prompt_messages, tools) + + if block_as_stream: + return self._handle_chat_block_as_stream_response(block_result, prompt_messages, stop) + + return block_result + + def _handle_chat_block_as_stream_response( + self, + block_result: LLMResult, + prompt_messages: list[PromptMessage], + stop: Optional[list[str]] = None, + ) -> Generator[LLMResultChunk, None, None]: + """ + Handle llm chat response + :param model: model name + :param credentials: credentials + :param response: response + :param prompt_messages: prompt messages + :param tools: tools for tool calling + :return: llm response chunk generator + """ + text = block_result.message.content + text = cast(str, text) + + if stop: + text = self.enforce_stop_tokens(text, stop) + + yield LLMResultChunk( + model=block_result.model, + prompt_messages=prompt_messages, + system_fingerprint=block_result.system_fingerprint, + delta=LLMResultChunkDelta( + index=0, + message=block_result.message, + finish_reason="stop", + 
usage=block_result.usage, + ), + ) def _handle_chat_generate_response( self, diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/llm.py b/api/core/model_runtime/model_providers/siliconflow/llm/llm.py index f61e8b82e4..7a8aac9ca7 100644 --- a/api/core/model_runtime/model_providers/siliconflow/llm/llm.py +++ b/api/core/model_runtime/model_providers/siliconflow/llm/llm.py @@ -29,9 +29,6 @@ class SiliconflowLargeLanguageModel(OAIAPICompatLargeLanguageModel): user: Optional[str] = None, ) -> Union[LLMResult, Generator]: self._add_custom_parameters(credentials) - # {"response_format": "json_object"} need convert to {"response_format": {"type": "json_object"}} - if "response_format" in model_parameters: - model_parameters["response_format"] = {"type": model_parameters.get("response_format")} return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream) def validate_credentials(self, model: str, credentials: dict) -> None: diff --git a/api/core/model_runtime/model_providers/spark/llm/_client.py b/api/core/model_runtime/model_providers/spark/llm/_client.py index 48911f657a..d8421cd7a0 100644 --- a/api/core/model_runtime/model_providers/spark/llm/_client.py +++ b/api/core/model_runtime/model_providers/spark/llm/_client.py @@ -21,7 +21,7 @@ class SparkLLMClient: domain = api_domain model_api_configs = { - "spark-lite": {"version": "v1.1", "chat_domain": "general"}, + "spark-lite": {"version": "v1.1", "chat_domain": "lite"}, "spark-pro": {"version": "v3.1", "chat_domain": "generalv3"}, "spark-pro-128k": {"version": "pro-128k", "chat_domain": "pro-128k"}, "spark-max": {"version": "v3.5", "chat_domain": "generalv3.5"}, diff --git a/api/core/model_runtime/model_providers/tongyi/llm/llm.py b/api/core/model_runtime/model_providers/tongyi/llm/llm.py index 61ebd45ed6..8214667427 100644 --- a/api/core/model_runtime/model_providers/tongyi/llm/llm.py +++ b/api/core/model_runtime/model_providers/tongyi/llm/llm.py @@ -257,8 +257,7 @@ class TongyiLargeLanguageModel(LargeLanguageModel): for index, response in enumerate(responses): if response.status_code not in {200, HTTPStatus.OK}: raise ServiceUnavailableError( - f"Failed to invoke model {model}, status code: {response.status_code}, " - f"message: {response.message}" + f"Failed to invoke model {model}, status code: {response.status_code}, message: {response.message}" ) resp_finish_reason = response.output.choices[0].finish_reason diff --git a/api/core/model_runtime/model_providers/triton_inference_server/llm/llm.py b/api/core/model_runtime/model_providers/triton_inference_server/llm/llm.py index 47a4b99214..3fb3012ae4 100644 --- a/api/core/model_runtime/model_providers/triton_inference_server/llm/llm.py +++ b/api/core/model_runtime/model_providers/triton_inference_server/llm/llm.py @@ -146,7 +146,7 @@ class TritonInferenceAILargeLanguageModel(LargeLanguageModel): elif credentials["completion_type"] == "completion": completion_type = LLMMode.COMPLETION.value else: - raise ValueError(f'completion_type {credentials["completion_type"]} is not supported') + raise ValueError(f"completion_type {credentials['completion_type']} is not supported") entity = AIModelEntity( model=model, diff --git a/api/core/model_runtime/model_providers/wenxin/_common.py b/api/core/model_runtime/model_providers/wenxin/_common.py index 1247a11fe8..957cfce436 100644 --- a/api/core/model_runtime/model_providers/wenxin/_common.py +++ b/api/core/model_runtime/model_providers/wenxin/_common.py @@ -41,15 +41,15 @@ class BaiduAccessToken: resp = 
response.json() if "error" in resp: if resp["error"] == "invalid_client": - raise InvalidAPIKeyError(f'Invalid API key or secret key: {resp["error_description"]}') + raise InvalidAPIKeyError(f"Invalid API key or secret key: {resp['error_description']}") elif resp["error"] == "unknown_error": - raise InternalServerError(f'Internal server error: {resp["error_description"]}') + raise InternalServerError(f"Internal server error: {resp['error_description']}") elif resp["error"] == "invalid_request": - raise BadRequestError(f'Bad request: {resp["error_description"]}') + raise BadRequestError(f"Bad request: {resp['error_description']}") elif resp["error"] == "rate_limit_exceeded": - raise RateLimitReachedError(f'Rate limit reached: {resp["error_description"]}') + raise RateLimitReachedError(f"Rate limit reached: {resp['error_description']}") else: - raise Exception(f'Unknown error: {resp["error_description"]}') + raise Exception(f"Unknown error: {resp['error_description']}") return resp["access_token"] diff --git a/api/core/model_runtime/model_providers/xinference/llm/llm.py b/api/core/model_runtime/model_providers/xinference/llm/llm.py index 7db1203641..e89b3ab613 100644 --- a/api/core/model_runtime/model_providers/xinference/llm/llm.py +++ b/api/core/model_runtime/model_providers/xinference/llm/llm.py @@ -406,7 +406,7 @@ class XinferenceAILargeLanguageModel(LargeLanguageModel): elif credentials["completion_type"] == "completion": completion_type = LLMMode.COMPLETION.value else: - raise ValueError(f'completion_type {credentials["completion_type"]} is not supported') + raise ValueError(f"completion_type {credentials['completion_type']} is not supported") else: extra_args = XinferenceHelper.get_xinference_extra_parameter( server_url=credentials["server_url"], @@ -472,7 +472,7 @@ class XinferenceAILargeLanguageModel(LargeLanguageModel): api_key = credentials.get("api_key") or "abc" client = OpenAI( - base_url=f'{credentials["server_url"]}/v1', + base_url=f"{credentials['server_url']}/v1", api_key=api_key, max_retries=int(credentials.get("max_retries") or DEFAULT_MAX_RETRIES), timeout=int(credentials.get("invoke_timeout") or DEFAULT_INVOKE_TIMEOUT), diff --git a/api/core/ops/entities/config_entity.py b/api/core/ops/entities/config_entity.py index ef0f9c708f..b484242b61 100644 --- a/api/core/ops/entities/config_entity.py +++ b/api/core/ops/entities/config_entity.py @@ -6,6 +6,7 @@ from pydantic import BaseModel, ValidationInfo, field_validator class TracingProviderEnum(Enum): LANGFUSE = "langfuse" LANGSMITH = "langsmith" + OPIK = "opik" class BaseTracingConfig(BaseModel): @@ -56,5 +57,36 @@ class LangSmithConfig(BaseTracingConfig): return v +class OpikConfig(BaseTracingConfig): + """ + Model class for Opik tracing config. 
+ """ + + api_key: str | None = None + project: str | None = None + workspace: str | None = None + url: str = "https://www.comet.com/opik/api/" + + @field_validator("project") + @classmethod + def project_validator(cls, v, info: ValidationInfo): + if v is None or v == "": + v = "Default Project" + + return v + + @field_validator("url") + @classmethod + def url_validator(cls, v, info: ValidationInfo): + if v is None or v == "": + v = "https://www.comet.com/opik/api/" + if not v.startswith(("https://", "http://")): + raise ValueError("url must start with https:// or http://") + if not v.endswith("/api/"): + raise ValueError("url should end with /api/") + + return v + + OPS_FILE_PATH = "ops_trace/" OPS_TRACE_FAILED_KEY = "FAILED_OPS_TRACE" diff --git a/api/core/ops/opik_trace/__init__.py b/api/core/ops/opik_trace/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/core/ops/opik_trace/opik_trace.py b/api/core/ops/opik_trace/opik_trace.py new file mode 100644 index 0000000000..fabf38fbd6 --- /dev/null +++ b/api/core/ops/opik_trace/opik_trace.py @@ -0,0 +1,469 @@ +import json +import logging +import os +import uuid +from datetime import datetime, timedelta +from typing import Optional, cast + +from opik import Opik, Trace +from opik.id_helpers import uuid4_to_uuid7 + +from core.ops.base_trace_instance import BaseTraceInstance +from core.ops.entities.config_entity import OpikConfig +from core.ops.entities.trace_entity import ( + BaseTraceInfo, + DatasetRetrievalTraceInfo, + GenerateNameTraceInfo, + MessageTraceInfo, + ModerationTraceInfo, + SuggestedQuestionTraceInfo, + ToolTraceInfo, + TraceTaskName, + WorkflowTraceInfo, +) +from extensions.ext_database import db +from models.model import EndUser, MessageFile +from models.workflow import WorkflowNodeExecution + +logger = logging.getLogger(__name__) + + +def wrap_dict(key_name, data): + """Make sure that the input data is a dict""" + if not isinstance(data, dict): + return {key_name: data} + + return data + + +def wrap_metadata(metadata, **kwargs): + """Add common metadata to all Traces and Spans""" + metadata["created_from"] = "dify" + + metadata.update(kwargs) + + return metadata + + +def prepare_opik_uuid(user_datetime: Optional[datetime], user_uuid: Optional[str]): + """Opik needs UUIDv7 while Dify uses UUIDv4 as the identifier of most + messages and objects. The type hints of BaseTraceInfo indicate that + an object's start_time and message_id could be null, which means we cannot map + it to a UUIDv7. Given that we have no way to identify that object + uniquely, generate a new random UUIDv7 in that case. 
+ """ + + if user_datetime is None: + user_datetime = datetime.now() + + if user_uuid is None: + user_uuid = str(uuid.uuid4()) + + return uuid4_to_uuid7(user_datetime, user_uuid) + + +class OpikDataTrace(BaseTraceInstance): + def __init__( + self, + opik_config: OpikConfig, + ): + super().__init__(opik_config) + self.opik_client = Opik( + project_name=opik_config.project, + workspace=opik_config.workspace, + host=opik_config.url, + api_key=opik_config.api_key, + ) + self.project = opik_config.project + self.file_base_url = os.getenv("FILES_URL", "http://127.0.0.1:5001") + + def trace(self, trace_info: BaseTraceInfo): + if isinstance(trace_info, WorkflowTraceInfo): + self.workflow_trace(trace_info) + if isinstance(trace_info, MessageTraceInfo): + self.message_trace(trace_info) + if isinstance(trace_info, ModerationTraceInfo): + self.moderation_trace(trace_info) + if isinstance(trace_info, SuggestedQuestionTraceInfo): + self.suggested_question_trace(trace_info) + if isinstance(trace_info, DatasetRetrievalTraceInfo): + self.dataset_retrieval_trace(trace_info) + if isinstance(trace_info, ToolTraceInfo): + self.tool_trace(trace_info) + if isinstance(trace_info, GenerateNameTraceInfo): + self.generate_name_trace(trace_info) + + def workflow_trace(self, trace_info: WorkflowTraceInfo): + dify_trace_id = trace_info.workflow_run_id + opik_trace_id = prepare_opik_uuid(trace_info.start_time, dify_trace_id) + workflow_metadata = wrap_metadata( + trace_info.metadata, message_id=trace_info.message_id, workflow_app_log_id=trace_info.workflow_app_log_id + ) + root_span_id = None + + if trace_info.message_id: + dify_trace_id = trace_info.message_id + opik_trace_id = prepare_opik_uuid(trace_info.start_time, dify_trace_id) + + trace_data = { + "id": opik_trace_id, + "name": TraceTaskName.MESSAGE_TRACE.value, + "start_time": trace_info.start_time, + "end_time": trace_info.end_time, + "metadata": workflow_metadata, + "input": wrap_dict("input", trace_info.workflow_run_inputs), + "output": wrap_dict("output", trace_info.workflow_run_outputs), + "tags": ["message", "workflow"], + "project_name": self.project, + } + self.add_trace(trace_data) + + root_span_id = prepare_opik_uuid(trace_info.start_time, trace_info.workflow_run_id) + span_data = { + "id": root_span_id, + "parent_span_id": None, + "trace_id": opik_trace_id, + "name": TraceTaskName.WORKFLOW_TRACE.value, + "input": wrap_dict("input", trace_info.workflow_run_inputs), + "output": wrap_dict("output", trace_info.workflow_run_outputs), + "start_time": trace_info.start_time, + "end_time": trace_info.end_time, + "metadata": workflow_metadata, + "tags": ["workflow"], + "project_name": self.project, + } + self.add_span(span_data) + else: + trace_data = { + "id": opik_trace_id, + "name": TraceTaskName.MESSAGE_TRACE.value, + "start_time": trace_info.start_time, + "end_time": trace_info.end_time, + "metadata": workflow_metadata, + "input": wrap_dict("input", trace_info.workflow_run_inputs), + "output": wrap_dict("output", trace_info.workflow_run_outputs), + "tags": ["workflow"], + "project_name": self.project, + } + self.add_trace(trace_data) + + # through workflow_run_id get all_nodes_execution + workflow_nodes_execution_id_records = ( + db.session.query(WorkflowNodeExecution.id) + .filter(WorkflowNodeExecution.workflow_run_id == trace_info.workflow_run_id) + .all() + ) + + for node_execution_id_record in workflow_nodes_execution_id_records: + node_execution = ( + db.session.query( + WorkflowNodeExecution.id, + WorkflowNodeExecution.tenant_id, + 
WorkflowNodeExecution.app_id, + WorkflowNodeExecution.title, + WorkflowNodeExecution.node_type, + WorkflowNodeExecution.status, + WorkflowNodeExecution.inputs, + WorkflowNodeExecution.outputs, + WorkflowNodeExecution.created_at, + WorkflowNodeExecution.elapsed_time, + WorkflowNodeExecution.process_data, + WorkflowNodeExecution.execution_metadata, + ) + .filter(WorkflowNodeExecution.id == node_execution_id_record.id) + .first() + ) + + if not node_execution: + continue + + node_execution_id = node_execution.id + tenant_id = node_execution.tenant_id + app_id = node_execution.app_id + node_name = node_execution.title + node_type = node_execution.node_type + status = node_execution.status + if node_type == "llm": + inputs = ( + json.loads(node_execution.process_data).get("prompts", {}) if node_execution.process_data else {} + ) + else: + inputs = json.loads(node_execution.inputs) if node_execution.inputs else {} + outputs = json.loads(node_execution.outputs) if node_execution.outputs else {} + created_at = node_execution.created_at or datetime.now() + elapsed_time = node_execution.elapsed_time + finished_at = created_at + timedelta(seconds=elapsed_time) + + execution_metadata = ( + json.loads(node_execution.execution_metadata) if node_execution.execution_metadata else {} + ) + metadata = execution_metadata.copy() + metadata.update( + { + "workflow_run_id": trace_info.workflow_run_id, + "node_execution_id": node_execution_id, + "tenant_id": tenant_id, + "app_id": app_id, + "app_name": node_name, + "node_type": node_type, + "status": status, + } + ) + + process_data = json.loads(node_execution.process_data) if node_execution.process_data else {} + + provider = None + model = None + total_tokens = 0 + completion_tokens = 0 + prompt_tokens = 0 + + if process_data and process_data.get("model_mode") == "chat": + run_type = "llm" + provider = process_data.get("model_provider", None) + model = process_data.get("model_name", "") + metadata.update( + { + "ls_provider": provider, + "ls_model_name": model, + } + ) + + try: + if outputs.get("usage"): + total_tokens = outputs["usage"].get("total_tokens", 0) + prompt_tokens = outputs["usage"].get("prompt_tokens", 0) + completion_tokens = outputs["usage"].get("completion_tokens", 0) + except Exception: + logger.error("Failed to extract usage", exc_info=True) + + else: + run_type = "tool" + + parent_span_id = trace_info.workflow_app_log_id or trace_info.workflow_run_id + + if not total_tokens: + total_tokens = execution_metadata.get("total_tokens", 0) + + span_data = { + "trace_id": opik_trace_id, + "id": prepare_opik_uuid(created_at, node_execution_id), + "parent_span_id": prepare_opik_uuid(trace_info.start_time, parent_span_id), + "name": node_type, + "type": run_type, + "start_time": created_at, + "end_time": finished_at, + "metadata": wrap_metadata(metadata), + "input": wrap_dict("input", inputs), + "output": wrap_dict("output", outputs), + "tags": ["node_execution"], + "project_name": self.project, + "usage": { + "total_tokens": total_tokens, + "completion_tokens": completion_tokens, + "prompt_tokens": prompt_tokens, + }, + "model": model, + "provider": provider, + } + + self.add_span(span_data) + + def message_trace(self, trace_info: MessageTraceInfo): + # get message file data + file_list = cast(list[str], trace_info.file_list) or [] + message_file_data: Optional[MessageFile] = trace_info.message_file_data + + if message_file_data is not None: + file_url = f"{self.file_base_url}/{message_file_data.url}" if message_file_data else "" + 
file_list.append(file_url) + + message_data = trace_info.message_data + if message_data is None: + return + + metadata = trace_info.metadata + message_id = trace_info.message_id + + user_id = message_data.from_account_id + metadata["user_id"] = user_id + metadata["file_list"] = file_list + + if message_data.from_end_user_id: + end_user_data: Optional[EndUser] = ( + db.session.query(EndUser).filter(EndUser.id == message_data.from_end_user_id).first() + ) + if end_user_data is not None: + end_user_id = end_user_data.session_id + metadata["end_user_id"] = end_user_id + + trace_data = { + "id": prepare_opik_uuid(trace_info.start_time, message_id), + "name": TraceTaskName.MESSAGE_TRACE.value, + "start_time": trace_info.start_time, + "end_time": trace_info.end_time, + "metadata": wrap_metadata(metadata), + "input": trace_info.inputs, + "output": message_data.answer, + "tags": ["message", str(trace_info.conversation_mode)], + "project_name": self.project, + } + trace = self.add_trace(trace_data) + + span_data = { + "trace_id": trace.id, + "name": "llm", + "type": "llm", + "start_time": trace_info.start_time, + "end_time": trace_info.end_time, + "metadata": wrap_metadata(metadata), + "input": {"input": trace_info.inputs}, + "output": {"output": message_data.answer}, + "tags": ["llm", str(trace_info.conversation_mode)], + "usage": { + "completion_tokens": trace_info.answer_tokens, + "prompt_tokens": trace_info.message_tokens, + "total_tokens": trace_info.total_tokens, + }, + "project_name": self.project, + } + self.add_span(span_data) + + def moderation_trace(self, trace_info: ModerationTraceInfo): + if trace_info.message_data is None: + return + + start_time = trace_info.start_time or trace_info.message_data.created_at + + span_data = { + "trace_id": prepare_opik_uuid(start_time, trace_info.message_id), + "name": TraceTaskName.MODERATION_TRACE.value, + "type": "tool", + "start_time": start_time, + "end_time": trace_info.end_time or trace_info.message_data.updated_at, + "metadata": wrap_metadata(trace_info.metadata), + "input": wrap_dict("input", trace_info.inputs), + "output": { + "action": trace_info.action, + "flagged": trace_info.flagged, + "preset_response": trace_info.preset_response, + "inputs": trace_info.inputs, + }, + "tags": ["moderation"], + } + + self.add_span(span_data) + + def suggested_question_trace(self, trace_info: SuggestedQuestionTraceInfo): + message_data = trace_info.message_data + if message_data is None: + return + + start_time = trace_info.start_time or message_data.created_at + + span_data = { + "trace_id": prepare_opik_uuid(start_time, trace_info.message_id), + "name": TraceTaskName.SUGGESTED_QUESTION_TRACE.value, + "type": "tool", + "start_time": start_time, + "end_time": trace_info.end_time or message_data.updated_at, + "metadata": wrap_metadata(trace_info.metadata), + "input": wrap_dict("input", trace_info.inputs), + "output": wrap_dict("output", trace_info.suggested_question), + "tags": ["suggested_question"], + } + + self.add_span(span_data) + + def dataset_retrieval_trace(self, trace_info: DatasetRetrievalTraceInfo): + if trace_info.message_data is None: + return + + start_time = trace_info.start_time or trace_info.message_data.created_at + + span_data = { + "trace_id": prepare_opik_uuid(start_time, trace_info.message_id), + "name": TraceTaskName.DATASET_RETRIEVAL_TRACE.value, + "type": "tool", + "start_time": start_time, + "end_time": trace_info.end_time or trace_info.message_data.updated_at, + "metadata": wrap_metadata(trace_info.metadata), + "input": 
wrap_dict("input", trace_info.inputs), + "output": {"documents": trace_info.documents}, + "tags": ["dataset_retrieval"], + } + + self.add_span(span_data) + + def tool_trace(self, trace_info: ToolTraceInfo): + span_data = { + "trace_id": prepare_opik_uuid(trace_info.start_time, trace_info.message_id), + "name": trace_info.tool_name, + "type": "tool", + "start_time": trace_info.start_time, + "end_time": trace_info.end_time, + "metadata": wrap_metadata(trace_info.metadata), + "input": wrap_dict("input", trace_info.tool_inputs), + "output": wrap_dict("output", trace_info.tool_outputs), + "tags": ["tool", trace_info.tool_name], + } + + self.add_span(span_data) + + def generate_name_trace(self, trace_info: GenerateNameTraceInfo): + trace_data = { + "id": prepare_opik_uuid(trace_info.start_time, trace_info.message_id), + "name": TraceTaskName.GENERATE_NAME_TRACE.value, + "start_time": trace_info.start_time, + "end_time": trace_info.end_time, + "metadata": wrap_metadata(trace_info.metadata), + "input": trace_info.inputs, + "output": trace_info.outputs, + "tags": ["generate_name"], + "project_name": self.project, + } + + trace = self.add_trace(trace_data) + + span_data = { + "trace_id": trace.id, + "name": TraceTaskName.GENERATE_NAME_TRACE.value, + "start_time": trace_info.start_time, + "end_time": trace_info.end_time, + "metadata": wrap_metadata(trace_info.metadata), + "input": wrap_dict("input", trace_info.inputs), + "output": wrap_dict("output", trace_info.outputs), + "tags": ["generate_name"], + } + + self.add_span(span_data) + + def add_trace(self, opik_trace_data: dict) -> Trace: + try: + trace = self.opik_client.trace(**opik_trace_data) + logger.debug("Opik Trace created successfully") + return trace + except Exception as e: + raise ValueError(f"Opik Failed to create trace: {str(e)}") + + def add_span(self, opik_span_data: dict): + try: + self.opik_client.span(**opik_span_data) + logger.debug("Opik Span created successfully") + except Exception as e: + raise ValueError(f"Opik Failed to create span: {str(e)}") + + def api_check(self): + try: + self.opik_client.auth_check() + return True + except Exception as e: + logger.info(f"Opik API check failed: {str(e)}", exc_info=True) + raise ValueError(f"Opik API check failed: {str(e)}") + + def get_project_url(self): + try: + return self.opik_client.get_project_url(project_name=self.project) + except Exception as e: + logger.info(f"Opik get run url failed: {str(e)}", exc_info=True) + raise ValueError(f"Opik get run url failed: {str(e)}") diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py index 691cb8d400..c153e3f9dd 100644 --- a/api/core/ops/ops_trace_manager.py +++ b/api/core/ops/ops_trace_manager.py @@ -17,6 +17,7 @@ from core.ops.entities.config_entity import ( OPS_FILE_PATH, LangfuseConfig, LangSmithConfig, + OpikConfig, TracingProviderEnum, ) from core.ops.entities.trace_entity import ( @@ -32,6 +33,7 @@ from core.ops.entities.trace_entity import ( ) from core.ops.langfuse_trace.langfuse_trace import LangFuseDataTrace from core.ops.langsmith_trace.langsmith_trace import LangSmithDataTrace +from core.ops.opik_trace.opik_trace import OpikDataTrace from core.ops.utils import get_message_data from extensions.ext_database import db from extensions.ext_storage import storage @@ -52,6 +54,12 @@ provider_config_map: dict[str, dict[str, Any]] = { "other_keys": ["project", "endpoint"], "trace_instance": LangSmithDataTrace, }, + TracingProviderEnum.OPIK.value: { + "config_class": OpikConfig, + "secret_keys": ["api_key"], + 
"other_keys": ["project", "url", "workspace"], + "trace_instance": OpikDataTrace, + }, } diff --git a/api/core/provider_manager.py b/api/core/provider_manager.py index 010abd12d2..2430d598ff 100644 --- a/api/core/provider_manager.py +++ b/api/core/provider_manager.py @@ -22,7 +22,12 @@ from core.helper import encrypter from core.helper.model_provider_cache import ProviderCredentialsCache, ProviderCredentialsCacheType from core.helper.position_helper import is_filtered from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.entities.provider_entities import CredentialFormSchema, FormType, ProviderEntity +from core.model_runtime.entities.provider_entities import ( + ConfigurateMethod, + CredentialFormSchema, + FormType, + ProviderEntity, +) from core.model_runtime.model_providers import model_provider_factory from extensions import ext_hosting_provider from extensions.ext_database import db @@ -835,11 +840,18 @@ class ProviderManager: :return: """ # Get provider model credential secret variables - model_credential_secret_variables = self._extract_secret_variables( - provider_entity.model_credential_schema.credential_form_schemas - if provider_entity.model_credential_schema - else [] - ) + if ConfigurateMethod.PREDEFINED_MODEL in provider_entity.configurate_methods: + model_credential_secret_variables = self._extract_secret_variables( + provider_entity.provider_credential_schema.credential_form_schemas + if provider_entity.provider_credential_schema + else [] + ) + else: + model_credential_secret_variables = self._extract_secret_variables( + provider_entity.model_credential_schema.credential_form_schemas + if provider_entity.model_credential_schema + else [] + ) model_settings: list[ModelSettings] = [] if not provider_model_settings: diff --git a/api/core/rag/extractor/firecrawl/firecrawl_app.py b/api/core/rag/extractor/firecrawl/firecrawl_app.py index 8ae4579c7c..eac08aeb8b 100644 --- a/api/core/rag/extractor/firecrawl/firecrawl_app.py +++ b/api/core/rag/extractor/firecrawl/firecrawl_app.py @@ -31,7 +31,7 @@ class FirecrawlApp: "markdown": data.get("markdown"), } else: - raise Exception(f'Failed to scrape URL. Error: {response_data["error"]}') + raise Exception(f"Failed to scrape URL. 
Error: {response_data['error']}") elif response.status_code in {402, 409, 500}: error_message = response.json().get("error", "Unknown error occurred") diff --git a/api/core/rag/extractor/notion_extractor.py b/api/core/rag/extractor/notion_extractor.py index 41355d3fac..7ab248199a 100644 --- a/api/core/rag/extractor/notion_extractor.py +++ b/api/core/rag/extractor/notion_extractor.py @@ -358,8 +358,7 @@ class NotionExtractor(BaseExtractor): if not data_source_binding: raise Exception( - f"No notion data source binding found for tenant {tenant_id} " - f"and notion workspace {notion_workspace_id}" + f"No notion data source binding found for tenant {tenant_id} and notion workspace {notion_workspace_id}" ) return cast(str, data_source_binding.access_token) diff --git a/api/core/tools/provider/builtin/aippt/tools/aippt.py b/api/core/tools/provider/builtin/aippt/tools/aippt.py index cf10f5d255..0430a6654c 100644 --- a/api/core/tools/provider/builtin/aippt/tools/aippt.py +++ b/api/core/tools/provider/builtin/aippt/tools/aippt.py @@ -127,7 +127,7 @@ class AIPPTGenerateToolAdapter: response = response.json() if response.get("code") != 0: - raise Exception(f'Failed to create task: {response.get("msg")}') + raise Exception(f"Failed to create task: {response.get('msg')}") return response.get("data", {}).get("id") @@ -222,7 +222,7 @@ class AIPPTGenerateToolAdapter: elif model == "wenxin": response = response.json() if response.get("code") != 0: - raise Exception(f'Failed to generate content: {response.get("msg")}') + raise Exception(f"Failed to generate content: {response.get('msg')}") return response.get("data", "") @@ -254,7 +254,7 @@ class AIPPTGenerateToolAdapter: response = response.json() if response.get("code") != 0: - raise Exception(f'Failed to generate ppt: {response.get("msg")}') + raise Exception(f"Failed to generate ppt: {response.get('msg')}") id = response.get("data", {}).get("id") cover_url = response.get("data", {}).get("cover_url") @@ -270,7 +270,7 @@ class AIPPTGenerateToolAdapter: response = response.json() if response.get("code") != 0: - raise Exception(f'Failed to generate ppt: {response.get("msg")}') + raise Exception(f"Failed to generate ppt: {response.get('msg')}") export_code = response.get("data") if not export_code: @@ -290,7 +290,7 @@ class AIPPTGenerateToolAdapter: response = response.json() if response.get("code") != 0: - raise Exception(f'Failed to generate ppt: {response.get("msg")}') + raise Exception(f"Failed to generate ppt: {response.get('msg')}") if response.get("msg") == "导出中": current_iteration += 1 @@ -343,7 +343,7 @@ class AIPPTGenerateToolAdapter: raise Exception(f"Failed to connect to aippt: {response.text}") response = response.json() if response.get("code") != 0: - raise Exception(f'Failed to connect to aippt: {response.get("msg")}') + raise Exception(f"Failed to connect to aippt: {response.get('msg')}") token = response.get("data", {}).get("token") expire = response.get("data", {}).get("time_expire") @@ -379,7 +379,7 @@ class AIPPTGenerateToolAdapter: if cls._style_cache[key]["expire"] < now: del cls._style_cache[key] - key = f'{credentials["aippt_access_key"]}#@#{user_id}' + key = f"{credentials['aippt_access_key']}#@#{user_id}" if key in cls._style_cache: return cls._style_cache[key]["colors"], cls._style_cache[key]["styles"] @@ -396,11 +396,11 @@ class AIPPTGenerateToolAdapter: response = response.json() if response.get("code") != 0: - raise Exception(f'Failed to connect to aippt: {response.get("msg")}') + raise Exception(f"Failed to connect to aippt: 
{response.get('msg')}") colors = [ { - "id": f'id-{item.get("id")}', + "id": f"id-{item.get('id')}", "name": item.get("name"), "en_name": item.get("en_name", item.get("name")), } @@ -408,7 +408,7 @@ class AIPPTGenerateToolAdapter: ] styles = [ { - "id": f'id-{item.get("id")}', + "id": f"id-{item.get('id')}", "name": item.get("title"), } for item in response.get("data", {}).get("suit_style") or [] @@ -454,7 +454,7 @@ class AIPPTGenerateToolAdapter: response = response.json() if response.get("code") != 0: - raise Exception(f'Failed to connect to aippt: {response.get("msg")}') + raise Exception(f"Failed to connect to aippt: {response.get('msg')}") if len(response.get("data", {}).get("list") or []) > 0: return response.get("data", {}).get("list")[0].get("id") diff --git a/api/core/tools/provider/builtin/aws/tools/nova_reel.py b/api/core/tools/provider/builtin/aws/tools/nova_reel.py index bfd3d302b2..848df0b36b 100644 --- a/api/core/tools/provider/builtin/aws/tools/nova_reel.py +++ b/api/core/tools/provider/builtin/aws/tools/nova_reel.py @@ -229,8 +229,7 @@ class NovaReelTool(BuiltinTool): if async_mode: return self.create_text_message( - f"Video generation started.\nInvocation ARN: {invocation_arn}\n" - f"Video will be available at: {video_uri}" + f"Video generation started.\nInvocation ARN: {invocation_arn}\nVideo will be available at: {video_uri}" ) return self._wait_for_completion(bedrock, s3_client, invocation_arn) diff --git a/api/core/tools/provider/builtin/baidu_translate/tools/fieldtranslate.py b/api/core/tools/provider/builtin/baidu_translate/tools/fieldtranslate.py index bce259f31d..ff5cf32ddc 100644 --- a/api/core/tools/provider/builtin/baidu_translate/tools/fieldtranslate.py +++ b/api/core/tools/provider/builtin/baidu_translate/tools/fieldtranslate.py @@ -65,7 +65,7 @@ class BaiduFieldTranslateTool(BuiltinTool, BaiduTranslateToolBase): if "trans_result" in result: result_text = result["trans_result"][0]["dst"] else: - result_text = f'{result["error_code"]}: {result["error_msg"]}' + result_text = f"{result['error_code']}: {result['error_msg']}" return self.create_text_message(str(result_text)) except requests.RequestException as e: diff --git a/api/core/tools/provider/builtin/baidu_translate/tools/language.py b/api/core/tools/provider/builtin/baidu_translate/tools/language.py index 3bbaee88b3..b7fd692b7d 100644 --- a/api/core/tools/provider/builtin/baidu_translate/tools/language.py +++ b/api/core/tools/provider/builtin/baidu_translate/tools/language.py @@ -52,7 +52,7 @@ class BaiduLanguageTool(BuiltinTool, BaiduTranslateToolBase): result_text = "" if result["error_code"] != 0: - result_text = f'{result["error_code"]}: {result["error_msg"]}' + result_text = f"{result['error_code']}: {result['error_msg']}" else: result_text = result["data"]["src"] result_text = self.mapping_result(description_language, result_text) diff --git a/api/core/tools/provider/builtin/baidu_translate/tools/translate.py b/api/core/tools/provider/builtin/baidu_translate/tools/translate.py index 7cd816a3bc..0d25466a70 100644 --- a/api/core/tools/provider/builtin/baidu_translate/tools/translate.py +++ b/api/core/tools/provider/builtin/baidu_translate/tools/translate.py @@ -58,7 +58,7 @@ class BaiduTranslateTool(BuiltinTool, BaiduTranslateToolBase): if "trans_result" in result: result_text = result["trans_result"][0]["dst"] else: - result_text = f'{result["error_code"]}: {result["error_msg"]}' + result_text = f"{result['error_code']}: {result['error_msg']}" return self.create_text_message(str(result_text)) except 
requests.RequestException as e: diff --git a/api/core/tools/provider/builtin/bing/tools/bing_web_search.py b/api/core/tools/provider/builtin/bing/tools/bing_web_search.py index 1afe2f8385..0de6936983 100644 --- a/api/core/tools/provider/builtin/bing/tools/bing_web_search.py +++ b/api/core/tools/provider/builtin/bing/tools/bing_web_search.py @@ -30,7 +30,7 @@ class BingSearchTool(BuiltinTool): headers = {"Ocp-Apim-Subscription-Key": subscription_key, "Accept-Language": accept_language} query = quote(query) - server_url = f'{server_url}?q={query}&mkt={market_code}&count={limit}&responseFilter={",".join(filters)}' + server_url = f"{server_url}?q={query}&mkt={market_code}&count={limit}&responseFilter={','.join(filters)}" response = get(server_url, headers=headers) if response.status_code != 200: @@ -47,23 +47,23 @@ class BingSearchTool(BuiltinTool): results = [] if search_results: for result in search_results: - url = f': {result["url"]}' if "url" in result else "" - results.append(self.create_text_message(text=f'{result["name"]}{url}')) + url = f": {result['url']}" if "url" in result else "" + results.append(self.create_text_message(text=f"{result['name']}{url}")) if entities: for entity in entities: - url = f': {entity["url"]}' if "url" in entity else "" - results.append(self.create_text_message(text=f'{entity.get("name", "")}{url}')) + url = f": {entity['url']}" if "url" in entity else "" + results.append(self.create_text_message(text=f"{entity.get('name', '')}{url}")) if news: for news_item in news: - url = f': {news_item["url"]}' if "url" in news_item else "" - results.append(self.create_text_message(text=f'{news_item.get("name", "")}{url}')) + url = f": {news_item['url']}" if "url" in news_item else "" + results.append(self.create_text_message(text=f"{news_item.get('name', '')}{url}")) if related_searches: for related in related_searches: - url = f': {related["displayText"]}' if "displayText" in related else "" - results.append(self.create_text_message(text=f'{related.get("displayText", "")}{url}')) + url = f": {related['displayText']}" if "displayText" in related else "" + results.append(self.create_text_message(text=f"{related.get('displayText', '')}{url}")) return results elif result_type == "json": @@ -106,29 +106,29 @@ class BingSearchTool(BuiltinTool): text = "" if search_results: for i, result in enumerate(search_results): - text += f'{i + 1}: {result.get("name", "")} - {result.get("snippet", "")}\n' + text += f"{i + 1}: {result.get('name', '')} - {result.get('snippet', '')}\n" if computation and "expression" in computation and "value" in computation: text += "\nComputation:\n" - text += f'{computation["expression"]} = {computation["value"]}\n' + text += f"{computation['expression']} = {computation['value']}\n" if entities: text += "\nEntities:\n" for entity in entities: - url = f'- {entity["url"]}' if "url" in entity else "" - text += f'{entity.get("name", "")}{url}\n' + url = f"- {entity['url']}" if "url" in entity else "" + text += f"{entity.get('name', '')}{url}\n" if news: text += "\nNews:\n" for news_item in news: - url = f'- {news_item["url"]}' if "url" in news_item else "" - text += f'{news_item.get("name", "")}{url}\n' + url = f"- {news_item['url']}" if "url" in news_item else "" + text += f"{news_item.get('name', '')}{url}\n" if related_searches: text += "\n\nRelated Searches:\n" for related in related_searches: - url = f'- {related["webSearchUrl"]}' if "webSearchUrl" in related else "" - text += f'{related.get("displayText", "")}{url}\n' + url = f"- 
{related['webSearchUrl']}" if "webSearchUrl" in related else "" + text += f"{related.get('displayText', '')}{url}\n" return self.create_text_message(text=self.summary(user_id=user_id, content=text)) diff --git a/api/core/tools/provider/builtin/did/did_appx.py b/api/core/tools/provider/builtin/did/did_appx.py index c68878630d..dca62f9e19 100644 --- a/api/core/tools/provider/builtin/did/did_appx.py +++ b/api/core/tools/provider/builtin/did/did_appx.py @@ -83,5 +83,5 @@ class DIDApp: if status["status"] == "done": return status elif status["status"] == "error" or status["status"] == "rejected": - raise HTTPError(f'Talks {id} failed: {status["status"]} {status.get("error", {}).get("description")}') + raise HTTPError(f"Talks {id} failed: {status['status']} {status.get('error', {}).get('description')}") time.sleep(poll_interval) diff --git a/api/core/tools/provider/builtin/email/tools/send.py b/api/core/tools/provider/builtin/email/tools/send.py index bf9e63e1ef..2012d8b115 100644 --- a/api/core/tools/provider/builtin/email/tools/send.py +++ b/api/core/tools/provider/builtin/email/tools/send.py @@ -20,33 +20,33 @@ class SendEmailToolParameters(BaseModel): encrypt_method: str -def send_mail(parmas: SendEmailToolParameters): +def send_mail(params: SendEmailToolParameters): timeout = 60 msg = MIMEMultipart("alternative") - msg["From"] = parmas.email_account - msg["To"] = parmas.sender_to - msg["Subject"] = parmas.subject - msg.attach(MIMEText(parmas.email_content, "plain")) - msg.attach(MIMEText(parmas.email_content, "html")) + msg["From"] = params.email_account + msg["To"] = params.sender_to + msg["Subject"] = params.subject + msg.attach(MIMEText(params.email_content, "plain")) + msg.attach(MIMEText(params.email_content, "html")) ctx = ssl.create_default_context() - if parmas.encrypt_method.upper() == "SSL": + if params.encrypt_method.upper() == "SSL": try: - with smtplib.SMTP_SSL(parmas.smtp_server, parmas.smtp_port, context=ctx, timeout=timeout) as server: - server.login(parmas.email_account, parmas.email_password) - server.sendmail(parmas.email_account, parmas.sender_to, msg.as_string()) + with smtplib.SMTP_SSL(params.smtp_server, params.smtp_port, context=ctx, timeout=timeout) as server: + server.login(params.email_account, params.email_password) + server.sendmail(params.email_account, params.sender_to, msg.as_string()) return True except Exception as e: logging.exception("send email failed") return False else: # NONE or TLS try: - with smtplib.SMTP(parmas.smtp_server, parmas.smtp_port, timeout=timeout) as server: - if parmas.encrypt_method.upper() == "TLS": + with smtplib.SMTP(params.smtp_server, params.smtp_port, timeout=timeout) as server: + if params.encrypt_method.upper() == "TLS": server.starttls(context=ctx) - server.login(parmas.email_account, parmas.email_password) - server.sendmail(parmas.email_account, parmas.sender_to, msg.as_string()) + server.login(params.email_account, params.email_password) + server.sendmail(params.email_account, params.sender_to, msg.as_string()) return True except Exception as e: logging.exception("send email failed") diff --git a/api/core/tools/provider/builtin/firecrawl/firecrawl_appx.py b/api/core/tools/provider/builtin/firecrawl/firecrawl_appx.py index d9fb6f04bc..14596bf93f 100644 --- a/api/core/tools/provider/builtin/firecrawl/firecrawl_appx.py +++ b/api/core/tools/provider/builtin/firecrawl/firecrawl_appx.py @@ -74,7 +74,7 @@ class FirecrawlApp: if response is None: raise HTTPError("Failed to initiate crawl after multiple retries") elif 
response.get("success") == False: - raise HTTPError(f'Failed to crawl: {response.get("error")}') + raise HTTPError(f"Failed to crawl: {response.get('error')}") job_id: str = response["id"] if wait: return self._monitor_job_status(job_id=job_id, poll_interval=poll_interval) @@ -100,7 +100,7 @@ class FirecrawlApp: if status["status"] == "completed": return status elif status["status"] == "failed": - raise HTTPError(f'Job {job_id} failed: {status["error"]}') + raise HTTPError(f"Job {job_id} failed: {status['error']}") time.sleep(poll_interval) diff --git a/api/core/tools/provider/builtin/gaode/tools/gaode_weather.py b/api/core/tools/provider/builtin/gaode/tools/gaode_weather.py index ea06e2ce61..4642415e6d 100644 --- a/api/core/tools/provider/builtin/gaode/tools/gaode_weather.py +++ b/api/core/tools/provider/builtin/gaode/tools/gaode_weather.py @@ -37,8 +37,9 @@ class GaodeRepositoriesTool(BuiltinTool): CityCode = City_data["districts"][0]["adcode"] weatherInfo_response = s.request( method="GET", - url="{url}/weather/weatherInfo?city={citycode}&extensions=all&key={apikey}&output=json" - "".format(url=api_domain, citycode=CityCode, apikey=self.runtime.credentials.get("api_key")), + url="{url}/weather/weatherInfo?city={citycode}&extensions=all&key={apikey}&output=json".format( + url=api_domain, citycode=CityCode, apikey=self.runtime.credentials.get("api_key") + ), ) weatherInfo_data = weatherInfo_response.json() if weatherInfo_response.status_code == 200 and weatherInfo_data.get("info") == "OK": diff --git a/api/core/tools/provider/builtin/gitlab/tools/gitlab_files.py b/api/core/tools/provider/builtin/gitlab/tools/gitlab_files.py index ebcf13dc99..0ac9e2777d 100644 --- a/api/core/tools/provider/builtin/gitlab/tools/gitlab_files.py +++ b/api/core/tools/provider/builtin/gitlab/tools/gitlab_files.py @@ -11,19 +11,21 @@ class GitlabFilesTool(BuiltinTool): def _invoke( self, user_id: str, tool_parameters: dict[str, Any] ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - project = tool_parameters.get("project", "") repository = tool_parameters.get("repository", "") + project = tool_parameters.get("project", "") branch = tool_parameters.get("branch", "") path = tool_parameters.get("path", "") + file_path = tool_parameters.get("file_path", "") - if not project and not repository: - return self.create_text_message("Either project or repository is required") + if not repository and not project: + return self.create_text_message("Either repository or project is required") if not branch: return self.create_text_message("Branch is required") - if not path: - return self.create_text_message("Path is required") + if not path and not file_path: + return self.create_text_message("Either path or file_path is required") access_token = self.runtime.credentials.get("access_tokens") + headers = {"PRIVATE-TOKEN": access_token} site_url = self.runtime.credentials.get("site_url") if "access_tokens" not in self.runtime.credentials or not self.runtime.credentials.get("access_tokens"): @@ -31,33 +33,45 @@ class GitlabFilesTool(BuiltinTool): if "site_url" not in self.runtime.credentials or not self.runtime.credentials.get("site_url"): site_url = "https://gitlab.com" - # Get file content if repository: - result = self.fetch_files(site_url, access_token, repository, branch, path, is_repository=True) + # URL encode the repository path + identifier = urllib.parse.quote(repository, safe="") else: - result = self.fetch_files(site_url, access_token, project, branch, path, is_repository=False) + identifier = 
self.get_project_id(site_url, access_token, project) + if not identifier: + raise Exception(f"Project '{project}' not found.)") - return [self.create_json_message(item) for item in result] + # Get file content + if path: + results = self.fetch_files(site_url, headers, identifier, branch, path) + return [self.create_json_message(item) for item in results] + else: + result = self.fetch_file(site_url, headers, identifier, branch, file_path) + return [self.create_json_message(result)] + + @staticmethod + def fetch_file( + site_url: str, + headers: dict[str, str], + identifier: str, + branch: str, + path: str, + ) -> dict[str, Any]: + encoded_file_path = urllib.parse.quote(path, safe="") + file_url = f"{site_url}/api/v4/projects/{identifier}/repository/files/{encoded_file_path}/raw?ref={branch}" + + file_response = requests.get(file_url, headers=headers) + file_response.raise_for_status() + file_content = file_response.text + return {"path": path, "branch": branch, "content": file_content} def fetch_files( - self, site_url: str, access_token: str, identifier: str, branch: str, path: str, is_repository: bool + self, site_url: str, headers: dict[str, str], identifier: str, branch: str, path: str ) -> list[dict[str, Any]]: - domain = site_url - headers = {"PRIVATE-TOKEN": access_token} results = [] try: - if is_repository: - # URL encode the repository path - encoded_identifier = urllib.parse.quote(identifier, safe="") - tree_url = f"{domain}/api/v4/projects/{encoded_identifier}/repository/tree?path={path}&ref={branch}" - else: - # Get project ID from project name - project_id = self.get_project_id(site_url, access_token, identifier) - if not project_id: - return self.create_text_message(f"Project '{identifier}' not found.") - tree_url = f"{domain}/api/v4/projects/{project_id}/repository/tree?path={path}&ref={branch}" - + tree_url = f"{site_url}/api/v4/projects/{identifier}/repository/tree?path={path}&ref={branch}" response = requests.get(tree_url, headers=headers) response.raise_for_status() items = response.json() @@ -65,26 +79,10 @@ class GitlabFilesTool(BuiltinTool): for item in items: item_path = item["path"] if item["type"] == "tree": # It's a directory - results.extend( - self.fetch_files(site_url, access_token, identifier, branch, item_path, is_repository) - ) + results.extend(self.fetch_files(site_url, headers, identifier, branch, item_path)) else: # It's a file - encoded_item_path = urllib.parse.quote(item_path, safe="") - if is_repository: - file_url = ( - f"{domain}/api/v4/projects/{encoded_identifier}/repository/files" - f"/{encoded_item_path}/raw?ref={branch}" - ) - else: - file_url = ( - f"{domain}/api/v4/projects/{project_id}/repository/files" - f"{encoded_item_path}/raw?ref={branch}" - ) - - file_response = requests.get(file_url, headers=headers) - file_response.raise_for_status() - file_content = file_response.text - results.append({"path": item_path, "branch": branch, "content": file_content}) + result = self.fetch_file(site_url, headers, identifier, branch, item_path) + results.append(result) except requests.RequestException as e: print(f"Error fetching data from GitLab: {e}") diff --git a/api/core/tools/provider/builtin/gitlab/tools/gitlab_files.yaml b/api/core/tools/provider/builtin/gitlab/tools/gitlab_files.yaml index 4c733673f1..3371f62fa8 100644 --- a/api/core/tools/provider/builtin/gitlab/tools/gitlab_files.yaml +++ b/api/core/tools/provider/builtin/gitlab/tools/gitlab_files.yaml @@ -29,7 +29,7 @@ parameters: zh_Hans: 项目 human_description: en_US: project - zh_Hans: 项目 + 
zh_Hans: 项目(和仓库路径二选一,都填写以仓库路径优先) llm_description: Project for GitLab form: llm - name: branch @@ -45,12 +45,21 @@ parameters: form: llm - name: path type: string - required: true label: en_US: path - zh_Hans: 文件路径 + zh_Hans: 文件夹 human_description: en_US: path + zh_Hans: 文件夹 + llm_description: Dir path for GitLab + form: llm + - name: file_path + type: string + label: + en_US: file_path zh_Hans: 文件路径 + human_description: + en_US: file_path + zh_Hans: 文件路径(和文件夹二选一,都填写以文件夹优先) llm_description: File path for GitLab form: llm diff --git a/api/core/tools/provider/builtin/hap/tools/list_worksheet_records.py b/api/core/tools/provider/builtin/hap/tools/list_worksheet_records.py index d6ac3688b7..9e43d5c532 100644 --- a/api/core/tools/provider/builtin/hap/tools/list_worksheet_records.py +++ b/api/core/tools/provider/builtin/hap/tools/list_worksheet_records.py @@ -110,7 +110,7 @@ class ListWorksheetRecordsTool(BuiltinTool): result["rows"].append(self.get_row_field_value(row, schema)) return self.create_text_message(json.dumps(result, ensure_ascii=False)) else: - result_text = f"Found {result['total']} rows in worksheet \"{worksheet_name}\"." + result_text = f'Found {result["total"]} rows in worksheet "{worksheet_name}".' if result["total"] > 0: result_text += ( f" The following are {min(limit, result['total'])}" diff --git a/api/core/tools/provider/builtin/stability/tools/base.py b/api/core/tools/provider/builtin/stability/tools/base.py index c3b7edbefa..2d1cd92870 100644 --- a/api/core/tools/provider/builtin/stability/tools/base.py +++ b/api/core/tools/provider/builtin/stability/tools/base.py @@ -28,4 +28,4 @@ class BaseStabilityAuthorization: """ This method is responsible for generating the authorization headers. """ - return {"Authorization": f'Bearer {credentials.get("api_key", "")}'} + return {"Authorization": f"Bearer {credentials.get('api_key', '')}"} diff --git a/api/core/tools/provider/builtin/vanna/vanna.py b/api/core/tools/provider/builtin/vanna/vanna.py index 1d71414bf3..4f9cac2beb 100644 --- a/api/core/tools/provider/builtin/vanna/vanna.py +++ b/api/core/tools/provider/builtin/vanna/vanna.py @@ -38,7 +38,7 @@ class VannaProvider(BuiltinToolProviderController): tool_parameters={ "model": "chinook", "db_type": "SQLite", - "url": f'{self._get_protocol_and_main_domain(credentials["base_url"])}/Chinook.sqlite', + "url": f"{self._get_protocol_and_main_domain(credentials['base_url'])}/Chinook.sqlite", "query": "What are the top 10 customers by sales?", }, ) diff --git a/api/core/tools/provider/builtin/websearch/tools/job_search.py b/api/core/tools/provider/builtin/websearch/tools/job_search.py index 293f4f6329..13eb403391 100644 --- a/api/core/tools/provider/builtin/websearch/tools/job_search.py +++ b/api/core/tools/provider/builtin/websearch/tools/job_search.py @@ -43,7 +43,7 @@ class SerplyApi: def parse_results(res: dict) -> str: """Process response from Serply Job Search.""" jobs = res.get("jobs", []) - if not jobs: + if not res or "jobs" not in res: raise ValueError(f"Got error from Serply: {res}") string = [] diff --git a/api/core/tools/provider/builtin/websearch/tools/news_search.py b/api/core/tools/provider/builtin/websearch/tools/news_search.py index 9b5482fe18..7a8a732ff3 100644 --- a/api/core/tools/provider/builtin/websearch/tools/news_search.py +++ b/api/core/tools/provider/builtin/websearch/tools/news_search.py @@ -43,7 +43,7 @@ class SerplyApi: def parse_results(res: dict) -> str: """Process response from Serply News Search.""" news = res.get("entries", []) - if not news: + if not 
res or "entries" not in res: raise ValueError(f"Got error from Serply: {res}") string = [] diff --git a/api/core/tools/provider/builtin/websearch/tools/scholar_search.py b/api/core/tools/provider/builtin/websearch/tools/scholar_search.py index 798d059b51..32c5d39e5b 100644 --- a/api/core/tools/provider/builtin/websearch/tools/scholar_search.py +++ b/api/core/tools/provider/builtin/websearch/tools/scholar_search.py @@ -43,7 +43,7 @@ class SerplyApi: def parse_results(res: dict) -> str: """Process response from Serply News Search.""" articles = res.get("articles", []) - if not articles: + if not res or "articles" not in res: raise ValueError(f"Got error from Serply: {res}") string = [] diff --git a/api/core/tools/provider/builtin/websearch/tools/web_search.py b/api/core/tools/provider/builtin/websearch/tools/web_search.py index fe363ac7a4..d0e93cb0fa 100644 --- a/api/core/tools/provider/builtin/websearch/tools/web_search.py +++ b/api/core/tools/provider/builtin/websearch/tools/web_search.py @@ -42,7 +42,7 @@ class SerplyApi: def parse_results(res: dict) -> str: """Process response from Serply Web Search.""" results = res.get("results", []) - if not results: + if not res or "results" not in res: raise ValueError(f"Got error from Serply: {res}") string = [] diff --git a/api/core/tools/tool/api_tool.py b/api/core/tools/tool/api_tool.py index 7d27c4fcf1..6904fecb46 100644 --- a/api/core/tools/tool/api_tool.py +++ b/api/core/tools/tool/api_tool.py @@ -84,9 +84,9 @@ class ApiTool(Tool): if "api_key_header_prefix" in credentials: api_key_header_prefix = credentials["api_key_header_prefix"] if api_key_header_prefix == "basic" and credentials["api_key_value"]: - credentials["api_key_value"] = f'Basic {credentials["api_key_value"]}' + credentials["api_key_value"] = f"Basic {credentials['api_key_value']}" elif api_key_header_prefix == "bearer" and credentials["api_key_value"]: - credentials["api_key_value"] = f'Bearer {credentials["api_key_value"]}' + credentials["api_key_value"] = f"Bearer {credentials['api_key_value']}" elif api_key_header_prefix == "custom": pass diff --git a/api/core/tools/utils/message_transformer.py b/api/core/tools/utils/message_transformer.py index 3509f1e6e5..b28953264c 100644 --- a/api/core/tools/utils/message_transformer.py +++ b/api/core/tools/utils/message_transformer.py @@ -29,7 +29,7 @@ class ToolFileMessageTransformer: user_id=user_id, tenant_id=tenant_id, conversation_id=conversation_id, file_url=message.message ) - url = f'/files/tools/{file.id}{guess_extension(file.mimetype) or ".png"}' + url = f"/files/tools/{file.id}{guess_extension(file.mimetype) or '.png'}" result.append( ToolInvokeMessage( @@ -122,4 +122,4 @@ class ToolFileMessageTransformer: @classmethod def get_tool_file_url(cls, tool_file_id: str, extension: Optional[str]) -> str: - return f'/files/tools/{tool_file_id}{extension or ".bin"}' + return f"/files/tools/{tool_file_id}{extension or '.bin'}" diff --git a/api/core/tools/utils/parser.py b/api/core/tools/utils/parser.py index 30e4fdcf06..b15a86b5c0 100644 --- a/api/core/tools/utils/parser.py +++ b/api/core/tools/utils/parser.py @@ -5,6 +5,7 @@ from json import loads as json_loads from json.decoder import JSONDecodeError from typing import Optional +from flask import request from requests import get from yaml import YAMLError, safe_load # type: ignore @@ -29,6 +30,10 @@ class ApiBasedToolSchemaParser: raise ToolProviderNotFoundError("No server found in the openapi yaml.") server_url = openapi["servers"][0]["url"] + request_env = 
request.headers.get("X-Request-Env") + if request_env: + matched_servers = [server["url"] for server in openapi["servers"] if server["env"] == request_env] + server_url = matched_servers[0] if matched_servers else server_url # list all interfaces interfaces = [] @@ -144,7 +149,7 @@ class ApiBasedToolSchemaParser: if not path: path = str(uuid.uuid4()) - interface["operation"]["operationId"] = f'{path}_{interface["method"]}' + interface["operation"]["operationId"] = f"{path}_{interface['method']}" bundles.append( ApiToolBundle( diff --git a/api/core/variables/segments.py b/api/core/variables/segments.py index 69bd5567a4..a9f5651692 100644 --- a/api/core/variables/segments.py +++ b/api/core/variables/segments.py @@ -134,6 +134,10 @@ class ArrayStringSegment(ArraySegment): value_type: SegmentType = SegmentType.ARRAY_STRING value: Sequence[str] + @property + def text(self) -> str: + return json.dumps(self.value) + class ArrayNumberSegment(ArraySegment): value_type: SegmentType = SegmentType.ARRAY_NUMBER diff --git a/api/core/workflow/nodes/answer/base_stream_processor.py b/api/core/workflow/nodes/answer/base_stream_processor.py index f22ea078fb..4759356ae1 100644 --- a/api/core/workflow/nodes/answer/base_stream_processor.py +++ b/api/core/workflow/nodes/answer/base_stream_processor.py @@ -1,6 +1,7 @@ import logging from abc import ABC, abstractmethod from collections.abc import Generator +from typing import Optional from core.workflow.entities.variable_pool import VariablePool from core.workflow.graph_engine.entities.event import GraphEngineEvent, NodeRunExceptionEvent, NodeRunSucceededEvent @@ -48,25 +49,35 @@ class StreamProcessor(ABC): # we remove the node maybe shortcut the answer node, so comment this code for now # there is not effect on the answer node and the workflow, when we have a better solution # we can open this code. Issues: #11542 #9560 #10638 #10564 - ids = self._fetch_node_ids_in_reachable_branch(edge.target_node_id) - if "answer" in ids: - continue - else: - reachable_node_ids.extend(ids) + # ids = self._fetch_node_ids_in_reachable_branch(edge.target_node_id) + # if "answer" in ids: + # continue + # else: + # reachable_node_ids.extend(ids) + + # The branch_identify parameter is added to ensure that + # only nodes in the correct logical branch are included. 
+ ids = self._fetch_node_ids_in_reachable_branch(edge.target_node_id, run_result.edge_source_handle) + reachable_node_ids.extend(ids) else: unreachable_first_node_ids.append(edge.target_node_id) for node_id in unreachable_first_node_ids: self._remove_node_ids_in_unreachable_branch(node_id, reachable_node_ids) - def _fetch_node_ids_in_reachable_branch(self, node_id: str) -> list[str]: + def _fetch_node_ids_in_reachable_branch(self, node_id: str, branch_identify: Optional[str] = None) -> list[str]: node_ids = [] for edge in self.graph.edge_mapping.get(node_id, []): if edge.target_node_id == self.graph.root_node_id: continue + # Only follow edges that match the branch_identify or have no run_condition + if edge.run_condition and edge.run_condition.branch_identify: + if not branch_identify or edge.run_condition.branch_identify != branch_identify: + continue + node_ids.append(edge.target_node_id) - node_ids.extend(self._fetch_node_ids_in_reachable_branch(edge.target_node_id)) + node_ids.extend(self._fetch_node_ids_in_reachable_branch(edge.target_node_id, branch_identify)) return node_ids def _remove_node_ids_in_unreachable_branch(self, node_id: str, reachable_node_ids: list[str]) -> None: diff --git a/api/core/workflow/nodes/http_request/executor.py b/api/core/workflow/nodes/http_request/executor.py index 87b71394e4..5ed2cd6164 100644 --- a/api/core/workflow/nodes/http_request/executor.py +++ b/api/core/workflow/nodes/http_request/executor.py @@ -253,9 +253,9 @@ class Executor: ) if executor_response.size > threshold_size: raise ResponseSizeError( - f'{"File" if executor_response.is_file else "Text"} size is too large,' - f' max size is {threshold_size / 1024 / 1024:.2f} MB,' - f' but current size is {executor_response.readable_size}.' + f"{'File' if executor_response.is_file else 'Text'} size is too large," + f" max size is {threshold_size / 1024 / 1024:.2f} MB," + f" but current size is {executor_response.readable_size}." ) return executor_response @@ -338,7 +338,7 @@ class Executor: if self.auth.config and self.auth.config.header: authorization_header = self.auth.config.header if k.lower() == authorization_header.lower(): - raw += f'{k}: {"*" * len(v)}\r\n' + raw += f"{k}: {'*' * len(v)}\r\n" continue raw += f"{k}: {v}\r\n" diff --git a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py index be82ad2a82..0f239af51a 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py +++ b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py @@ -1,5 +1,4 @@ import logging -import time from collections.abc import Mapping, Sequence from typing import Any, cast @@ -20,10 +19,8 @@ from core.workflow.entities.node_entities import NodeRunResult from core.workflow.nodes.base import BaseNode from core.workflow.nodes.enums import NodeType from extensions.ext_database import db -from extensions.ext_redis import redis_client from models.dataset import Dataset, Document from models.workflow import WorkflowNodeExecutionStatus -from services.feature_service import FeatureService from .entities import KnowledgeRetrievalNodeData from .exc import ( @@ -64,23 +61,6 @@ class KnowledgeRetrievalNode(BaseNode[KnowledgeRetrievalNodeData]): return NodeRunResult( status=WorkflowNodeExecutionStatus.FAILED, inputs=variables, error="Query is required." 
) - # check rate limit - if self.tenant_id: - knowledge_rate_limit = FeatureService.get_knowledge_rate_limit(self.tenant_id) - if knowledge_rate_limit.enabled: - current_time = int(time.time() * 1000) - key = f"rate_limit_{self.tenant_id}" - redis_client.zadd(key, {current_time: current_time}) - redis_client.zremrangebyscore(key, 0, current_time - 60000) - request_count = redis_client.zcard(key) - if request_count > knowledge_rate_limit.limit: - return NodeRunResult( - status=WorkflowNodeExecutionStatus.FAILED, - inputs=variables, - error="Sorry, you have reached the knowledge base request rate limit of your subscription.", - error_type="RateLimitExceeded", - ) - # retrieve knowledge try: results = self._fetch_dataset_retriever(node_data=self.node_data, query=query) diff --git a/api/core/workflow/nodes/variable_assigner/v2/node.py b/api/core/workflow/nodes/variable_assigner/v2/node.py index 0c4aae827c..afa5656f46 100644 --- a/api/core/workflow/nodes/variable_assigner/v2/node.py +++ b/api/core/workflow/nodes/variable_assigner/v2/node.py @@ -1,4 +1,5 @@ import json +from collections.abc import Sequence from typing import Any, cast from core.variables import SegmentType, Variable @@ -31,7 +32,7 @@ class VariableAssignerNode(BaseNode[VariableAssignerNodeData]): inputs = self.node_data.model_dump() process_data: dict[str, Any] = {} # NOTE: This node has no outputs - updated_variables: list[Variable] = [] + updated_variable_selectors: list[Sequence[str]] = [] try: for item in self.node_data.items: @@ -98,7 +99,8 @@ class VariableAssignerNode(BaseNode[VariableAssignerNodeData]): value=item.value, ) variable = variable.model_copy(update={"value": updated_value}) - updated_variables.append(variable) + self.graph_runtime_state.variable_pool.add(variable.selector, variable) + updated_variable_selectors.append(variable.selector) except VariableOperatorNodeError as e: return NodeRunResult( status=WorkflowNodeExecutionStatus.FAILED, @@ -107,9 +109,15 @@ class VariableAssignerNode(BaseNode[VariableAssignerNodeData]): error=str(e), ) + # The `updated_variable_selectors` is a list contains list[str] which not hashable, + # remove the duplicated items first. 
+ updated_variable_selectors = list(set(map(tuple, updated_variable_selectors))) + # Update variables - for variable in updated_variables: - self.graph_runtime_state.variable_pool.add(variable.selector, variable) + for selector in updated_variable_selectors: + variable = self.graph_runtime_state.variable_pool.get(selector) + if not isinstance(variable, Variable): + raise VariableNotFoundError(variable_selector=selector) process_data[variable.name] = variable.value if variable.selector[0] == CONVERSATION_VARIABLE_NODE_ID: diff --git a/api/events/event_handlers/delete_tool_parameters_cache_when_sync_draft_workflow.py b/api/events/event_handlers/delete_tool_parameters_cache_when_sync_draft_workflow.py index f89fae24a5..249bd14429 100644 --- a/api/events/event_handlers/delete_tool_parameters_cache_when_sync_draft_workflow.py +++ b/api/events/event_handlers/delete_tool_parameters_cache_when_sync_draft_workflow.py @@ -26,7 +26,7 @@ def handle(sender, **kwargs): tool_runtime=tool_runtime, provider_name=tool_entity.provider_name, provider_type=tool_entity.provider_type, - identity_id=f'WORKFLOW.{app.id}.{node_data.get("id")}', + identity_id=f"WORKFLOW.{app.id}.{node_data.get('id')}", ) manager.delete_tool_parameters_cache() except: diff --git a/api/fields/member_fields.py b/api/fields/member_fields.py index 0c854c640c..0900bffb8a 100644 --- a/api/fields/member_fields.py +++ b/api/fields/member_fields.py @@ -1,6 +1,6 @@ from flask_restful import fields # type: ignore -from libs.helper import TimestampField +from libs.helper import AvatarUrlField, TimestampField simple_account_fields = {"id": fields.String, "name": fields.String, "email": fields.String} @@ -8,6 +8,7 @@ account_fields = { "id": fields.String, "name": fields.String, "avatar": fields.String, + "avatar_url": AvatarUrlField, "email": fields.String, "is_password_set": fields.Boolean, "interface_language": fields.String, @@ -22,6 +23,7 @@ account_with_role_fields = { "id": fields.String, "name": fields.String, "avatar": fields.String, + "avatar_url": AvatarUrlField, "email": fields.String, "last_login_at": TimestampField, "last_active_at": TimestampField, diff --git a/api/libs/helper.py b/api/libs/helper.py index eaa4efdb71..4f14f010f4 100644 --- a/api/libs/helper.py +++ b/api/libs/helper.py @@ -41,6 +41,18 @@ class AppIconUrlField(fields.Raw): return None +class AvatarUrlField(fields.Raw): + def output(self, key, obj): + if obj is None: + return None + + from models.account import Account + + if isinstance(obj, Account) and obj.avatar is not None: + return file_helpers.get_signed_file_url(obj.avatar) + return None + + class TimestampField(fields.Raw): def format(self, value) -> int: return int(value.timestamp()) diff --git a/api/models/dataset.py b/api/models/dataset.py index 567f7db432..1cf3dc42fe 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -13,6 +13,7 @@ from typing import Any, cast from sqlalchemy import func from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.orm import Mapped from configs import dify_config from core.rag.retrieval.retrieval_methods import RetrievalMethod @@ -515,7 +516,7 @@ class DocumentSegment(db.Model): # type: ignore[name-defined] tenant_id = db.Column(StringUUID, nullable=False) dataset_id = db.Column(StringUUID, nullable=False) document_id = db.Column(StringUUID, nullable=False) - position = db.Column(db.Integer, nullable=False) + position: Mapped[int] content = db.Column(db.Text, nullable=False) answer = db.Column(db.Text, nullable=True) word_count = db.Column(db.Integer, 
nullable=False) diff --git a/api/poetry.lock b/api/poetry.lock index fe80545e7c..038979330f 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. [[package]] name = "aiofiles" @@ -6,6 +6,8 @@ version = "24.1.0" description = "File support for asyncio." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, @@ -17,6 +19,8 @@ version = "2.4.4" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.8" +groups = ["main", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "aiohappyeyeballs-2.4.4-py3-none-any.whl", hash = "sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8"}, {file = "aiohappyeyeballs-2.4.4.tar.gz", hash = "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745"}, @@ -28,6 +32,8 @@ version = "3.11.11" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.9" +groups = ["main", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "aiohttp-3.11.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a60804bff28662cbcf340a4d61598891f12eea3a66af48ecfdc975ceec21e3c8"}, {file = "aiohttp-3.11.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b4fa1cb5f270fb3eab079536b764ad740bb749ce69a94d4ec30ceee1b5940d5"}, @@ -125,6 +131,8 @@ version = "2.9.1" description = "Simple retry client for aiohttp" optional = false python-versions = ">=3.7" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "aiohttp_retry-2.9.1-py3-none-any.whl", hash = "sha256:66d2759d1921838256a05a3f80ad7e724936f083e35be5abb5e16eed6be6dc54"}, {file = "aiohttp_retry-2.9.1.tar.gz", hash = "sha256:8eb75e904ed4ee5c2ec242fefe85bf04240f685391c4879d8f541d6028ff01f1"}, @@ -139,6 +147,8 @@ version = "0.2.0" description = "MySQL driver for asyncio." optional = false python-versions = ">=3.7" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "aiomysql-0.2.0-py3-none-any.whl", hash = "sha256:b7c26da0daf23a5ec5e0b133c03d20657276e4eae9b73e040b72787f6f6ade0a"}, {file = "aiomysql-0.2.0.tar.gz", hash = "sha256:558b9c26d580d08b8c5fd1be23c5231ce3aeff2dadad989540fee740253deb67"}, @@ -157,6 +167,8 @@ version = "1.3.2" description = "aiosignal: a list of registered asynchronous callbacks" optional = false python-versions = ">=3.9" +groups = ["main", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, @@ -171,6 +183,8 @@ version = "1.14.0" description = "A database migration tool for SQLAlchemy." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "alembic-1.14.0-py3-none-any.whl", hash = "sha256:99bd884ca390466db5e27ffccff1d179ec5c05c965cfefc0607e69f9e411cb25"}, {file = "alembic-1.14.0.tar.gz", hash = "sha256:b00892b53b3642d0b8dbedba234dbf1924b69be83a9a769d5a624b01094e304b"}, @@ -190,6 +204,8 @@ version = "0.3.6" description = "The alibabacloud credentials module of alibabaCloud Python SDK." optional = false python-versions = ">=3.6" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "alibabacloud_credentials-0.3.6.tar.gz", hash = "sha256:caa82cf258648dcbe1ca14aeba50ba21845567d6ac3cd48d318e0a445fff7f96"}, ] @@ -203,6 +219,8 @@ version = "0.0.3" description = "The endpoint-util module of alibabaCloud Python SDK." optional = false python-versions = "*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "alibabacloud_endpoint_util-0.0.3.tar.gz", hash = "sha256:8c0efb76fdcc3af4ca716ef24bbce770201a3f83f98c0afcf81655f684b9c7d2"}, ] @@ -216,6 +234,8 @@ version = "0.0.2" description = "Alibaba Cloud Gateway SPI SDK Library for Python" optional = false python-versions = ">=3.6" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "alibabacloud_gateway_spi-0.0.2.tar.gz", hash = "sha256:f932c8ba67291531dfbee6ca521dcf3523eb4ff93512bf0aaf135f2d4fc4704d"}, ] @@ -229,6 +249,8 @@ version = "3.8.3" description = "Alibaba Cloud AnalyticDB for PostgreSQL (20160503) SDK Library for Python" optional = false python-versions = ">=3.6" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "alibabacloud_gpdb20160503-3.8.3-py3-none-any.whl", hash = "sha256:06e1c46ce5e4e9d1bcae76e76e51034196c625799d06b2efec8d46a7df323fe8"}, {file = "alibabacloud_gpdb20160503-3.8.3.tar.gz", hash = "sha256:4dfcc0d9cff5a921d529d76f4bf97e2ceb9dc2fa53f00ab055f08509423d8e30"}, @@ -250,6 +272,8 @@ version = "0.2.2" description = "Aliyun Tea OpenApi Library for Python" optional = false python-versions = "*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "alibabacloud_openapi_util-0.2.2.tar.gz", hash = "sha256:ebbc3906f554cb4bf8f513e43e8a33e8b6a3d4a0ef13617a0e14c3dda8ef52a8"}, ] @@ -264,6 +288,8 @@ version = "2.0.0" description = "Alibaba Cloud OpenPlatform (20191219) SDK Library for Python" optional = false python-versions = ">=3.6" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "alibabacloud_openplatform20191219-2.0.0-py3-none-any.whl", hash = "sha256:873821c45bca72a6c6ec7a906c9cb21554c122e88893bbac3986934dab30dd36"}, {file = "alibabacloud_openplatform20191219-2.0.0.tar.gz", hash = "sha256:e67f4c337b7542538746592c6a474bd4ae3a9edccdf62e11a32ca61fad3c9020"}, @@ -281,6 +307,8 @@ version = "0.1.0" description = "Aliyun Tea OSS SDK Library for Python" optional = false python-versions = ">=3.6" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "alibabacloud_oss_sdk-0.1.0.tar.gz", hash = "sha256:cc5ce36044bae758047fccb56c0cb6204cbc362d18cc3dd4ceac54c8c0897b8b"}, ] @@ -298,6 +326,8 @@ version = "0.0.6" description = "The oss util module of alibabaCloud Python SDK." 
optional = false python-versions = "*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "alibabacloud_oss_util-0.0.6.tar.gz", hash = "sha256:d3ecec36632434bd509a113e8cf327dc23e830ac8d9dd6949926f4e334c8b5d6"}, ] @@ -311,6 +341,8 @@ version = "0.4.0" description = "The tea module of alibabaCloud Python SDK." optional = false python-versions = ">=3.7" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "alibabacloud-tea-0.4.0.tar.gz", hash = "sha256:bdf72d747723bab190331b3c8593109fe2807504469bc0147f78c8c4945ed396"}, {file = "alibabacloud_tea-0.4.0-py3-none-any.whl", hash = "sha256:59fae5765e6654f884e130233df6fb61ca0fbe01a29ed0755a1cf099a3d4d863"}, @@ -326,6 +358,8 @@ version = "0.0.5" description = "The tea-fileform module of alibabaCloud Python SDK." optional = false python-versions = "*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "alibabacloud_tea_fileform-0.0.5.tar.gz", hash = "sha256:fd00a8c9d85e785a7655059e9651f9e91784678881831f60589172387b968ee8"}, ] @@ -339,6 +373,8 @@ version = "0.3.12" description = "Alibaba Cloud openapi SDK Library for Python" optional = false python-versions = ">=3.6" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "alibabacloud_tea_openapi-0.3.12.tar.gz", hash = "sha256:2e14809f357438e62c1ef4976a7655110dd54a75bbfa7d905fa3798355cfd974"}, ] @@ -356,6 +392,8 @@ version = "0.3.13" description = "The tea-util module of alibabaCloud Python SDK." optional = false python-versions = ">=3.6" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "alibabacloud_tea_util-0.3.13.tar.gz", hash = "sha256:8cbdfd2a03fbbf622f901439fa08643898290dd40e1d928347f6346e43f63c90"}, ] @@ -369,6 +407,8 @@ version = "0.0.2" description = "The tea-xml module of alibabaCloud Python SDK." optional = false python-versions = "*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "alibabacloud_tea_xml-0.0.2.tar.gz", hash = "sha256:f0135e8148fd7d9c1f029db161863f37f144f837c280cba16c2edeb2f9c549d8"}, ] @@ -382,6 +422,8 @@ version = "2.16.0" description = "The core module of Aliyun Python SDK." optional = false python-versions = ">=3.7" +groups = ["storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "aliyun-python-sdk-core-2.16.0.tar.gz", hash = "sha256:651caad597eb39d4fad6cf85133dffe92837d53bdf62db9d8f37dab6508bb8f9"}, ] @@ -396,6 +438,8 @@ version = "2.16.5" description = "The kms module of Aliyun Python sdk." optional = false python-versions = "*" +groups = ["storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "aliyun-python-sdk-kms-2.16.5.tar.gz", hash = "sha256:f328a8a19d83ecbb965ffce0ec1e9930755216d104638cd95ecd362753b813b3"}, {file = "aliyun_python_sdk_kms-2.16.5-py2.py3-none-any.whl", hash = "sha256:24b6cdc4fd161d2942619479c8d050c63ea9cd22b044fe33b60bbb60153786f0"}, @@ -410,6 +454,8 @@ version = "5.3.1" description = "Low-level AMQP client for Python (fork of amqplib)." 
optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2"}, {file = "amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432"}, @@ -424,6 +470,8 @@ version = "9.0.1" description = "A library for parsing ISO 8601 strings." optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, @@ -438,6 +486,8 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -449,6 +499,8 @@ version = "0.23.1" description = "The official Python library for the anthropic API" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "anthropic-0.23.1-py3-none-any.whl", hash = "sha256:6dc5779dae83a5834864f4a4af0166c972b70f4cb8fd2765e1558282cc6d6242"}, {file = "anthropic-0.23.1.tar.gz", hash = "sha256:9325103702cbc96bb09d1b58c36bde75c726f6a01029fb4d85f41ebba07e9066"}, @@ -473,6 +525,8 @@ version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, @@ -494,6 +548,8 @@ version = "2.1.0" description = "Python wrapper for the arXiv API: https://arxiv.org/help/api/" optional = false python-versions = ">=3.7" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "arxiv-2.1.0-py3-none-any.whl", hash = "sha256:d634a0a59c9f05baf524eaa65563bb0a4532d2b4727a1162a1a9ba7e1e6e48cc"}, {file = "arxiv-2.1.0.tar.gz", hash = "sha256:eb4b1d5ab9dfd66027c344bb324c20be21d56fe15f6ce216ed5b209df747dea8"}, @@ -509,6 +565,8 @@ version = "3.8.1" description = "ASGI specs, helper code, and adapters" optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, @@ -523,6 +581,8 @@ version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = 
"python_version == \"3.11\" and python_full_version < \"3.11.3\"" files = [ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, @@ -534,6 +594,8 @@ version = "23.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" +groups = ["main", "lint", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, @@ -553,6 +615,8 @@ version = "1.3.1" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." optional = false python-versions = ">=3.8" +groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "Authlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:d35800b973099bbadc49b42b256ecb80041ad56b7fe1216a362c7943c088f377"}, {file = "authlib-1.3.1.tar.gz", hash = "sha256:7ae843f03c06c5c0debd63c9db91f9fda64fa62a42a77419fa15fbb7e7a58917"}, @@ -567,6 +631,8 @@ version = "1.0.0b6" description = "Microsoft Azure AI Inference Client Library for Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "azure_ai_inference-1.0.0b6-py3-none-any.whl", hash = "sha256:5699ad78d70ec2d227a5eff2c1bafc845018f6624edc5b03589dfff861c54958"}, {file = "azure_ai_inference-1.0.0b6.tar.gz", hash = "sha256:b8ac941de1e69151bad464191e18856d4e74f962ae03235da137a9a326143676"}, @@ -587,6 +653,8 @@ version = "1.20.0" description = "Microsoft Azure Machine Learning Client Library for Python" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "azure-ai-ml-1.20.0.tar.gz", hash = "sha256:6432a0da1b7250cb0db5a1c33202e0419935e19ea32d4c2b3220705f8f1d4101"}, {file = "azure_ai_ml-1.20.0-py3-none-any.whl", hash = "sha256:c7eb3c5ccf82a6ee94403c3e5060763decd38cf03ff2620a4a6577526e605104"}, @@ -623,6 +691,8 @@ version = "1.1.28" description = "Microsoft Azure Client Library for Python (Common)" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "azure-common-1.1.28.zip", hash = "sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3"}, {file = "azure_common-1.1.28-py2.py3-none-any.whl", hash = "sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad"}, @@ -634,6 +704,8 @@ version = "1.32.0" description = "Microsoft Azure Core Library for Python" optional = false python-versions = ">=3.8" +groups = ["main", "storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "azure_core-1.32.0-py3-none-any.whl", hash = "sha256:eac191a0efb23bfa83fddf321b27b122b4ec847befa3091fa736a5c32c50d7b4"}, {file = "azure_core-1.32.0.tar.gz", hash = "sha256:22b3c35d6b2dae14990f6c1be2912bf23ffe50b220e708a28ab1bb92b1c730e5"}, @@ -653,6 +725,8 @@ version = "1.16.1" description = "Microsoft Azure Identity Library for Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = 
"python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "azure-identity-1.16.1.tar.gz", hash = "sha256:6d93f04468f240d59246d8afde3091494a5040d4f141cad0f49fc0c399d0d91e"}, {file = "azure_identity-1.16.1-py3-none-any.whl", hash = "sha256:8fb07c25642cd4ac422559a8b50d3e77f73dcc2bbfaba419d06d6c9d7cff6726"}, @@ -670,6 +744,8 @@ version = "1.5.0" description = "Microsoft Azure Management Core Library for Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "azure_mgmt_core-1.5.0-py3-none-any.whl", hash = "sha256:18aaa5a723ee8ae05bf1bfc9f6d0ffb996631c7ea3c922cc86f522973ce07b5f"}, {file = "azure_mgmt_core-1.5.0.tar.gz", hash = "sha256:380ae3dfa3639f4a5c246a7db7ed2d08374e88230fd0da3eb899f7c11e5c441a"}, @@ -684,6 +760,8 @@ version = "12.13.0" description = "Microsoft Azure Blob Storage Client Library for Python" optional = false python-versions = ">=3.6" +groups = ["main", "storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "azure-storage-blob-12.13.0.zip", hash = "sha256:53f0d4cd32970ac9ff9b9753f83dd2fb3f9ac30e1d01e71638c436c509bfd884"}, {file = "azure_storage_blob-12.13.0-py3-none-any.whl", hash = "sha256:280a6ab032845bab9627582bee78a50497ca2f14772929b5c5ee8b4605af0cb3"}, @@ -700,6 +778,8 @@ version = "12.8.0" description = "Microsoft Azure File DataLake Storage Client Library for Python" optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "azure-storage-file-datalake-12.8.0.zip", hash = "sha256:12e6306e5efb5ca28e0ccd9fa79a2c61acd589866d6109fe5601b18509da92f4"}, {file = "azure_storage_file_datalake-12.8.0-py3-none-any.whl", hash = "sha256:b6cf5733fe794bf3c866efbe3ce1941409e35b6b125028ac558b436bf90f2de7"}, @@ -716,6 +796,8 @@ version = "12.20.0" description = "Microsoft Azure Azure File Share Storage Client Library for Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "azure_storage_file_share-12.20.0-py3-none-any.whl", hash = "sha256:fd5c4f09d7784d68b8ed3de473b7525904f1c4b115f9cd200c838b0ee720cb5f"}, {file = "azure_storage_file_share-12.20.0.tar.gz", hash = "sha256:f120fc67bae0a84c1b54d06faa70df351be14d1395b9a085350e833f7d347a65"}, @@ -736,6 +818,8 @@ version = "2.2.1" description = "Function decoration for backoff and retry" optional = false python-versions = ">=3.7,<4.0" +groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, @@ -747,6 +831,8 @@ version = "0.9.25" description = "BCE SDK for python" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,<4,>=2.7" +groups = ["storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "bce_python_sdk-0.9.25-py3-none-any.whl", hash = "sha256:cd1ab4c887e163adba6bfb3cd40465a365e5f4255705a015b0cdbe768e649877"}, {file = "bce_python_sdk-0.9.25.tar.gz", hash = "sha256:93a0623fbb1bf3a58b4f2d7bdbd799a3b342a538f0c72950c77168e431470e86"}, @@ -763,6 +849,8 @@ version = "4.2.1" description = "Modern password hashing for your software and your servers" optional = 
false python-versions = ">=3.7" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "bcrypt-4.2.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:1340411a0894b7d3ef562fb233e4b6ed58add185228650942bdc885362f32c17"}, {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ee315739bc8387aa36ff127afc99120ee452924e0df517a8f3e4c0187a0f5f"}, @@ -801,6 +889,8 @@ version = "4.12.2" description = "Screen-scraping library" optional = false python-versions = ">=3.6.0" +groups = ["main", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, @@ -819,6 +909,8 @@ version = "4.2.1" description = "Python multiprocessing fork with improvements and bugfixes" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb"}, {file = "billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f"}, @@ -830,6 +922,8 @@ version = "1.9.0" description = "Fast, simple object-to-object and broadcast signaling" optional = false python-versions = ">=3.9" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"}, {file = "blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"}, @@ -841,6 +935,8 @@ version = "1.35.74" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "boto3-1.35.74-py3-none-any.whl", hash = "sha256:dab5bddbbe57dc707b6f6a1f25dc2823b8e234b6fe99fafef7fc406ab73031b9"}, {file = "boto3-1.35.74.tar.gz", hash = "sha256:88370c6845ba71a4dae7f6b357099df29b3965da584be040c8e72c9902bc9492"}, @@ -860,6 +956,8 @@ version = "1.35.94" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "botocore-1.35.94-py3-none-any.whl", hash = "sha256:d784d944865d8279c79d2301fc09ac28b5221d4e7328fb4e23c642c253b9932c"}, {file = "botocore-1.35.94.tar.gz", hash = "sha256:2b3309b356541faa4d88bb957dcac1d8004aa44953c0b7d4521a6cc5d3d5d6ba"}, @@ -879,6 +977,8 @@ version = "1.4.2" description = "Fast NumPy array functions written in C" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "Bottleneck-1.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:125436df93751a226eab1732783aa8f6125e88e779587aa61be071fb66e41f9d"}, {file = "Bottleneck-1.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c6df9a60ec6ab88fec934ca864266ba95edd89c490af71dc9cd8afb2a54ebd9"}, @@ -930,6 +1030,8 @@ version = "1.1.0" description = "Python bindings for the Brotli compression library" optional = false python-versions = "*" +groups = ["main"] +markers = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_python_implementation != \"PyPy\"" files = [ {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1140c64812cb9b06c922e77f1c26a75ec5e3f0fb2bf92cc8c58720dec276752"}, {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8fd5270e906eef71d4a8d19b7c6a43760c6abcfcc10c9101d14eb2357418de9"}, @@ -1064,6 +1166,8 @@ version = "1.1.0.0" description = "Python CFFI bindings to the Brotli library" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_python_implementation == \"PyPy\"" files = [ {file = "brotlicffi-1.1.0.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9b7ae6bd1a3f0df532b6d67ff674099a96d22bc0948955cb338488c31bfb8851"}, {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19ffc919fa4fc6ace69286e0a23b3789b4219058313cf9b45625016bf7ff996b"}, @@ -1103,6 +1207,8 @@ version = "0.0.2" description = "Dummy package for Beautiful Soup (beautifulsoup4)" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "bs4-0.0.2-py2.py3-none-any.whl", hash = "sha256:abf8742c0805ef7f662dce4b51cca104cffe52b835238afc169142ab9b3fbccc"}, {file = "bs4-0.0.2.tar.gz", hash = "sha256:a48685c58f50fe127722417bae83fe6badf500d54b55f7e39ffe43b798653925"}, @@ -1117,6 +1223,8 @@ version = "1.2.2.post1" description = "A simple, correct Python build frontend" optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "build-1.2.2.post1-py3-none-any.whl", hash = "sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5"}, {file = "build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7"}, @@ -1140,6 +1248,8 @@ version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, {file = 
"cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, @@ -1151,6 +1261,8 @@ version = "5.4.0" description = "Distributed Task Queue." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "celery-5.4.0-py3-none-any.whl", hash = "sha256:369631eb580cf8c51a82721ec538684994f8277637edde2dfc0dacd73ed97f64"}, {file = "celery-5.4.0.tar.gz", hash = "sha256:504a19140e8d3029d5acad88330c541d4c3f64c789d85f94756762d8bca7e706"}, @@ -1207,6 +1319,8 @@ version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, @@ -1218,6 +1332,7 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" +groups = ["main", "storage", "tools", "vdb"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -1287,6 +1402,7 @@ files = [ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] +markers = {main = "python_version == \"3.11\" or python_version >= \"3.12\"", storage = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_python_implementation != \"PyPy\"", tools = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_python_implementation == \"PyPy\"", vdb = "python_version == \"3.11\" or python_version >= \"3.12\""} [package.dependencies] pycparser = "*" @@ -1297,6 +1413,8 @@ version = "5.1.0" description = "Universal encoding detector for Python 3" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9"}, {file = "chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5"}, @@ -1308,6 +1426,8 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7" +groups = ["main", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -1409,6 +1529,8 @@ version = "0.7.6" description = "Chromas fork of hnswlib" optional = false python-versions = "*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "chroma_hnswlib-0.7.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f35192fbbeadc8c0633f0a69c3d3e9f1a4eab3a46b65458bbcbcabdd9e895c36"}, {file = "chroma_hnswlib-0.7.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f007b608c96362b8f0c8b6b2ac94f67f83fcbabd857c378ae82007ec92f4d82"}, @@ -1450,6 +1572,8 @@ version = "0.5.20" description = "Chroma." optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "chromadb-0.5.20-py3-none-any.whl", hash = "sha256:9550ba1b6dce911e35cac2568b301badf4b42f457b99a432bdeec2b6b9dd3680"}, {file = "chromadb-0.5.20.tar.gz", hash = "sha256:19513a23b2d20059866216bfd80195d1d4a160ffba234b8899f5e80978160ca7"}, @@ -1491,6 +1615,8 @@ version = "2.0.0" description = "Python Circuit Breaker pattern implementation" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "circuitbreaker-2.0.0-py2.py3-none-any.whl", hash = "sha256:c8c6f044b616cd5066368734ce4488020392c962b4bd2869d406d883c36d9859"}, {file = "circuitbreaker-2.0.0.tar.gz", hash = "sha256:28110761ca81a2accbd6b33186bc8c433e69b0933d85e89f280028dbb8c1dd14"}, @@ -1502,6 +1628,8 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["main", "lint", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -1516,6 +1644,8 @@ version = "1.2.4" description = "click_default_group" optional = false python-versions = ">=2.7" +groups = ["lint"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "click_default_group-1.2.4-py2.py3-none-any.whl", hash = "sha256:9b60486923720e7fc61731bdb32b617039aba820e22e1c88766b1125592eaa5f"}, {file = "click_default_group-1.2.4.tar.gz", hash = "sha256:eb3f3c99ec0d456ca6cd2a7f08f7d4e91771bef51b01bdd9580cc6450fe1251e"}, @@ -1533,6 +1663,8 @@ version = "0.3.1" description = "Enables git-like *did-you-mean* feature in click" optional = false python-versions = ">=3.6.2" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c"}, {file = "click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463"}, @@ -1547,6 +1679,8 @@ version = "1.1.1" description = "An extension module for click to 
enable registering CLI commands via setuptools entry-points." optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, @@ -1564,6 +1698,8 @@ version = "0.3.0" description = "REPL plugin for Click" optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"}, {file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"}, @@ -1582,6 +1718,8 @@ version = "0.7.19" description = "ClickHouse Database Core Driver for Python, Pandas, and Superset" optional = false python-versions = "~=3.8" +groups = ["tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "clickhouse-connect-0.7.19.tar.gz", hash = "sha256:ce8f21f035781c5ef6ff57dc162e8150779c009b59f14030ba61f8c9c10c06d0"}, {file = "clickhouse_connect-0.7.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ac74eb9e8d6331bae0303d0fc6bdc2125aa4c421ef646348b588760b38c29e9"}, @@ -1672,6 +1810,8 @@ version = "2.2.1" description = "Extended pickling support for Python objects" optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cloudpickle-2.2.1-py3-none-any.whl", hash = "sha256:61f594d1f4c295fa5cd9014ceb3a1fc4a70b0de1164b94fbc2d854ccba056f9f"}, {file = "cloudpickle-2.2.1.tar.gz", hash = "sha256:d89684b8de9e34a2a43b3460fbca07d09d6e25ce858df4d5a44240403b6178f5"}, @@ -1683,6 +1823,8 @@ version = "1.2.71" description = "A Python module to bypass Cloudflare's anti-bot page." optional = false python-versions = "*" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cloudscraper-1.2.71-py2.py3-none-any.whl", hash = "sha256:76f50ca529ed2279e220837befdec892626f9511708e200d48d5bb76ded679b0"}, {file = "cloudscraper-1.2.71.tar.gz", hash = "sha256:429c6e8aa6916d5bad5c8a5eac50f3ea53c9ac22616f6cb21b18dcc71517d0d3"}, @@ -1699,6 +1841,8 @@ version = "5.2.6" description = "" optional = false python-versions = "<4.0,>=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cohere-5.2.6-py3-none-any.whl", hash = "sha256:256b4ed00f47eb315401d7f28834655714f098382908e7d0ad5c98225aa6a57d"}, {file = "cohere-5.2.6.tar.gz", hash = "sha256:15d13682706fbafc8cf700e195f628389a643eb7ebd6d7c5e9d6e1ebd3f942fb"}, @@ -1719,10 +1863,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev", "lint", "tools", "vdb"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "python_version == \"3.11\" or python_version >= \"3.12\"", dev = "(python_version == \"3.11\" or python_version >= \"3.12\") and sys_platform == \"win32\"", lint = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_system == \"Windows\"", tools = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_system == \"Windows\"", vdb = "(python_version == \"3.11\" or python_version >= \"3.12\") and (platform_system == \"Windows\" or os_name == \"nt\" or sys_platform == \"win32\")"} [[package]] name = "coloredlogs" @@ -1730,6 +1876,8 @@ version = "15.0.1" description = "Colored terminal output for Python's logging module" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, @@ -1747,6 +1895,8 @@ version = "1.3.1" description = "Python library for calculating contours of 2D quadrilateral grids" optional = false python-versions = ">=3.10" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "contourpy-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a045f341a77b77e1c5de31e74e966537bba9f3c4099b35bf4c2e3939dd54cdab"}, {file = "contourpy-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:500360b77259914f7805af7462e41f9cb7ca92ad38e9f94d6c8641b089338124"}, @@ -1820,6 +1970,8 @@ version = "1.9.30" description = "cos-python-sdk-v5" optional = false python-versions = "*" +groups = ["storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cos-python-sdk-v5-1.9.30.tar.gz", hash = "sha256:a23fd090211bf90883066d90cd74317860aa67c6d3aa80fe5e44b18c7e9b2a81"}, ] @@ -1837,6 +1989,8 @@ version = "4.3.4" description = "Python Client for Couchbase" optional = false python-versions = ">=3.7" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "couchbase-4.3.4-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:395e7b05495132a071dce5cdd84a3ec6e803205875f8ee22e85a89a16bb1b5f4"}, {file = "couchbase-4.3.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:263a18307d1f1a141b93ae370b19843b1160dd702559152aea19dd08768f59f5"}, @@ -1877,6 +2031,8 @@ version = "7.2.7" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, @@ -1949,6 +2105,8 @@ version = "1.7" description = "CRC Generator" optional = 
false python-versions = "*" +groups = ["storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "crcmod-1.7.tar.gz", hash = "sha256:dc7051a0db5f2bd48665a990d3ec1cc305a466a77358ca4492826f41f283601e"}, ] @@ -1959,6 +2117,8 @@ version = "44.0.0" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cryptography-44.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:84111ad4ff3f6253820e6d3e58be2cc2a00adb29335d4cacb5ab4d4d34f2a123"}, {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15492a11f9e1b62ba9d73c210e2416724633167de94607ec6069ef724fad092"}, @@ -2010,6 +2170,8 @@ version = "1.2.0" description = "cssselect parses CSS3 Selectors and translates them to XPath 1.0" optional = false python-versions = ">=3.7" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cssselect-1.2.0-py2.py3-none-any.whl", hash = "sha256:da1885f0c10b60c03ed5eccbb6b68d6eff248d91976fcde348f395d54c9fd35e"}, {file = "cssselect-1.2.0.tar.gz", hash = "sha256:666b19839cfaddb9ce9d36bfe4c969132c647b92fc9088c4e23f786b30f1b3dc"}, @@ -2021,6 +2183,8 @@ version = "0.12.1" description = "Composable style cycles" optional = false python-versions = ">=3.8" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, @@ -2036,6 +2200,8 @@ version = "1.17.1" description = "dashscope client sdk library" optional = false python-versions = ">=3.8.0" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "dashscope-1.17.1-py3-none-any.whl", hash = "sha256:1e07e7ff4544684797f86ede646766b5ab8f5bd6eb43d2d01f0f757a2941efe1"}, ] @@ -2054,6 +2220,8 @@ version = "0.34.0" description = "Lightning-fast JSON wizardry for Python dataclasses — effortless serialization right out of the box!" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "dataclass-wizard-0.34.0.tar.gz", hash = "sha256:f917db2220e395806a852f7c57e9011dd783b7fe3eee763bb56ae2d48968ab03"}, {file = "dataclass_wizard-0.34.0-py2.py3-none-any.whl", hash = "sha256:9c184edd3526c3523fec2de5b6d6cdfcdc97ed7b2c5ba8bc574284b793704f01"}, @@ -2075,6 +2243,8 @@ version = "0.6.7" description = "Easily serialize dataclasses to and from JSON." 
optional = false python-versions = "<4.0,>=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"}, {file = "dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0"}, @@ -2090,6 +2260,8 @@ version = "1.3.1" description = "Pandas Data Types for SQL systems (BigQuery, Spanner)" optional = false python-versions = ">=3.7" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "db_dtypes-1.3.1-py2.py3-none-any.whl", hash = "sha256:fbc9d1740d94aaf2b5ae24601cfc875a69b4635bb9d049e3c3036e9f10203af8"}, {file = "db_dtypes-1.3.1.tar.gz", hash = "sha256:a058f05dab100891f3e76a7a3db9ad0f107f18dd3d1bdd13680749a2f07eae77"}, @@ -2107,6 +2279,8 @@ version = "5.1.1" description = "Decorators for Humans" optional = false python-versions = ">=3.5" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, @@ -2118,6 +2292,8 @@ version = "0.7.1" description = "XML bomb protection for Python stdlib modules" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -2129,6 +2305,8 @@ version = "1.2.15" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +groups = ["storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320"}, {file = "deprecated-1.2.15.tar.gz", hash = "sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d"}, @@ -2146,6 +2324,8 @@ version = "2.1.0" description = "A library to handle automated deprecations" optional = false python-versions = "*" +groups = ["storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, @@ -2160,6 +2340,8 @@ version = "0.3.9" description = "serialize all of Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, @@ -2175,6 +2357,8 @@ version = "1.9.0" description = "Distro - an OS platform information API" optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, @@ -2186,6 +2370,8 @@ version = "7.1.0" description = "A Python library for the Docker Engine API." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, @@ -2208,6 +2394,8 @@ version = "0.16" description = "Parse Python docstrings in reST, Google and Numpydoc format" optional = false python-versions = ">=3.6,<4.0" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"}, {file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"}, @@ -2219,6 +2407,8 @@ version = "0.5.0" description = "Linting dotenv files like a charm!" 
optional = false python-versions = ">=3.9,<4.0" +groups = ["lint"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "dotenv_linter-0.5.0-py3-none-any.whl", hash = "sha256:fd01cca7f2140cb1710f49cbc1bf0e62397a75a6f0522d26a8b9b2331143c8bd"}, {file = "dotenv_linter-0.5.0.tar.gz", hash = "sha256:4862a8393e5ecdfb32982f1b32dbc006fff969a7b3c8608ba7db536108beeaea"}, @@ -2237,6 +2427,8 @@ version = "1.1.3" description = "DuckDB in-process database" optional = false python-versions = ">=3.7.0" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "duckdb-1.1.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:1c0226dc43e2ee4cc3a5a4672fddb2d76fd2cf2694443f395c02dd1bea0b7fce"}, {file = "duckdb-1.1.3-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:7c71169fa804c0b65e49afe423ddc2dc83e198640e3b041028da8110f7cd16f7"}, @@ -2298,6 +2490,8 @@ version = "6.3.7" description = "Search for words, documents, images, news, maps and text translation using the DuckDuckGo.com search engine." optional = false python-versions = ">=3.8" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "duckduckgo_search-6.3.7-py3-none-any.whl", hash = "sha256:6a831a27977751e8928222f04c99a5d069ff80e2a7c78b699c9b9ac6cb48c41b"}, {file = "duckduckgo_search-6.3.7.tar.gz", hash = "sha256:53d84966429a6377647e2a1ea7224b657575c7a4d506729bdb837e4ee12915ed"}, @@ -2317,6 +2511,8 @@ version = "0.9" description = "Module for converting between datetime.timedelta and Go's Duration strings." optional = false python-versions = "*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "durationpy-0.9-py3-none-any.whl", hash = "sha256:e65359a7af5cedad07fb77a2dd3f390f8eb0b74cb845589fa6c057086834dd38"}, {file = "durationpy-0.9.tar.gz", hash = "sha256:fd3feb0a69a0057d582ef643c355c40d2fa1c942191f914d12203b1a01ac722a"}, @@ -2328,6 +2524,8 @@ version = "8.17.0" description = "Transport classes and utilities shared among Python Elastic client libraries" optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "elastic_transport-8.17.0-py3-none-any.whl", hash = "sha256:59f553300866750e67a38828fede000576562a0e66930c641adb75249e0c95af"}, {file = "elastic_transport-8.17.0.tar.gz", hash = "sha256:e755f38f99fa6ec5456e236b8e58f0eb18873ac8fe710f74b91a16dd562de2a5"}, @@ -2346,6 +2544,8 @@ version = "8.14.0" description = "Python client for Elasticsearch" optional = false python-versions = ">=3.7" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "elasticsearch-8.14.0-py3-none-any.whl", hash = "sha256:cef8ef70a81af027f3da74a4f7d9296b390c636903088439087b8262a468c130"}, {file = "elasticsearch-8.14.0.tar.gz", hash = "sha256:aa2490029dd96f4015b333c1827aa21fd6c0a4d223b00dfb0fe933b8d09a511b"}, @@ -2366,6 +2566,8 @@ version = "2.14.0" description = "Emoji for Python" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "emoji-2.14.0-py3-none-any.whl", hash = "sha256:fcc936bf374b1aec67dda5303ae99710ba88cc9cdce2d1a71c5f2204e6d78799"}, {file = "emoji-2.14.0.tar.gz", hash = "sha256:f68ac28915a2221667cddb3e6c589303c3c6954c6c5af6fefaec7f9bdf72fdca"}, @@ -2380,6 +2582,8 @@ version = "3.24.6.1" description = "OBS Python SDK" optional = 
false python-versions = "*" +groups = ["storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "esdk-obs-python-3.24.6.1.tar.gz", hash = "sha256:c45fed143e99d9256c8560c1d78f651eae0d2e809d16e962f8b286b773c33bf0"}, ] @@ -2393,6 +2597,8 @@ version = "2.0.0" description = "An implementation of lxml.xmlfile for the standard library" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa"}, {file = "et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54"}, @@ -2404,6 +2610,8 @@ version = "0.2.2" description = "Like `typing._eval_type`, but lets older Python versions use newer typing features." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "eval_type_backport-0.2.2-py3-none-any.whl", hash = "sha256:cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a"}, {file = "eval_type_backport-0.2.2.tar.gz", hash = "sha256:f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1"}, @@ -2418,6 +2626,8 @@ version = "32.1.0" description = "Faker is a Python package that generates fake data for you." optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "Faker-32.1.0-py3-none-any.whl", hash = "sha256:c77522577863c264bdc9dad3a2a750ad3f7ee43ff8185072e482992288898814"}, {file = "faker-32.1.0.tar.gz", hash = "sha256:aac536ba04e6b7beb2332c67df78485fc29c1880ff723beac6d1efd45e2f10f5"}, @@ -2433,6 +2643,8 @@ version = "0.5.6" description = "Python client for fal.ai" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "fal_client-0.5.6-py3-none-any.whl", hash = "sha256:631fd857a3c44753ee46a2eea1e7276471453aca58faac9c3702f744c7c84050"}, {file = "fal_client-0.5.6.tar.gz", hash = "sha256:d3afc4b6250023d0ee8437ec504558231d3b106d7aabc12cda8c39883faddecb"}, @@ -2453,6 +2665,8 @@ version = "0.115.6" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "fastapi-0.115.6-py3-none-any.whl", hash = "sha256:e9240b29e36fa8f4bb7290316988e90c381e5092e0cbe84e7818cc3713bcf305"}, {file = "fastapi-0.115.6.tar.gz", hash = "sha256:9ec46f7addc14ea472958a96aae5b5de65f39721a46aaf5705c480d9a8b76654"}, @@ -2473,6 +2687,8 @@ version = "1.10.0" description = "Fast read/write of AVRO files" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "fastavro-1.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1a9fe0672d2caf0fe54e3be659b13de3cad25a267f2073d6f4b9f8862acc31eb"}, {file = "fastavro-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86dd0410770e0c99363788f0584523709d85e57bb457372ec5c285a482c17fe6"}, @@ -2519,6 +2735,8 @@ version = "0.0.4" description = "Find the feed URLs for a website." 
optional = false python-versions = "*" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "feedfinder2-0.0.4.tar.gz", hash = "sha256:3701ee01a6c85f8b865a049c30ba0b4608858c803fe8e30d1d289fdbe89d0efe"}, ] @@ -2534,6 +2752,8 @@ version = "6.0.10" description = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds" optional = false python-versions = ">=3.6" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "feedparser-6.0.10-py3-none-any.whl", hash = "sha256:79c257d526d13b944e965f6095700587f27388e50ea16fd245babe4dfae7024f"}, {file = "feedparser-6.0.10.tar.gz", hash = "sha256:27da485f4637ce7163cdeab13a80312b93b7d0c1b775bef4a47629a3110bca51"}, @@ -2548,6 +2768,8 @@ version = "3.16.1" description = "A platform independent file lock." optional = false python-versions = ">=3.8" +groups = ["main", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, @@ -2564,6 +2786,8 @@ version = "1.2.0" description = "Infer file type and MIME type of any file/buffer. No external dependencies." optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"}, {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, @@ -2575,6 +2799,8 @@ version = "0.7.0" description = "A library for automatically generating command line interfaces." optional = false python-versions = "*" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "fire-0.7.0.tar.gz", hash = "sha256:961550f07936eaf65ad1dc8360f2b2bf8408fad46abbfa4d2a3794f8d2a95cdf"}, ] @@ -2588,6 +2814,8 @@ version = "0.9.7.1" description = "Extract swagger specs from your flask project" optional = false python-versions = "*" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "flasgger-0.9.7.1.tar.gz", hash = "sha256:ca098e10bfbb12f047acc6299cc70a33851943a746e550d86e65e60d4df245fb"}, ] @@ -2606,6 +2834,8 @@ version = "3.1.0" description = "A simple framework for building complex web applications." optional = false python-versions = ">=3.9" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "flask-3.1.0-py3-none-any.whl", hash = "sha256:d667207822eb83f1c4b50949b1623c8fc8d51f2341d65f72e1a1815397551136"}, {file = "flask-3.1.0.tar.gz", hash = "sha256:5f873c5184c897c8d9d1b05df1e3d01b14910ce69607a117bd3277098a5836ac"}, @@ -2628,6 +2858,8 @@ version = "1.17" description = "Compress responses in your Flask app with gzip, deflate, brotli or zstandard." 
optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "Flask_Compress-1.17-py3-none-any.whl", hash = "sha256:415131f197c41109f08e8fdfc3a6628d83d81680fb5ecd0b3a97410e02397b20"}, {file = "flask_compress-1.17.tar.gz", hash = "sha256:1ebb112b129ea7c9e7d6ee6d5cc0d64f226cbc50c4daddf1a58b9bd02253fbd8"}, @@ -2648,6 +2880,8 @@ version = "4.0.2" description = "A Flask extension adding a decorator for CORS support" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "Flask_Cors-4.0.2-py2.py3-none-any.whl", hash = "sha256:38364faf1a7a5d0a55bd1d2e2f83ee9e359039182f5e6a029557e1f56d92c09a"}, {file = "flask_cors-4.0.2.tar.gz", hash = "sha256:493b98e2d1e2f1a4720a7af25693ef2fe32fbafec09a2f72c59f3e475eda61d2"}, @@ -2662,6 +2896,8 @@ version = "0.6.3" description = "User authentication and session management for Flask." optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "Flask-Login-0.6.3.tar.gz", hash = "sha256:5e23d14a607ef12806c699590b89d0f0e0d67baeec599d75947bf9c147330333"}, {file = "Flask_Login-0.6.3-py3-none-any.whl", hash = "sha256:849b25b82a436bf830a054e74214074af59097171562ab10bfa999e6b78aae5d"}, @@ -2677,6 +2913,8 @@ version = "4.0.7" description = "SQLAlchemy database migrations for Flask applications using Alembic." optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "Flask-Migrate-4.0.7.tar.gz", hash = "sha256:dff7dd25113c210b069af280ea713b883f3840c1e3455274745d7355778c8622"}, {file = "Flask_Migrate-4.0.7-py3-none-any.whl", hash = "sha256:5c532be17e7b43a223b7500d620edae33795df27c75811ddf32560f7d48ec617"}, @@ -2693,6 +2931,8 @@ version = "0.3.10" description = "Simple framework for creating REST APIs" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "Flask-RESTful-0.3.10.tar.gz", hash = "sha256:fe4af2ef0027df8f9b4f797aba20c5566801b6ade995ac63b588abf1a59cec37"}, {file = "Flask_RESTful-0.3.10-py2.py3-none-any.whl", hash = "sha256:1cf93c535172f112e080b0d4503a8d15f93a48c88bdd36dd87269bdaf405051b"}, @@ -2713,6 +2953,8 @@ version = "0.7.0" description = "WebSocket support for Flask" optional = false python-versions = ">=3.6" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "flask-sock-0.7.0.tar.gz", hash = "sha256:e023b578284195a443b8d8bdb4469e6a6acf694b89aeb51315b1a34fcf427b7d"}, {file = "flask_sock-0.7.0-py3-none-any.whl", hash = "sha256:caac4d679392aaf010d02fabcf73d52019f5bdaf1c9c131ec5a428cb3491204a"}, @@ -2731,6 +2973,8 @@ version = "3.1.1" description = "Add SQLAlchemy support to your Flask application." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "flask_sqlalchemy-3.1.1-py3-none-any.whl", hash = "sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0"}, {file = "flask_sqlalchemy-3.1.1.tar.gz", hash = "sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312"}, @@ -2746,6 +2990,8 @@ version = "24.12.23" description = "The FlatBuffers serialization format for Python" optional = false python-versions = "*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "flatbuffers-24.12.23-py2.py3-none-any.whl", hash = "sha256:c418e0d48890f4142b92fd3e343e73a48f194e1f80075ddcc5793779b3585444"}, {file = "flatbuffers-24.12.23.tar.gz", hash = "sha256:2910b0bc6ae9b6db78dd2b18d0b7a0709ba240fb5585f286a3a2b30785c22dac"}, @@ -2757,6 +3003,8 @@ version = "1.6.1" description = "An Utility to get ttf/otf font metadata" optional = false python-versions = "*" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "fontmeta-1.6.1.tar.gz", hash = "sha256:837e5bc4da879394b41bda1428a8a480eb7c4e993799a93cfb582bab771a9c24"}, ] @@ -2770,6 +3018,8 @@ version = "4.55.3" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "fonttools-4.55.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1dcc07934a2165ccdc3a5a608db56fb3c24b609658a5b340aee4ecf3ba679dc0"}, {file = "fonttools-4.55.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f7d66c15ba875432a2d2fb419523f5d3d347f91f48f57b8b08a2dfc3c39b8a3f"}, @@ -2843,6 +3093,8 @@ version = "2.4.6" description = "A simple immutable dictionary" optional = false python-versions = ">=3.6" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "frozendict-2.4.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c3a05c0a50cab96b4bb0ea25aa752efbfceed5ccb24c007612bc63e51299336f"}, {file = "frozendict-2.4.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f5b94d5b07c00986f9e37a38dd83c13f5fe3bf3f1ccc8e88edea8fe15d6cd88c"}, @@ -2891,6 +3143,8 @@ version = "1.5.0" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false python-versions = ">=3.8" +groups = ["main", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, @@ -2992,6 +3246,8 @@ version = "2024.12.0" description = "File-system specification" optional = false python-versions = ">=3.8" +groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "fsspec-2024.12.0-py3-none-any.whl", hash = "sha256:b520aed47ad9804237ff878b504267a3b0b441e97508bd6d2d8774e3db85cee2"}, {file = "fsspec-2024.12.0.tar.gz", hash = "sha256:670700c977ed2fb51e0d9f9253177ed20cbde4a3e5c0283cc5385b5870c8533f"}, @@ -3031,6 +3287,8 @@ version = "1.0.0" description = "Clean single-source support for Python 3 and 2" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 
+groups = ["storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"}, {file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"}, @@ -3042,6 +3300,8 @@ version = "24.11.1" description = "Coroutine-based network library" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "gevent-24.11.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:92fe5dfee4e671c74ffaa431fd7ffd0ebb4b339363d24d0d944de532409b935e"}, {file = "gevent-24.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7bfcfe08d038e1fa6de458891bca65c1ada6d145474274285822896a858c870"}, @@ -3102,6 +3362,8 @@ version = "2.2.1" description = "gmpy2 interface to GMP, MPFR, and MPC for Python 3.7+" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "gmpy2-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:431d599e1542b6e0b3618d3e296702c25215c97fb461d596e27adbe69d765dc6"}, {file = "gmpy2-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e51848975837751d1038e82d006e8bb488b179f093ba7fc8a59e1d8a2c61663"}, @@ -3164,6 +3426,8 @@ version = "3.0.0" description = "Python bindings to the Google search engine." optional = false python-versions = "*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google-3.0.0-py2.py3-none-any.whl", hash = "sha256:889cf695f84e4ae2c55fbc0cfdaf4c1e729417fa52ab1db0485202ba173e4935"}, {file = "google-3.0.0.tar.gz", hash = "sha256:143530122ee5130509ad5e989f0512f7cb218b2d4eddbafbad40fd10e8d8ccbe"}, @@ -3178,6 +3442,8 @@ version = "0.6.9" description = "Google Ai Generativelanguage API client library" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google_ai_generativelanguage-0.6.9-py3-none-any.whl", hash = "sha256:50360cd80015d1a8cc70952e98560f32fa06ddee2e8e9f4b4b98e431dc561e0b"}, {file = "google_ai_generativelanguage-0.6.9.tar.gz", hash = "sha256:899f1d3a06efa9739f1cd9d2788070178db33c89d4a76f2e8f4da76f649155fa"}, @@ -3195,6 +3461,8 @@ version = "2.18.0" description = "Google API client core library" optional = false python-versions = ">=3.7" +groups = ["main", "storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9"}, {file = "google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6"}, @@ -3220,6 +3488,8 @@ version = "2.90.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google-api-python-client-2.90.0.tar.gz", hash = "sha256:cbcb3ba8be37c6806676a49df16ac412077e5e5dc7fa967941eff977b31fba03"}, {file = "google_api_python_client-2.90.0-py2.py3-none-any.whl", hash = "sha256:4a41ffb7797d4f28e44635fb1e7076240b741c6493e7c3233c0e4421cec7c913"}, @@ -3238,6 +3508,8 @@ version = "2.29.0" description = "Google Authentication Library" optional = 
false python-versions = ">=3.7" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, @@ -3261,6 +3533,8 @@ version = "0.2.0" description = "Google Authentication Library: httplib2 transport" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"}, {file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"}, @@ -3276,6 +3550,8 @@ version = "1.49.0" description = "Vertex AI API client library" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google-cloud-aiplatform-1.49.0.tar.gz", hash = "sha256:e6e6d01079bb5def49e4be4db4d12b13c624b5c661079c869c13c855e5807429"}, {file = "google_cloud_aiplatform-1.49.0-py2.py3-none-any.whl", hash = "sha256:8072d9e0c18d8942c704233d1a93b8d6312fc7b278786a283247950e28ae98df"}, @@ -3323,6 +3599,8 @@ version = "3.27.0" description = "Google BigQuery API client library" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google_cloud_bigquery-3.27.0-py2.py3-none-any.whl", hash = "sha256:b53b0431e5ba362976a4cd8acce72194b4116cdf8115030c7b339b884603fcc3"}, {file = "google_cloud_bigquery-3.27.0.tar.gz", hash = "sha256:379c524054d7b090fa56d0c22662cc6e6458a6229b6754c0e7177e3a73421d2c"}, @@ -3354,6 +3632,8 @@ version = "2.4.1" description = "Google Cloud API client core library" optional = false python-versions = ">=3.7" +groups = ["main", "storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, @@ -3372,6 +3652,8 @@ version = "1.14.0" description = "Google Cloud Resource Manager API client library" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google_cloud_resource_manager-1.14.0-py2.py3-none-any.whl", hash = "sha256:4860c3ea9ace760b317ea90d4e27f1b32e54ededdcc340a7cb70c8ef238d8f7c"}, {file = "google_cloud_resource_manager-1.14.0.tar.gz", hash = "sha256:daa70a3a4704759d31f812ed221e3b6f7b660af30c7862e4a0060ea91291db30"}, @@ -3390,6 +3672,8 @@ version = "2.16.0" description = "Google Cloud Storage API client library" optional = false python-versions = ">=3.7" +groups = ["main", "storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google-cloud-storage-2.16.0.tar.gz", hash = "sha256:dda485fa503710a828d01246bd16ce9db0823dc51bbca742ce96a6817d58669f"}, {file = "google_cloud_storage-2.16.0-py2.py3-none-any.whl", hash = "sha256:91a06b96fb79cf9cdfb4e759f178ce11ea885c79938f89590344d079305f5852"}, @@ -3412,6 +3696,8 @@ version = "1.6.0" description = 
"A python wrapper of the C library 'Google CRC32C'" optional = false python-versions = ">=3.9" +groups = ["main", "storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa"}, {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9"}, @@ -3451,6 +3737,8 @@ version = "0.8.1" description = "Google Generative AI High level API client library and tools." optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google_generativeai-0.8.1-py3-none-any.whl", hash = "sha256:b031877f24d51af0945207657c085896a0a886eceec7a1cb7029327b0aa6e2f6"}, ] @@ -3474,6 +3762,8 @@ version = "0.2.0" description = "pasta is an AST-based Python refactoring library" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google-pasta-0.2.0.tar.gz", hash = "sha256:c9f2c8dfc8f96d0d5808299920721be30c9eec37f2389f28904f454565c8a16e"}, {file = "google_pasta-0.2.0-py2-none-any.whl", hash = "sha256:4612951da876b1a10fe3960d7226f0c7682cf901e16ac06e473b267a5afa8954"}, @@ -3489,6 +3779,8 @@ version = "2.7.2" description = "Utilities for Google Media Downloads and Resumable Uploads" optional = false python-versions = ">=3.7" +groups = ["main", "storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa"}, {file = "google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0"}, @@ -3507,6 +3799,8 @@ version = "1.63.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, @@ -3525,6 +3819,8 @@ version = "2.11.1" description = "Python Client Library for Supabase Auth" optional = false python-versions = "<4.0,>=3.9" +groups = ["storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "gotrue-2.11.1-py3-none-any.whl", hash = "sha256:1b2d915bdc65fd0ad608532759ce9c72fa2e910145c1e6901f2188519e7bcd2d"}, {file = "gotrue-2.11.1.tar.gz", hash = "sha256:5594ceee60bd873e5f4fdd028b08dece3906f6013b6ed08e7786b71c0092fed0"}, @@ -3540,6 +3836,7 @@ version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" +groups = ["main", "tools", "vdb"] files = [ {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, @@ -3615,6 +3912,7 @@ files = [ {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] +markers = {main = "(python_version == \"3.11\" or python_version >= \"3.12\") and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_python_implementation == \"CPython\")", tools = "(python_version == \"3.11\" or python_version >= \"3.12\") and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")", vdb = "(python_version == \"3.11\" or python_version >= \"3.12\") and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} [package.extras] docs = ["Sphinx", "furo"] @@ -3626,6 +3924,8 @@ version = "0.14.0" description = "IAM API client library" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "grpc_google_iam_v1-0.14.0-py2.py3-none-any.whl", hash = "sha256:fb4a084b30099ba3ab07d61d620a0d4429570b13ff53bd37bac75235f98b7da4"}, {file = "grpc_google_iam_v1-0.14.0.tar.gz", hash = "sha256:c66e07aa642e39bb37950f9e7f491f70dad150ac9801263b42b2814307c2df99"}, @@ -3642,6 +3942,8 @@ version = "1.67.1" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" +groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "grpcio-1.67.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:8b0341d66a57f8a3119b77ab32207072be60c9bf79760fa609c5609f2deb1f3f"}, {file = "grpcio-1.67.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:f5a27dddefe0e2357d3e617b9079b4bfdc91341a91565111a21ed6ebbc51b22d"}, @@ -3709,6 +4011,8 @@ version = "1.62.3" description = "Status proto mapping for gRPC" optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "grpcio-status-1.62.3.tar.gz", hash = "sha256:289bdd7b2459794a12cf95dc0cb727bd4a1742c37bd823f760236c937e53a485"}, {file = "grpcio_status-1.62.3-py3-none-any.whl", hash = "sha256:f9049b762ba8de6b1086789d8315846e094edac2c50beaf462338b301a8fd4b8"}, @@ -3725,6 +4029,8 @@ version = "1.62.3" description = "Protobuf code generator for gRPC" optional = false python-versions = ">=3.7" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "grpcio-tools-1.62.3.tar.gz", hash = "sha256:7c7136015c3d62c3eef493efabaf9e3380e3e66d24ee8e94c01cb71377f57833"}, {file = "grpcio_tools-1.62.3-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:2f968b049c2849540751ec2100ab05e8086c24bead769ca734fdab58698408c1"}, @@ -3787,6 +4093,8 @@ version = "23.0.0" description = "WSGI HTTP Server for UNIX" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "gunicorn-23.0.0-py3-none-any.whl", hash = 
"sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d"}, {file = "gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec"}, @@ -3808,6 +4116,8 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -3819,6 +4129,8 @@ version = "4.1.0" description = "HTTP/2 State-Machine based protocol implementation" optional = false python-versions = ">=3.6.1" +groups = ["storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, @@ -3834,6 +4146,8 @@ version = "3.1.0" description = "Python wrapper for hiredis" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "hiredis-3.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:2892db9db21f0cf7cc298d09f85d3e1f6dc4c4c24463ab67f79bc7a006d51867"}, {file = "hiredis-3.1.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:93cfa6cc25ee2ceb0be81dc61eca9995160b9e16bdb7cca4a00607d57e998918"}, @@ -3952,6 +4266,8 @@ version = "4.0.0" description = "Pure-Python HPACK header compression" optional = false python-versions = ">=3.6.1" +groups = ["storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, @@ -3963,6 +4279,8 @@ version = "1.1" description = "HTML parser based on the WHATWG HTML specification" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, @@ -3984,6 +4302,8 @@ version = "1.0.7" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -4005,6 +4325,8 @@ version = "0.22.0" description = "A comprehensive HTTP client library." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, @@ -4019,6 +4341,8 @@ version = "0.6.4" description = "A collection of framework independent HTTP protocol utils." optional = false python-versions = ">=3.8.0" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, @@ -4074,6 +4398,8 @@ version = "0.27.2" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, @@ -4101,6 +4427,8 @@ version = "0.4.0" description = "Consume Server-Sent Event (SSE) messages with HTTPX." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"}, {file = "httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f"}, @@ -4112,6 +4440,8 @@ version = "0.16.4" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false python-versions = ">=3.7.0" +groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "huggingface_hub-0.16.4-py3-none-any.whl", hash = "sha256:0d3df29932f334fead024afc7cb4cc5149d955238b8b5e42dcf9740d6995a349"}, {file = "huggingface_hub-0.16.4.tar.gz", hash = "sha256:608c7d4f3d368b326d1747f91523dbd1f692871e8e2e7a4750314a2dd8b63e14"}, @@ -4144,6 +4474,8 @@ version = "10.0" description = "Human friendly output for text interfaces using Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, @@ -4158,6 +4490,8 @@ version = "6.0.1" description = "HTTP/2 framing layer for Python" optional = false python-versions = ">=3.6.1" +groups = ["storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, {file = "hyperframe-6.0.1.tar.gz", hash = 
"sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, @@ -4169,6 +4503,8 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -4183,6 +4519,8 @@ version = "6.11.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, @@ -4202,6 +4540,8 @@ version = "6.5.2" description = "Read resources from Python packages" optional = false python-versions = ">=3.9" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec"}, {file = "importlib_resources-6.5.2.tar.gz", hash = "sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c"}, @@ -4221,6 +4561,8 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -4232,6 +4574,8 @@ version = "0.7.2" description = "An ISO 8601 date/time/duration parser and formatter" optional = false python-versions = ">=3.7" +groups = ["main", "storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, @@ -4243,6 +4587,8 @@ version = "2.2.0" description = "Safely pass data to untrusted environments and back." 
optional = false python-versions = ">=3.8" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, @@ -4254,6 +4600,8 @@ version = "0.42.1" description = "Chinese Words Segmentation Utilities" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jieba-0.42.1.tar.gz", hash = "sha256:055ca12f62674fafed09427f176506079bc135638a14e23e25be909131928db2"}, ] @@ -4264,6 +4612,8 @@ version = "0.35.1" description = "Chinese Words Segementation Utilities" optional = false python-versions = "*" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jieba3k-0.35.1.zip", hash = "sha256:980a4f2636b778d312518066be90c7697d410dd5a472385f5afced71a2db1c10"}, ] @@ -4274,6 +4624,8 @@ version = "3.1.5" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -4291,6 +4643,8 @@ version = "0.8.2" description = "Fast iterable JSON parser." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"}, {file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"}, @@ -4376,6 +4730,8 @@ version = "0.10.0" description = "JSON Matching Expressions" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main", "storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, @@ -4387,6 +4743,8 @@ version = "1.4.2" description = "Lightweight pipelining with Python functions" optional = false python-versions = ">=3.8" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, @@ -4398,6 +4756,8 @@ version = "4.0.0" description = "Library with helpers for the jsonlines file format" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonlines-4.0.0-py3-none-any.whl", hash = "sha256:185b334ff2ca5a91362993f42e83588a360cf95ce4b71a73548502bda52a7c55"}, {file = "jsonlines-4.0.0.tar.gz", hash = 
"sha256:0c6d2c09117550c089995247f605ae4cf77dd1533041d366351f6f298822ea74"}, @@ -4412,6 +4772,8 @@ version = "1.6.1" description = "A final implementation of JSONPath for Python that aims to be standard compliant, including arithmetic and binary comparison operators and providing clear AST for metaprogramming." optional = false python-versions = "*" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonpath-ng-1.6.1.tar.gz", hash = "sha256:086c37ba4917304850bd837aeab806670224d3f038fe2833ff593a672ef0a5fa"}, {file = "jsonpath_ng-1.6.1-py3-none-any.whl", hash = "sha256:8f22cd8273d7772eea9aaa84d922e0841aa36fdb8a2c6b7f6c3791a16a9bc0be"}, @@ -4426,6 +4788,8 @@ version = "1.0.6" description = "A more powerful JSONPath implementation in modern python" optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonpath-python-1.0.6.tar.gz", hash = "sha256:dd5be4a72d8a2995c3f583cf82bf3cd1a9544cfdabf2d22595b67aff07349666"}, {file = "jsonpath_python-1.0.6-py3-none-any.whl", hash = "sha256:1e3b78df579f5efc23565293612decee04214609208a2335884b3ee3f786b575"}, @@ -4437,6 +4801,8 @@ version = "4.23.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, @@ -4458,6 +4824,8 @@ version = "2024.10.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false python-versions = ">=3.9" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, @@ -4472,6 +4840,8 @@ version = "0.2.1" description = "Static image export for web-based visualization libraries with zero dependencies" optional = false python-versions = "*" +groups = ["indirect", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "kaleido-0.2.1-py2.py3-none-macosx_10_11_x86_64.whl", hash = "sha256:ca6f73e7ff00aaebf2843f73f1d3bacde1930ef5041093fe76b83a15785049a7"}, {file = "kaleido-0.2.1-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:bb9a5d1f710357d5d432ee240ef6658a6d124c3e610935817b4b42da9c787c05"}, @@ -4487,6 +4857,8 @@ version = "1.4.8" description = "A fast implementation of the Cassowary constraint solver" optional = false python-versions = ">=3.10" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88c6f252f6816a73b1f8c904f7bbe02fd67c09a69f7cb8a0eecdbf5ce78e63db"}, {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72941acb7b67138f35b879bbe85be0f6c6a70cab78fe3ef6db9c024d9223e5b"}, @@ -4576,6 +4948,8 @@ version = "5.4.2" description = "Messaging library for Python." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "kombu-5.4.2-py3-none-any.whl", hash = "sha256:14212f5ccf022fc0a70453bb025a1dcc32782a588c49ea866884047d66e14763"}, {file = "kombu-5.4.2.tar.gz", hash = "sha256:eef572dd2fd9fc614b37580e3caeafdd5af46c1eff31e7fba89138cdb406f2cf"}, @@ -4609,6 +4983,8 @@ version = "31.0.0" description = "Kubernetes python client" optional = false python-versions = ">=3.6" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "kubernetes-31.0.0-py2.py3-none-any.whl", hash = "sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1"}, {file = "kubernetes-31.0.0.tar.gz", hash = "sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0"}, @@ -4636,6 +5012,8 @@ version = "1.0.9" description = "Language detection library ported from Google's language-detection." optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "langdetect-1.0.9-py2-none-any.whl", hash = "sha256:7cbc0746252f19e76f77c0b1690aadf01963be835ef0cd4b56dddf2a8f1dfc2a"}, {file = "langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0"}, @@ -4650,6 +5028,8 @@ version = "2.51.5" description = "A client library for accessing langfuse" optional = false python-versions = "<4.0,>=3.8.1" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "langfuse-2.51.5-py3-none-any.whl", hash = "sha256:b95401ca710ef94b521afa6541933b6f93d7cfd4a97523c8fc75bca4d6d219fb"}, {file = "langfuse-2.51.5.tar.gz", hash = "sha256:55bc37b5c5d3ae133c1a95db09117cfb3117add110ba02ebbf2ce45ac4395c5b"}, @@ -4675,6 +5055,8 @@ version = "0.1.147" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "langsmith-0.1.147-py3-none-any.whl", hash = "sha256:7166fc23b965ccf839d64945a78e9f1157757add228b086141eb03a60d699a15"}, {file = "langsmith-0.1.147.tar.gz", hash = "sha256:2e933220318a4e73034657103b3b1a3a6109cc5db3566a7e8e03be8d6d7def7a"}, @@ -4693,12 +5075,146 @@ requests-toolbelt = ">=1.0.0,<2.0.0" [package.extras] langsmith-pyo3 = ["langsmith-pyo3 (>=0.1.0rc2,<0.2.0)"] +[[package]] +name = "levenshtein" +version = "0.26.1" +description = "Python extension for computing string edit distances and similarities." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "levenshtein-0.26.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8dc4a4aecad538d944a1264c12769c99e3c0bf8e741fc5e454cc954913befb2e"}, + {file = "levenshtein-0.26.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ec108f368c12b25787c8b1a4537a1452bc53861c3ee4abc810cc74098278edcd"}, + {file = "levenshtein-0.26.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69229d651c97ed5b55b7ce92481ed00635cdbb80fbfb282a22636e6945dc52d5"}, + {file = "levenshtein-0.26.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79dcd157046d62482a7719b08ba9e3ce9ed3fc5b015af8ea989c734c702aedd4"}, + {file = "levenshtein-0.26.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f53f9173ae21b650b4ed8aef1d0ad0c37821f367c221a982f4d2922b3044e0d"}, + {file = "levenshtein-0.26.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3956f3c5c229257dbeabe0b6aacd2c083ebcc1e335842a6ff2217fe6cc03b6b"}, + {file = "levenshtein-0.26.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1e83af732726987d2c4cd736f415dae8b966ba17b7a2239c8b7ffe70bfb5543"}, + {file = "levenshtein-0.26.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4f052c55046c2a9c9b5f742f39e02fa6e8db8039048b8c1c9e9fdd27c8a240a1"}, + {file = "levenshtein-0.26.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9895b3a98f6709e293615fde0dcd1bb0982364278fa2072361a1a31b3e388b7a"}, + {file = "levenshtein-0.26.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a3777de1d8bfca054465229beed23994f926311ce666f5a392c8859bb2722f16"}, + {file = "levenshtein-0.26.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:81c57e1135c38c5e6e3675b5e2077d8a8d3be32bf0a46c57276c092b1dffc697"}, + {file = "levenshtein-0.26.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:91d5e7d984891df3eff7ea9fec8cf06fdfacc03cd074fd1a410435706f73b079"}, + {file = "levenshtein-0.26.1-cp310-cp310-win32.whl", hash = "sha256:f48abff54054b4142ad03b323e80aa89b1d15cabc48ff49eb7a6ff7621829a56"}, + {file = "levenshtein-0.26.1-cp310-cp310-win_amd64.whl", hash = "sha256:79dd6ad799784ea7b23edd56e3bf94b3ca866c4c6dee845658ee75bb4aefdabf"}, + {file = "levenshtein-0.26.1-cp310-cp310-win_arm64.whl", hash = "sha256:3351ddb105ef010cc2ce474894c5d213c83dddb7abb96400beaa4926b0b745bd"}, + {file = "levenshtein-0.26.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:44c51f5d33b3cfb9db518b36f1288437a509edd82da94c4400f6a681758e0cb6"}, + {file = "levenshtein-0.26.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56b93203e725f9df660e2afe3d26ba07d71871b6d6e05b8b767e688e23dfb076"}, + {file = "levenshtein-0.26.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:270d36c5da04a0d89990660aea8542227cbd8f5bc34e9fdfadd34916ff904520"}, + {file = "levenshtein-0.26.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:480674c05077eeb0b0f748546d4fcbb386d7c737f9fff0010400da3e8b552942"}, + {file = "levenshtein-0.26.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13946e37323728695ba7a22f3345c2e907d23f4600bc700bf9b4352fb0c72a48"}, + {file = "levenshtein-0.26.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ceb673f572d1d0dc9b1cd75792bb8bad2ae8eb78a7c6721e23a3867d318cb6f2"}, + {file = 
"levenshtein-0.26.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42d6fa242e3b310ce6bfd5af0c83e65ef10b608b885b3bb69863c01fb2fcff98"}, + {file = "levenshtein-0.26.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b8b68295808893a81e0a1dbc2274c30dd90880f14d23078e8eb4325ee615fc68"}, + {file = "levenshtein-0.26.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b01061d377d1944eb67bc40bef5d4d2f762c6ab01598efd9297ce5d0047eb1b5"}, + {file = "levenshtein-0.26.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9d12c8390f156745e533d01b30773b9753e41d8bbf8bf9dac4b97628cdf16314"}, + {file = "levenshtein-0.26.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:48825c9f967f922061329d1481b70e9fee937fc68322d6979bc623f69f75bc91"}, + {file = "levenshtein-0.26.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d8ec137170b95736842f99c0e7a9fd8f5641d0c1b63b08ce027198545d983e2b"}, + {file = "levenshtein-0.26.1-cp311-cp311-win32.whl", hash = "sha256:798f2b525a2e90562f1ba9da21010dde0d73730e277acaa5c52d2a6364fd3e2a"}, + {file = "levenshtein-0.26.1-cp311-cp311-win_amd64.whl", hash = "sha256:55b1024516c59df55f1cf1a8651659a568f2c5929d863d3da1ce8893753153bd"}, + {file = "levenshtein-0.26.1-cp311-cp311-win_arm64.whl", hash = "sha256:e52575cbc6b9764ea138a6f82d73d3b1bc685fe62e207ff46a963d4c773799f6"}, + {file = "levenshtein-0.26.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cc741ca406d3704dc331a69c04b061fc952509a069b79cab8287413f434684bd"}, + {file = "levenshtein-0.26.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:821ace3b4e1c2e02b43cf5dc61aac2ea43bdb39837ac890919c225a2c3f2fea4"}, + {file = "levenshtein-0.26.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f92694c9396f55d4c91087efacf81297bef152893806fc54c289fc0254b45384"}, + {file = "levenshtein-0.26.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51ba374de7a1797d04a14a4f0ad3602d2d71fef4206bb20a6baaa6b6a502da58"}, + {file = "levenshtein-0.26.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7aa5c3327dda4ef952769bacec09c09ff5bf426e07fdc94478c37955681885b"}, + {file = "levenshtein-0.26.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33e2517e8d3c221de2d1183f400aed64211fcfc77077b291ed9f3bb64f141cdc"}, + {file = "levenshtein-0.26.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9092b622765c7649dd1d8af0f43354723dd6f4e570ac079ffd90b41033957438"}, + {file = "levenshtein-0.26.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fc16796c85d7d8b259881d59cc8b5e22e940901928c2ff6924b2c967924e8a0b"}, + {file = "levenshtein-0.26.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4370733967f5994ceeed8dc211089bedd45832ee688cecea17bfd35a9eb22b9"}, + {file = "levenshtein-0.26.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3535ecfd88c9b283976b5bc61265855f59bba361881e92ed2b5367b6990c93fe"}, + {file = "levenshtein-0.26.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:90236e93d98bdfd708883a6767826fafd976dac8af8fc4a0fb423d4fa08e1bf0"}, + {file = "levenshtein-0.26.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:04b7cabb82edf566b1579b3ed60aac0eec116655af75a3c551fee8754ffce2ea"}, + {file = "levenshtein-0.26.1-cp312-cp312-win32.whl", hash = "sha256:ae382af8c76f6d2a040c0d9ca978baf461702ceb3f79a0a3f6da8d596a484c5b"}, + {file = "levenshtein-0.26.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:fd091209798cfdce53746f5769987b4108fe941c54fb2e058c016ffc47872918"}, + {file = "levenshtein-0.26.1-cp312-cp312-win_arm64.whl", hash = "sha256:7e82f2ea44a81ad6b30d92a110e04cd3c8c7c6034b629aca30a3067fa174ae89"}, + {file = "levenshtein-0.26.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:790374a9f5d2cbdb30ee780403a62e59bef51453ac020668c1564d1e43438f0e"}, + {file = "levenshtein-0.26.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7b05c0415c386d00efda83d48db9db68edd02878d6dbc6df01194f12062be1bb"}, + {file = "levenshtein-0.26.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3114586032361722ddededf28401ce5baf1cf617f9f49fb86b8766a45a423ff"}, + {file = "levenshtein-0.26.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2532f8a13b68bf09f152d906f118a88da2063da22f44c90e904b142b0a53d534"}, + {file = "levenshtein-0.26.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:219c30be6aa734bf927188d1208b7d78d202a3eb017b1c5f01ab2034d2d4ccca"}, + {file = "levenshtein-0.26.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:397e245e77f87836308bd56305bba630010cd8298c34c4c44bd94990cdb3b7b1"}, + {file = "levenshtein-0.26.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeff6ea3576f72e26901544c6c55c72a7b79b9983b6f913cba0e9edbf2f87a97"}, + {file = "levenshtein-0.26.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a19862e3539a697df722a08793994e334cd12791e8144851e8a1dee95a17ff63"}, + {file = "levenshtein-0.26.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:dc3b5a64f57c3c078d58b1e447f7d68cad7ae1b23abe689215d03fc434f8f176"}, + {file = "levenshtein-0.26.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bb6c7347424a91317c5e1b68041677e4c8ed3e7823b5bbaedb95bffb3c3497ea"}, + {file = "levenshtein-0.26.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b817376de4195a207cc0e4ca37754c0e1e1078c2a2d35a6ae502afde87212f9e"}, + {file = "levenshtein-0.26.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7b50c3620ff47c9887debbb4c154aaaac3e46be7fc2e5789ee8dbe128bce6a17"}, + {file = "levenshtein-0.26.1-cp313-cp313-win32.whl", hash = "sha256:9fb859da90262eb474c190b3ca1e61dee83add022c676520f5c05fdd60df902a"}, + {file = "levenshtein-0.26.1-cp313-cp313-win_amd64.whl", hash = "sha256:8adcc90e3a5bfb0a463581d85e599d950fe3c2938ac6247b29388b64997f6e2d"}, + {file = "levenshtein-0.26.1-cp313-cp313-win_arm64.whl", hash = "sha256:c2599407e029865dc66d210b8804c7768cbdbf60f061d993bb488d5242b0b73e"}, + {file = "levenshtein-0.26.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc54ced948fc3feafce8ad4ba4239d8ffc733a0d70e40c0363ac2a7ab2b7251e"}, + {file = "levenshtein-0.26.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6516f69213ae393a220e904332f1a6bfc299ba22cf27a6520a1663a08eba0fb"}, + {file = "levenshtein-0.26.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4cfea4eada1746d0c75a864bc7e9e63d4a6e987c852d6cec8d9cb0c83afe25b"}, + {file = "levenshtein-0.26.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a323161dfeeac6800eb13cfe76a8194aec589cd948bcf1cdc03f66cc3ec26b72"}, + {file = "levenshtein-0.26.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c23e749b68ebc9a20b9047317b5cd2053b5856315bc8636037a8adcbb98bed1"}, + {file = "levenshtein-0.26.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8f80dd7432d4b6cf493d012d22148db7af769017deb31273e43406b1fb7f091c"}, + {file = "levenshtein-0.26.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ae7cd6e4312c6ef34b2e273836d18f9fff518d84d823feff5ad7c49668256e0"}, + {file = "levenshtein-0.26.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dcdad740e841d791b805421c2b20e859b4ed556396d3063b3aa64cd055be648c"}, + {file = "levenshtein-0.26.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e07afb1613d6f5fd99abd4e53ad3b446b4efaa0f0d8e9dfb1d6d1b9f3f884d32"}, + {file = "levenshtein-0.26.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:f1add8f1d83099a98ae4ac472d896b7e36db48c39d3db25adf12b373823cdeff"}, + {file = "levenshtein-0.26.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1010814b1d7a60833a951f2756dfc5c10b61d09976ce96a0edae8fecdfb0ea7c"}, + {file = "levenshtein-0.26.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:33fa329d1bb65ce85e83ceda281aea31cee9f2f6e167092cea54f922080bcc66"}, + {file = "levenshtein-0.26.1-cp39-cp39-win32.whl", hash = "sha256:488a945312f2f16460ab61df5b4beb1ea2254c521668fd142ce6298006296c98"}, + {file = "levenshtein-0.26.1-cp39-cp39-win_amd64.whl", hash = "sha256:9f942104adfddd4b336c3997050121328c39479f69de702d7d144abb69ea7ab9"}, + {file = "levenshtein-0.26.1-cp39-cp39-win_arm64.whl", hash = "sha256:c1d8f85b2672939f85086ed75effcf768f6077516a3e299c2ba1f91bc4644c22"}, + {file = "levenshtein-0.26.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6cf8f1efaf90ca585640c5d418c30b7d66d9ac215cee114593957161f63acde0"}, + {file = "levenshtein-0.26.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d5b2953978b8c158dd5cd93af8216a5cfddbf9de66cf5481c2955f44bb20767a"}, + {file = "levenshtein-0.26.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b952b3732c4631c49917d4b15d78cb4a2aa006c1d5c12e2a23ba8e18a307a055"}, + {file = "levenshtein-0.26.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07227281e12071168e6ae59238918a56d2a0682e529f747b5431664f302c0b42"}, + {file = "levenshtein-0.26.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8191241cd8934feaf4d05d0cc0e5e72877cbb17c53bbf8c92af9f1aedaa247e9"}, + {file = "levenshtein-0.26.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9e70d7ee157a9b698c73014f6e2b160830e7d2d64d2e342fefc3079af3c356fc"}, + {file = "levenshtein-0.26.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0eb3059f826f6cb0a5bca4a85928070f01e8202e7ccafcba94453470f83e49d4"}, + {file = "levenshtein-0.26.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:6c389e44da12d6fb1d7ba0a709a32a96c9391e9be4160ccb9269f37e040599ee"}, + {file = "levenshtein-0.26.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e9de292f2c51a7d34a0ae23bec05391b8f61f35781cd3e4c6d0533e06250c55"}, + {file = "levenshtein-0.26.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d87215113259efdca8716e53b6d59ab6d6009e119d95d45eccc083148855f33"}, + {file = "levenshtein-0.26.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f00a3eebf68a82fb651d8d0e810c10bfaa60c555d21dde3ff81350c74fb4c2"}, + {file = "levenshtein-0.26.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b3554c1b59de63d05075577380340c185ff41b028e541c0888fddab3c259a2b4"}, + {file = "levenshtein-0.26.1.tar.gz", hash = 
"sha256:0d19ba22330d50609b2349021ec3cf7d905c6fe21195a2d0d876a146e7ed2575"}, +] + +[package.dependencies] +rapidfuzz = ">=3.9.0,<4.0.0" + +[[package]] +name = "litellm" +version = "1.51.3" +description = "Library to easily interface with LLM API providers" +optional = false +python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "litellm-1.51.3-py3-none-any.whl", hash = "sha256:440d3c7cc5ab8eeb12cee8f4d806bff05b7db834ebc11117d7fa070a1142ced5"}, + {file = "litellm-1.51.3.tar.gz", hash = "sha256:31eff9fcbf7b058bac0fd7432c4ea0487e8555f12446a1f30e5862e33716f44d"}, +] + +[package.dependencies] +aiohttp = "*" +click = "*" +importlib-metadata = ">=6.8.0" +jinja2 = ">=3.1.2,<4.0.0" +jsonschema = ">=4.22.0,<5.0.0" +openai = ">=1.52.0" +pydantic = ">=2.0.0,<3.0.0" +python-dotenv = ">=0.2.0" +requests = ">=2.31.0,<3.0.0" +tiktoken = ">=0.7.0" +tokenizers = "*" + +[package.extras] +extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "resend (>=0.8.0,<0.9.0)"] +proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "backoff", "cryptography (>=42.0.5,<43.0.0)", "fastapi (>=0.111.0,<0.112.0)", "fastapi-sso (>=0.10.0,<0.11.0)", "gunicorn (>=22.0.0,<23.0.0)", "orjson (>=3.9.7,<4.0.0)", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.9,<0.0.10)", "pyyaml (>=6.0.1,<7.0.0)", "rq", "uvicorn (>=0.22.0,<0.23.0)"] + [[package]] name = "llvmlite" version = "0.43.0" description = "lightweight wrapper around basic LLVM functionality" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "llvmlite-0.43.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a289af9a1687c6cf463478f0fa8e8aa3b6fb813317b0d70bf1ed0759eab6f761"}, {file = "llvmlite-0.43.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d4fd101f571a31acb1559ae1af30f30b1dc4b3186669f92ad780e17c81e91bc"}, @@ -4729,6 +5245,8 @@ version = "0.7.3" description = "Python logging made (stupidly) simple" optional = false python-versions = "<4.0,>=3.5" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"}, {file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"}, @@ -4747,6 +5265,8 @@ version = "5.3.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
optional = false python-versions = ">=3.6" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, @@ -4901,6 +5421,8 @@ version = "4.3.3" description = "LZ4 Bindings for Python" optional = false python-versions = ">=3.8" +groups = ["tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "lz4-4.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b891880c187e96339474af2a3b2bfb11a8e4732ff5034be919aa9029484cd201"}, {file = "lz4-4.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:222a7e35137d7539c9c33bb53fcbb26510c5748779364014235afc62b0ec797f"}, @@ -4951,6 +5473,8 @@ version = "1.0.56" description = "Mailchimp Transactional API" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mailchimp_transactional-1.0.56-py3-none-any.whl", hash = "sha256:a76ea88b90a2d47d8b5134586aabbd3a96c459f6066d8886748ab59e50de36eb"}, ] @@ -4968,6 +5492,8 @@ version = "1.3.8" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "Mako-1.3.8-py3-none-any.whl", hash = "sha256:42f48953c7eb91332040ff567eb7eea69b22e7a4affbc5ba8e845e8f730f6627"}, {file = "mako-1.3.8.tar.gz", hash = "sha256:577b97e414580d3e088d47c2dbbe9594aa7a5146ed2875d4dfa9075af2dd3cc8"}, @@ -4987,6 +5513,8 @@ version = "3.5.2" description = "Python implementation of John Gruber's Markdown." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"}, {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"}, @@ -5002,6 +5530,8 @@ version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.8" +groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -5026,6 +5556,8 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -5096,6 +5628,8 @@ version = "3.24.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "marshmallow-3.24.1-py3-none-any.whl", hash = "sha256:ddb5c9987017d37be351c184e4e867e7bf55f7331f4da730dedad6b7af662cdd"}, {file = "marshmallow-3.24.1.tar.gz", hash = "sha256:efdcb656ac8788f0e3d1d938f8dc0f237bf1a99aff8f6dfbffa594981641cea0"}, @@ -5115,6 +5649,8 @@ version = "3.8.4" description = "Python plotting package" optional = false python-versions = ">=3.9" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "matplotlib-3.8.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:abc9d838f93583650c35eca41cfcec65b2e7cb50fd486da6f0c49b5e1ed23014"}, {file = "matplotlib-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f65c9f002d281a6e904976007b2d46a1ee2bcea3a68a8c12dda24709ddc9106"}, @@ -5163,6 +5699,8 @@ version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" +groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -5174,6 +5712,8 @@ version = "2.4.11" description = "A lightweight version of Milvus wrapped with Python." optional = false python-versions = ">=3.7" +groups = ["vdb"] +markers = "(python_version == \"3.11\" or python_version >= \"3.12\") and sys_platform != \"win32\"" files = [ {file = "milvus_lite-2.4.11-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:9e563ae0dca1b41bfd76b90f06b2bcc474460fe4eba142c9bab18d2747ff843b"}, {file = "milvus_lite-2.4.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d21472bd24eb327542817829ce7cb51878318e6173c4d62353c77421aecf98d6"}, @@ -5190,6 +5730,8 @@ version = "3.1.0" description = "A sane and fast Markdown parser with useful plugins and renderers" optional = false python-versions = ">=3.8" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mistune-3.1.0-py3-none-any.whl", hash = "sha256:b05198cf6d671b3deba6c87ec6cf0d4eb7b72c524636eddb6dbf13823b52cee1"}, {file = "mistune-3.1.0.tar.gz", hash = "sha256:dbcac2f78292b9dc066cd03b7a3a26b62d85f8159f2ea5fd28e55df79908d667"}, @@ -5201,6 +5743,8 @@ version = "5.0.1" description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." 
optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mmh3-5.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f0a4b4bf05778ed77d820d6e7d0e9bd6beb0c01af10e1ce9233f5d2f814fcafa"}, {file = "mmh3-5.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac7a391039aeab95810c2d020b69a94eb6b4b37d4e2374831e92db3a0cdf71c6"}, @@ -5314,6 +5858,8 @@ version = "4.0.3" description = "Rolling backport of unittest.mock for all Pythons" optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mock-4.0.3-py3-none-any.whl", hash = "sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62"}, {file = "mock-4.0.3.tar.gz", hash = "sha256:7d3fbbde18228f4ff2f1f119a45cdffa458b4c0dee32eb4d2bb2f82554bac7bc"}, @@ -5330,6 +5876,8 @@ version = "1.6" description = "An implementation of time.monotonic() for Python 2 & < 3.3" optional = false python-versions = "*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"}, {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"}, @@ -5341,6 +5889,8 @@ version = "0.0.10" description = "Fonts manager for matplotlib" optional = false python-versions = ">=3.9" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mplfonts-0.0.10-py3-none-any.whl", hash = "sha256:835e35ada4a6ef85ce29ea81dd589a98b92af5b5c8e8b9f4e2d79dfea9c2ba40"}, {file = "mplfonts-0.0.10.tar.gz", hash = "sha256:5da8d1afd53b8d38a1053d61a7ebd936de08b8480fba17f9b655beb270af8089"}, @@ -5358,6 +5908,8 @@ version = "1.3.0" description = "Python library for arbitrary-precision floating-point arithmetic" optional = false python-versions = "*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, @@ -5375,6 +5927,8 @@ version = "1.31.1" description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "msal-1.31.1-py3-none-any.whl", hash = "sha256:29d9882de247e96db01386496d59f29035e5e841bcac892e6d7bf4390bf6bd17"}, {file = "msal-1.31.1.tar.gz", hash = "sha256:11b5e6a3f802ffd3a72107203e20c4eac6ef53401961b880af2835b723d80578"}, @@ -5394,6 +5948,8 @@ version = "1.2.0" description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." 
optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "msal_extensions-1.2.0-py3-none-any.whl", hash = "sha256:cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d"}, {file = "msal_extensions-1.2.0.tar.gz", hash = "sha256:6f41b320bfd2933d631a215c91ca0dd3e67d84bd1a2f50ce917d5874ec646bef"}, @@ -5409,6 +5965,8 @@ version = "0.7.1" description = "AutoRest swagger generator Python client runtime." optional = false python-versions = ">=3.6" +groups = ["main", "storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "msrest-0.7.1-py3-none-any.whl", hash = "sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32"}, {file = "msrest-0.7.1.zip", hash = "sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9"}, @@ -5430,6 +5988,8 @@ version = "6.1.0" description = "multidict implementation" optional = false python-versions = ">=3.8" +groups = ["main", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, @@ -5531,6 +6091,8 @@ version = "0.70.17" description = "better multiprocessing and multithreading in Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "multiprocess-0.70.17-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7ddb24e5bcdb64e90ec5543a1f05a39463068b6d3b804aa3f2a4e16ec28562d6"}, {file = "multiprocess-0.70.17-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d729f55198a3579f6879766a6d9b72b42d4b320c0dcb7844afb774d75b573c62"}, @@ -5559,6 +6121,8 @@ version = "0.0.11" description = "Non-blocking Python methods using decorators" optional = false python-versions = "*" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "multitasking-0.0.11-py3-none-any.whl", hash = "sha256:1e5b37a5f8fc1e6cfaafd1a82b6b1cc6d2ed20037d3b89c25a84f499bd7b3dd4"}, {file = "multitasking-0.0.11.tar.gz", hash = "sha256:4d6bc3cc65f9b2dca72fb5a787850a88dae8f620c2b36ae9b55248e51bcd6026"}, @@ -5570,6 +6134,8 @@ version = "1.13.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, @@ -5622,6 +6188,8 @@ version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
optional = false python-versions = ">=3.5" +groups = ["main", "dev"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, @@ -5633,6 +6201,8 @@ version = "0.3.1" description = "JsonDecoder for ndjson" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ndjson-0.3.1-py2.py3-none-any.whl", hash = "sha256:839c22275e6baa3040077b83c005ac24199b94973309a8a1809be962c753a410"}, {file = "ndjson-0.3.1.tar.gz", hash = "sha256:bf9746cb6bb1cb53d172cda7f154c07c786d665ff28341e4e689b796b229e5d6"}, @@ -5644,6 +6214,8 @@ version = "1.6.0" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, @@ -5655,6 +6227,8 @@ version = "0.2.8" description = "Simplified python article discovery & extraction." optional = false python-versions = "*" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "newspaper3k-0.2.8-py3-none-any.whl", hash = "sha256:44a864222633d3081113d1030615991c3dbba87239f6bbf59d91240f71a22e3e"}, {file = "newspaper3k-0.2.8.tar.gz", hash = "sha256:9f1bd3e1fb48f400c715abf875cc7b0a67b7ddcd87f50c9aeeb8fcbbbd9004fb"}, @@ -5681,6 +6255,8 @@ version = "3.9.1" description = "Natural Language Toolkit" optional = false python-versions = ">=3.8" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1"}, {file = "nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868"}, @@ -5706,6 +6282,8 @@ version = "3.1.3" description = "The official Nomic python client." 
optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nomic-3.1.3.tar.gz", hash = "sha256:b06744b79fbe47451874ca7b272cafa1bb272cfb82acc79c64abfc943a98e035"}, ] @@ -5736,6 +6314,8 @@ version = "0.5.7" description = "novita SDK for Python" optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "novita_client-0.5.7-py3-none-any.whl", hash = "sha256:844a4c09c98328c8d4f72e1d3f63f76285c2963dcc37ccb2de41cbfdbe7fa51d"}, {file = "novita_client-0.5.7.tar.gz", hash = "sha256:65baf748757aafd8ab080a64f9ab069a40c0810fc1fa9be9c26596988a0aa4b4"}, @@ -5752,6 +6332,8 @@ version = "0.60.0" description = "compiling Python code using LLVM" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "numba-0.60.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d761de835cd38fb400d2c26bb103a2726f548dc30368853121d66201672e651"}, {file = "numba-0.60.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:159e618ef213fba758837f9837fb402bbe65326e60ba0633dbe6c7f274d42c1b"}, @@ -5786,6 +6368,8 @@ version = "2.9.0" description = "Fast numerical expression evaluator for NumPy" optional = false python-versions = ">=3.9" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "numexpr-2.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c52b4ac54514f5d4d8ead66768810cd5f77aa198e6064213d9b5c7b2e1c97c35"}, {file = "numexpr-2.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50f57bc333f285e8c46b1ce61c6e94ec9bb74e4ea0d674d1c6c6f4a286f64fe4"}, @@ -5827,6 +6411,8 @@ version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" +groups = ["main", "indirect", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -5872,6 +6458,8 @@ version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" optional = false python-versions = ">=3.6" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, @@ -5888,6 +6476,8 @@ version = "2.135.2" description = "Oracle Cloud Infrastructure Python SDK" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "oci-2.135.2-py3-none-any.whl", hash = "sha256:5213319244e1c7f108bcb417322f33f01f043fd9636d4063574039f5fdf4e4f7"}, {file = "oci-2.135.2.tar.gz", hash = "sha256:520f78983c5246eae80dd5ecfd05e3a565c8b98d02ef0c1b11ba1f61bcccb61d"}, @@ -5907,6 +6497,8 @@ version = "1.4.1" description = "Python API and tools to manipulate OpenDocument files" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == 
\"3.11\" or python_version >= \"3.12\"" files = [ {file = "odfpy-1.4.1.tar.gz", hash = "sha256:db766a6e59c5103212f3cc92ec8dd50a0f3a02790233ed0b52148b70d3c438ec"}, ] @@ -5920,6 +6512,8 @@ version = "0.47" description = "Python package to parse, read and write Microsoft OLE2 files (Structured Storage or Compound Document, Microsoft Office)" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "olefile-0.47-py2.py3-none-any.whl", hash = "sha256:543c7da2a7adadf21214938bb79c83ea12b473a4b6ee4ad4bf854e7715e13d1f"}, {file = "olefile-0.47.zip", hash = "sha256:599383381a0bf3dfbd932ca0ca6515acd174ed48870cbf7fee123d698c192c1c"}, @@ -5934,6 +6528,8 @@ version = "1.20.1" description = "ONNX Runtime is a runtime accelerator for Machine Learning models" optional = false python-versions = "*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "onnxruntime-1.20.1-cp310-cp310-macosx_13_0_universal2.whl", hash = "sha256:e50ba5ff7fed4f7d9253a6baf801ca2883cc08491f9d32d78a80da57256a5439"}, {file = "onnxruntime-1.20.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b2908b50101a19e99c4d4e97ebb9905561daf61829403061c1adc1b588bc0de"}, @@ -5972,6 +6568,8 @@ version = "1.52.2" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "openai-1.52.2-py3-none-any.whl", hash = "sha256:57e9e37bc407f39bb6ec3a27d7e8fb9728b2779936daa1fcf95df17d3edfaccc"}, {file = "openai-1.52.2.tar.gz", hash = "sha256:87b7d0f69d85f5641678d414b7ee3082363647a5c66a462ed7f3ccb59582da0d"}, @@ -5996,6 +6594,8 @@ version = "0.11.4" description = "A stats collection and distributed tracing framework" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "opencensus-0.11.4-py2.py3-none-any.whl", hash = "sha256:a18487ce68bc19900336e0ff4655c5a116daf10c1b3685ece8d971bddad6a864"}, {file = "opencensus-0.11.4.tar.gz", hash = "sha256:cbef87d8b8773064ab60e5c2a1ced58bbaa38a6d052c41aec224958ce544eff2"}, @@ -6012,6 +6612,8 @@ version = "0.1.3" description = "OpenCensus Runtime Context" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "opencensus-context-0.1.3.tar.gz", hash = "sha256:a03108c3c10d8c80bb5ddf5c8a1f033161fa61972a9917f9b9b3a18517f0088c"}, {file = "opencensus_context-0.1.3-py2.py3-none-any.whl", hash = "sha256:073bb0590007af276853009fac7e4bab1d523c3f03baf4cb4511ca38967c6039"}, @@ -6023,6 +6625,8 @@ version = "1.1.14" description = "OpenCensus Azure Monitor Exporter" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "opencensus-ext-azure-1.1.14.tar.gz", hash = "sha256:c9c6ebad542aeb61813322e627d5889a563e7b8c4e024bf58469d06db73ab148"}, {file = "opencensus_ext_azure-1.1.14-py2.py3-none-any.whl", hash = "sha256:a1f6870d6e4e312832e6ebd95df28ed499ac637c36cbd77665fe06e24ddeb2f1"}, @@ -6041,6 +6645,8 @@ version = "0.1.1" description = "OpenCensus logging Integration" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ 
{file = "opencensus-ext-logging-0.1.1.tar.gz", hash = "sha256:c203b70f034151dada529f543af330ba17aaffec27d8a5267d03c713eb1de334"}, {file = "opencensus_ext_logging-0.1.1-py2.py3-none-any.whl", hash = "sha256:cfdaf5da5d8b195ff3d1af87a4066a6621a28046173f6be4b0b6caec4a3ca89f"}, @@ -6055,6 +6661,8 @@ version = "0.45.13" description = "Apache OpenDAL™ Python Binding" optional = false python-versions = ">=3.10" +groups = ["storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "opendal-0.45.13-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3afe389215249b1d067cace6b8d1259ab1a2a74bc963d1c7e47dac5e85c8ffc5"}, {file = "opendal-0.45.13-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a0062482d348617abdc89515fa9cea5c17ae8ac28694b8b5a704530eb91c90e"}, @@ -6081,6 +6689,8 @@ version = "3.1.5" description = "A Python library to read/write Excel 2010 xlsx/xlsm files" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"}, {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"}, @@ -6095,6 +6705,8 @@ version = "2.4.0" description = "Python client for OpenSearch" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "opensearch-py-2.4.0.tar.gz", hash = "sha256:7eba2b6ed2ddcf33225bfebfba2aee026877838cc39f760ec80f27827308cc4b"}, {file = "opensearch_py-2.4.0-py2.py3-none-any.whl", hash = "sha256:316077235437c8ceac970232261f3393c65fb92a80f33c5b106f50f1dab24fd9"}, @@ -6119,6 +6731,8 @@ version = "1.29.0" description = "OpenTelemetry Python API" optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "opentelemetry_api-1.29.0-py3-none-any.whl", hash = "sha256:5fcd94c4141cc49c736271f3e1efb777bebe9cc535759c54c936cca4f1b312b8"}, {file = "opentelemetry_api-1.29.0.tar.gz", hash = "sha256:d04a6cf78aad09614f52964ecb38021e248f5714dc32c2e0d8fd99517b4d69cf"}, @@ -6134,6 +6748,8 @@ version = "1.15.0" description = "OpenTelemetry Collector Protobuf over gRPC Exporter" optional = false python-versions = ">=3.7" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "opentelemetry_exporter_otlp_proto_grpc-1.15.0-py3-none-any.whl", hash = "sha256:c2a5492ba7d140109968135d641d06ce3c5bd73c50665f787526065d57d7fd1d"}, {file = "opentelemetry_exporter_otlp_proto_grpc-1.15.0.tar.gz", hash = "sha256:844f2a4bb9bcda34e4eb6fe36765e5031aacb36dc60ed88c90fc246942ea26e7"}, @@ -6156,6 +6772,8 @@ version = "0.50b0" description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "opentelemetry_instrumentation-0.50b0-py3-none-any.whl", hash = "sha256:b8f9fc8812de36e1c6dffa5bfc6224df258841fb387b6dfe5df15099daa10630"}, {file = "opentelemetry_instrumentation-0.50b0.tar.gz", hash = "sha256:7d98af72de8dec5323e5202e46122e5f908592b22c6d24733aad619f07d82979"}, @@ -6173,6 +6791,8 @@ version = "0.50b0" description = "ASGI 
instrumentation for OpenTelemetry" optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "opentelemetry_instrumentation_asgi-0.50b0-py3-none-any.whl", hash = "sha256:2ba1297f746e55dec5a17fe825689da0613662fb25c004c3965a6c54b1d5be22"}, {file = "opentelemetry_instrumentation_asgi-0.50b0.tar.gz", hash = "sha256:3ca4cb5616ae6a3e8ce86e7d5c360a8d8cc8ed722cf3dc8a5e44300774e87d49"}, @@ -6194,6 +6814,8 @@ version = "0.50b0" description = "OpenTelemetry FastAPI Instrumentation" optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "opentelemetry_instrumentation_fastapi-0.50b0-py3-none-any.whl", hash = "sha256:8f03b738495e4705fbae51a2826389c7369629dace89d0f291c06ffefdff5e52"}, {file = "opentelemetry_instrumentation_fastapi-0.50b0.tar.gz", hash = "sha256:16b9181682136da210295def2bb304a32fb9bdee9a935cdc9da43567f7c1149e"}, @@ -6215,6 +6837,8 @@ version = "1.15.0" description = "OpenTelemetry Python Proto" optional = false python-versions = ">=3.7" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "opentelemetry_proto-1.15.0-py3-none-any.whl", hash = "sha256:044b6d044b4d10530f250856f933442b8753a17f94ae37c207607f733fb9a844"}, {file = "opentelemetry_proto-1.15.0.tar.gz", hash = "sha256:9c4008e40ac8cab359daac283fbe7002c5c29c77ea2674ad5626a249e64e0101"}, @@ -6229,6 +6853,8 @@ version = "1.29.0" description = "OpenTelemetry Python SDK" optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "opentelemetry_sdk-1.29.0-py3-none-any.whl", hash = "sha256:173be3b5d3f8f7d671f20ea37056710217959e774e2749d984355d1f9391a30a"}, {file = "opentelemetry_sdk-1.29.0.tar.gz", hash = "sha256:b0787ce6aade6ab84315302e72bd7a7f2f014b0fb1b7c3295b88afe014ed0643"}, @@ -6245,6 +6871,8 @@ version = "0.50b0" description = "OpenTelemetry Semantic Conventions" optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "opentelemetry_semantic_conventions-0.50b0-py3-none-any.whl", hash = "sha256:e87efba8fdb67fb38113efea6a349531e75ed7ffc01562f65b802fcecb5e115e"}, {file = "opentelemetry_semantic_conventions-0.50b0.tar.gz", hash = "sha256:02dc6dbcb62f082de9b877ff19a3f1ffaa3c306300fa53bfac761c4567c83d38"}, @@ -6260,17 +6888,48 @@ version = "0.50b0" description = "Web util for OpenTelemetry" optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "opentelemetry_util_http-0.50b0-py3-none-any.whl", hash = "sha256:21f8aedac861ffa3b850f8c0a6c373026189eb8630ac6e14a2bf8c55695cc090"}, {file = "opentelemetry_util_http-0.50b0.tar.gz", hash = "sha256:dc4606027e1bc02aabb9533cc330dd43f874fca492e4175c31d7154f341754af"}, ] +[[package]] +name = "opik" +version = "1.3.4" +description = "Comet tool for logging and evaluating LLM traces" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "opik-1.3.4-py3-none-any.whl", hash = "sha256:c5e10a9f1fb18188471cce2ae8b841e8b187d04ee3b1aed01c643102bae588fb"}, + {file = "opik-1.3.4.tar.gz", hash = "sha256:6013d3af4aea61f38b9e7121aa5d8cf4305a5ed3807b3f43d9ab91602b2a5785"}, +] + +[package.dependencies] +click = 
"*" +httpx = "<0.28.0" +levenshtein = "<1.0.0" +litellm = "*" +openai = "<2.0.0" +pydantic = ">=2.0.0,<3.0.0" +pydantic-settings = ">=2.0.0,<3.0.0" +pytest = "*" +rich = "*" +tenacity = "*" +tqdm = "*" +uuid6 = "*" + [[package]] name = "oracledb" version = "2.2.1" description = "Python interface to Oracle Database" optional = false python-versions = ">=3.7" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "oracledb-2.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3dacef7c4dd3fca94728f05336076e063450bb57ea569e8dd67fae960aaf537e"}, {file = "oracledb-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd8fdc93a65ae2e1c934a0e3e64cb01997ba004c48a986a37583f670dd344802"}, @@ -6314,6 +6973,7 @@ version = "3.10.13" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" +groups = ["main", "vdb"] files = [ {file = "orjson-3.10.13-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:1232c5e873a4d1638ef957c5564b4b0d6f2a6ab9e207a9b3de9de05a09d1d920"}, {file = "orjson-3.10.13-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d26a0eca3035619fa366cbaf49af704c7cb1d4a0e6c79eced9f6a3f2437964b6"}, @@ -6391,6 +7051,7 @@ files = [ {file = "orjson-3.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:5385935a73adce85cc7faac9d396683fd813566d3857fa95a0b521ef84a5b588"}, {file = "orjson-3.10.13.tar.gz", hash = "sha256:eb9bfb14ab8f68d9d9492d4817ae497788a15fd7da72e14dfabc289c3bb088ec"}, ] +markers = {main = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_python_implementation != \"PyPy\"", vdb = "python_version == \"3.11\" or python_version >= \"3.12\""} [[package]] name = "oss2" @@ -6398,6 +7059,8 @@ version = "2.18.5" description = "Aliyun OSS (Object Storage Service) SDK" optional = false python-versions = "*" +groups = ["storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "oss2-2.18.5.tar.gz", hash = "sha256:555c857f4441ae42a2c0abab8fc9482543fba35d65a4a4be73101c959a2b4011"}, ] @@ -6416,6 +7079,8 @@ version = "7.7.0" description = "A decorator to automatically detect mismatch when overriding a method." 
optional = false python-versions = ">=3.6" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, @@ -6427,6 +7092,8 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -6438,6 +7105,8 @@ version = "2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" +groups = ["main", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, @@ -6532,6 +7201,8 @@ version = "2.2.3.241126" description = "Type annotations for pandas" optional = false python-versions = ">=3.10" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pandas_stubs-2.2.3.241126-py3-none-any.whl", hash = "sha256:74aa79c167af374fe97068acc90776c0ebec5266a6e5c69fe11e9c2cf51f2267"}, {file = "pandas_stubs-2.2.3.241126.tar.gz", hash = "sha256:cf819383c6d9ae7d4dabf34cd47e1e45525bb2f312e6ad2939c2c204cb708acd"}, @@ -6547,6 +7218,8 @@ version = "0.3.3" description = "parallel graph management and execution in heterogeneous computing" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pathos-0.3.3-py3-none-any.whl", hash = "sha256:e04616c6448608ad1f809360be22e3f2078d949a36a81e6991da6c2dd1f82513"}, {file = "pathos-0.3.3.tar.gz", hash = "sha256:dcb2a5f321aa34ca541c1c1861011ea49df357bb908379c21dd5741f666e0a58"}, @@ -6564,6 +7237,8 @@ version = "3.17.8" description = "a little orm" optional = false python-versions = "*" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "peewee-3.17.8.tar.gz", hash = "sha256:ce1d05db3438830b989a1b9d0d0aa4e7f6134d5f6fd57686eeaa26a3e6485a8c"}, ] @@ -6574,6 +7249,8 @@ version = "0.2.2" description = "Python binding for pgvecto.rs" optional = false python-versions = "<3.13,>=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pgvecto_rs-0.2.2-py3-none-any.whl", hash = "sha256:5f3f7f806813de408c45dc10a9eb418b986c4d7b7723e8fce9298f2f7d8fbbd5"}, {file = "pgvecto_rs-0.2.2.tar.gz", hash = "sha256:edaa913d1747152b1407cbdf6337d51ac852547b54953ef38997433be3a75a3b"}, @@ -6596,6 +7273,8 @@ version = "0.2.5" description = "pgvector support for Python" optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pgvector-0.2.5-py2.py3-none-any.whl", hash = 
"sha256:5e5e93ec4d3c45ab1fa388729d56c602f6966296e19deee8878928c6d567e41b"}, ] @@ -6609,6 +7288,8 @@ version = "11.1.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.9" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pillow-11.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8"}, {file = "pillow-11.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192"}, @@ -6697,6 +7378,8 @@ version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, @@ -6713,6 +7396,8 @@ version = "5.24.1" description = "An open-source, interactive data visualization library for Python" optional = false python-versions = ">=3.8" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "plotly-5.24.1-py3-none-any.whl", hash = "sha256:f67073a1e637eb0dc3e46324d9d51e2fe76e9727c892dde64ddf1e1b51f29089"}, {file = "plotly-5.24.1.tar.gz", hash = "sha256:dbc8ac8339d248a4bcc36e08a5659bacfe1b079390b8953533f4eb22169b4bae"}, @@ -6728,6 +7413,8 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -6743,6 +7430,8 @@ version = "3.11" description = "Python Lex & Yacc" optional = false python-versions = "*" +groups = ["lint", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, @@ -6754,6 +7443,8 @@ version = "2.10.1" description = "Wraps the portalocker recipe for easy usage" optional = false python-versions = ">=3.8" +groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"}, {file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"}, @@ -6773,6 +7464,8 @@ version = "0.17.2" description = "PostgREST client for Python. This library provides an ORM interface to PostgREST." 
optional = false python-versions = "<4.0,>=3.9" +groups = ["storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "postgrest-0.17.2-py3-none-any.whl", hash = "sha256:f7c4f448e5a5e2d4c1dcf192edae9d1007c4261e9a6fb5116783a0046846ece2"}, {file = "postgrest-0.17.2.tar.gz", hash = "sha256:445cd4e4a191e279492549df0c4e827d32f9d01d0852599bb8a6efb0f07fcf78"}, @@ -6789,6 +7482,8 @@ version = "3.7.5" description = "Integrate PostHog into any python application." optional = false python-versions = "*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "posthog-3.7.5-py2.py3-none-any.whl", hash = "sha256:022132c17069dde03c5c5904e2ae1b9bd68d5059cbc5a8dffc5c1537a1b71cb5"}, {file = "posthog-3.7.5.tar.gz", hash = "sha256:8ba40ab623da35db72715fc87fe7dccb7fc272ced92581fe31db2d4dbe7ad761"}, @@ -6812,6 +7507,8 @@ version = "0.3.5" description = "utilities for filesystem exploration and automated builds" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pox-0.3.5-py3-none-any.whl", hash = "sha256:9e82bcc9e578b43e80a99cad80f0d8f44f4d424f0ee4ee8d4db27260a6aa365a"}, {file = "pox-0.3.5.tar.gz", hash = "sha256:8120ee4c94e950e6e0483e050a4f0e56076e590ba0a9add19524c254bd23c2d1"}, @@ -6823,6 +7520,8 @@ version = "1.7.6.9" description = "distributed and parallel Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ppft-1.7.6.9-py3-none-any.whl", hash = "sha256:dab36548db5ca3055067fbe6b1a17db5fee29f3c366c579a9a27cebb52ed96f0"}, {file = "ppft-1.7.6.9.tar.gz", hash = "sha256:73161c67474ea9d81d04bcdad166d399cff3f084d5d2dc21ebdd46c075bbc265"}, @@ -6837,6 +7536,8 @@ version = "0.10.0" description = "HTTP client that can impersonate web browsers, mimicking their headers and `TLS/JA3/JA4/HTTP2` fingerprints" optional = false python-versions = ">=3.8" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "primp-0.10.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:7a91a089bf2962b5b56c8d83d09535eb81cf55b53c09d83208b9e5a715cf2c17"}, {file = "primp-0.10.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:0128453cce81552f7aa6ac2bf9b8741b7816cdb2d10536e62c77daaf6483b9af"}, @@ -6858,6 +7559,8 @@ version = "3.0.48" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e"}, {file = "prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90"}, @@ -6872,6 +7575,8 @@ version = "0.2.1" description = "Accelerated property cache" optional = false python-versions = ">=3.9" +groups = ["main", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6"}, {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2"}, @@ -6963,6 +7668,8 @@ version = "1.25.0" description = 
"Beautiful, Pythonic protocol buffers." optional = false python-versions = ">=3.7" +groups = ["main", "storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "proto_plus-1.25.0-py3-none-any.whl", hash = "sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961"}, {file = "proto_plus-1.25.0.tar.gz", hash = "sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91"}, @@ -6980,6 +7687,8 @@ version = "4.25.5" description = "" optional = false python-versions = ">=3.8" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8"}, {file = "protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea"}, @@ -7000,6 +7709,8 @@ version = "6.1.1" description = "Cross-platform lib for process and system monitoring in Python." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "psutil-6.1.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9ccc4316f24409159897799b83004cb1e24f9819b0dcf9c0b68bdcb6cefee6a8"}, {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ca9609c77ea3b8481ab005da74ed894035936223422dc591d6772b147421f777"}, @@ -7030,6 +7741,8 @@ version = "1.0.2" description = "psycopg2 integration with coroutine libraries" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "psycogreen-1.0.2.tar.gz", hash = "sha256:c429845a8a49cf2f76b71265008760bcd7c7c77d80b806db4dc81116dbcd130d"}, ] @@ -7040,6 +7753,8 @@ version = "2.9.10" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false python-versions = ">=3.8" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"}, {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"}, @@ -7116,6 +7831,8 @@ version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, @@ -7127,6 +7844,8 @@ version = "9.0.0" description = "Get CPU info with pure Python" optional = false python-versions = "*" +groups = ["dev"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, @@ -7138,6 +7857,8 @@ version = "18.1.0" description = "Python library for Apache Arrow" optional = false python-versions = 
">=3.9" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyarrow-18.1.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e21488d5cfd3d8b500b3238a6c4b075efabc18f0f6d80b29239737ebd69caa6c"}, {file = "pyarrow-18.1.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b516dad76f258a702f7ca0250885fc93d1fa5ac13ad51258e39d402bd9e2e1e4"}, @@ -7192,6 +7913,8 @@ version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, @@ -7203,6 +7926,8 @@ version = "0.4.1" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, @@ -7217,10 +7942,12 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["main", "storage", "tools", "vdb"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] +markers = {main = "python_version == \"3.11\" or python_version >= \"3.12\"", storage = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_python_implementation != \"PyPy\"", tools = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_python_implementation == \"PyPy\"", vdb = "python_version == \"3.11\" or python_version >= \"3.12\""} [[package]] name = "pycryptodome" @@ -7228,6 +7955,8 @@ version = "3.19.1" description = "Cryptographic library for Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pycryptodome-3.19.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:694020d2ff985cd714381b9da949a21028c24b86f562526186f6af7c7547e986"}, {file = "pycryptodome-3.19.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:4464b0e8fd5508bff9baf18e6fd4c6548b1ac2ce9862d6965ff6a84ec9cb302a"}, @@ -7269,6 +7998,8 @@ version = "2.9.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, @@ -7289,6 +8020,8 @@ version = "2.23.4" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" +groups = ["main", 
"storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, @@ -7390,6 +8123,8 @@ version = "2.9.0" description = "Extra Pydantic types." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydantic_extra_types-2.9.0-py3-none-any.whl", hash = "sha256:f0bb975508572ba7bf3390b7337807588463b7248587e69f43b1ad7c797530d0"}, {file = "pydantic_extra_types-2.9.0.tar.gz", hash = "sha256:e061c01636188743bb69f368dcd391f327b8cfbfede2fe1cbb1211b06601ba3b"}, @@ -7412,6 +8147,8 @@ version = "2.6.1" description = "Settings management using Pydantic" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydantic_settings-2.6.1-py3-none-any.whl", hash = "sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87"}, {file = "pydantic_settings-2.6.1.tar.gz", hash = "sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0"}, @@ -7432,6 +8169,8 @@ version = "8.0.4" description = "The kitchen sink of Python utility libraries for doing \"stuff\" in a functional way. Based on the Lo-Dash Javascript library." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydash-8.0.4-py3-none-any.whl", hash = "sha256:59d0c9ca0d22b4f8bcfab01bfe2e89b49f4c9e9fa75961caf156094670260999"}, {file = "pydash-8.0.4.tar.gz", hash = "sha256:a33fb17b4b06c617da5c57c711605d2dc8723311ee5388c8371f87cd44bf4112"}, @@ -7449,6 +8188,8 @@ version = "0.25.1" description = "Manipulate audio with an simple and easy high level interface" optional = false python-versions = "*" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydub-0.25.1-py2.py3-none-any.whl", hash = "sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6"}, {file = "pydub-0.25.1.tar.gz", hash = "sha256:980a33ce9949cab2a569606b65674d748ecbca4f0796887fd6f46173a7b0d30f"}, @@ -7460,6 +8201,8 @@ version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.8" +groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, @@ -7474,6 +8217,8 @@ version = "2.8.0" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.7" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, @@ -7494,6 +8239,8 @@ version = "2.5.3" description = "Python Sdk for Milvus" optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pymilvus-2.5.3-py3-none-any.whl", hash = "sha256:64ca63594284586937274800be27a402f3be2d078130bf81d94ab8d7798ac9c8"}, {file = "pymilvus-2.5.3.tar.gz", hash = "sha256:68bc3797b7a14c494caf116cee888894ffd6eba7b96a3ac841be85d60694cc5d"}, @@ -7519,6 +8266,8 @@ version = "1.3.1" description = "Python SDK for mochow" optional = false python-versions = ">=3.7" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pymochow-1.3.1-py3-none-any.whl", hash = "sha256:a7f3b34fd6ea5d1d8413650bb6678365aa148fc396ae945e4ccb4f2365a52327"}, {file = "pymochow-1.3.1.tar.gz", hash = "sha256:1693d10cd0bb7bce45327890a90adafb503155922ccc029acb257699a73a20ba"}, @@ -7535,6 +8284,8 @@ version = "1.1.1" description = "Pure Python MySQL Driver" optional = false python-versions = ">=3.7" +groups = ["tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "PyMySQL-1.1.1-py3-none-any.whl", hash = "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c"}, {file = "pymysql-1.1.1.tar.gz", hash = "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0"}, @@ -7550,6 +8301,8 @@ version = "0.1.18" description = "A python SDK for OceanBase Vector Store, based on SQLAlchemy, compatible with Milvus API." optional = false python-versions = "<4.0,>=3.9" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyobvector-0.1.18-py3-none-any.whl", hash = "sha256:9ca4098fd58f87e9c6ff1cd4a5631c666d51d0607933dd3656b7274eacc36428"}, {file = "pyobvector-0.1.18.tar.gz", hash = "sha256:0497764dc8f60ab2ce8b8d738b05dea946df5679e773049620da5a339091ed92"}, @@ -7567,6 +8320,8 @@ version = "24.3.0" description = "Python wrapper module around the OpenSSL library" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyOpenSSL-24.3.0-py3-none-any.whl", hash = "sha256:e474f5a473cd7f92221cc04976e48f4d11502804657a08a989fb3be5514c904a"}, {file = "pyopenssl-24.3.0.tar.gz", hash = "sha256:49f7a019577d834746bc55c5fce6ecbcec0f2b4ec5ce1cf43a9a173b8138bb36"}, @@ -7585,6 +8340,8 @@ version = "1.14" description = "Thin wrapper for pandoc." 
optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pypandoc-1.14-py3-none-any.whl", hash = "sha256:1315c7ad7fac7236dacf69a05b521ed2c3f1d0177f70e9b92bfffce6c023df22"}, {file = "pypandoc-1.14.tar.gz", hash = "sha256:6b4c45f5f1b9fb5bb562079164806bdbbc3e837b5402bcf3f1139edc5730a197"}, @@ -7596,6 +8353,8 @@ version = "3.2.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.9" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"}, {file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"}, @@ -7610,6 +8369,8 @@ version = "5.1.0" description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pypdf-5.1.0-py3-none-any.whl", hash = "sha256:3bd4f503f4ebc58bae40d81e81a9176c400cbbac2ba2d877367595fb524dfdfc"}, {file = "pypdf-5.1.0.tar.gz", hash = "sha256:425a129abb1614183fd1aca6982f650b47f8026867c0ce7c4b9f281c443d2740"}, @@ -7629,6 +8390,8 @@ version = "4.30.1" description = "Python bindings to PDFium" optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pypdfium2-4.30.1-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:e07c47633732cc18d890bb7e965ad28a9c5a932e548acb928596f86be2e5ae37"}, {file = "pypdfium2-4.30.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5ea2d44e96d361123b67b00f527017aa9c847c871b5714e013c01c3eb36a79fe"}, @@ -7651,6 +8414,8 @@ version = "0.48.9" description = "A SQL query builder API for Python" optional = false python-versions = "*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378"}, ] @@ -7661,6 +8426,8 @@ version = "0.20220715.0" description = "Pure Python library for saving and loading PNG images" optional = false python-versions = "*" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pypng-0.20220715.0-py3-none-any.whl", hash = "sha256:4a43e969b8f5aaafb2a415536c1a8ec7e341cd6a3f957fd5b5f32a4cfeed902c"}, {file = "pypng-0.20220715.0.tar.gz", hash = "sha256:739c433ba96f078315de54c0db975aee537cbc3e1d0ae4ed9aab0ca1e427e2c1"}, @@ -7672,6 +8439,8 @@ version = "1.2.0" description = "Wrappers to call pyproject.toml-based build backend hooks." optional = false python-versions = ">=3.7" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913"}, {file = "pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8"}, @@ -7683,6 +8452,8 @@ version = "3.5.4" description = "A python implementation of GNU readline." 
optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "(python_version == \"3.11\" or python_version >= \"3.12\") and sys_platform == \"win32\"" files = [ {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, @@ -7697,6 +8468,8 @@ version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, @@ -7717,6 +8490,8 @@ version = "4.0.0" description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1"}, {file = "pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6"}, @@ -7737,6 +8512,8 @@ version = "1.1.5" description = "pytest plugin that allows you to add environment variables." optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest_env-1.1.5-py3-none-any.whl", hash = "sha256:ce90cf8772878515c24b31cd97c7fa1f4481cd68d588419fd45f10ecaee6bc30"}, {file = "pytest_env-1.1.5.tar.gz", hash = "sha256:91209840aa0e43385073ac464a554ad2947cc2fd663a9debf88d03b01e0cc1cf"}, @@ -7754,6 +8531,8 @@ version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, @@ -7771,6 +8550,8 @@ version = "0.3.1" description = "Python binding for Rust's library for reading excel and odf file - calamine" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python_calamine-0.3.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2822c39ad52f289732981cee59b4985388624b54e124e41436bb37565ed32f15"}, {file = "python_calamine-0.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f2786751cfe4e81f9170b843741b39a325cf9f49db8d51fc3cd16d6139e0ac60"}, @@ -7883,6 +8664,8 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = 
"python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -7897,6 +8680,8 @@ version = "1.1.2" description = "Create, read, and update Microsoft Word .docx files." optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python_docx-1.1.2-py3-none-any.whl", hash = "sha256:08c20d6058916fb19853fcf080f7f42b6270d89eac9fa5f8c15f691c0017fabe"}, {file = "python_docx-1.1.2.tar.gz", hash = "sha256:0cf1f22e95b9002addca7948e16f2cd7acdfd498047f1941ca5d293db7762efd"}, @@ -7912,6 +8697,8 @@ version = "1.0.1" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false python-versions = ">=3.8" +groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, @@ -7926,6 +8713,8 @@ version = "2024.10.22" description = "ISO 639 language codes, names, and other associated information" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python_iso639-2024.10.22-py3-none-any.whl", hash = "sha256:02d3ce2e01c6896b30b9cbbd3e1c8ee0d7221250b5d63ea9803e0d2a81fd1047"}, {file = "python_iso639-2024.10.22.tar.gz", hash = "sha256:750f21b6a0bc6baa24253a3d8aae92b582bf93aa40988361cd96852c2c6d9a52"}, @@ -7940,6 +8729,8 @@ version = "0.4.27" description = "File type identification using libmagic" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"}, {file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"}, @@ -7951,6 +8742,8 @@ version = "0.0.1" description = "Extract attachments from Outlook .msg files." optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python_oxmsg-0.0.1-py3-none-any.whl", hash = "sha256:8ea7d5dda1bc161a413213da9e18ed152927c1fda2feaf5d1f02192d8ad45eea"}, {file = "python_oxmsg-0.0.1.tar.gz", hash = "sha256:b65c1f93d688b85a9410afa824192a1ddc39da359b04a0bd2cbd3874e84d4994"}, @@ -7967,6 +8760,8 @@ version = "1.0.2" description = "Create, read, and update PowerPoint 2007+ (.pptx) files." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python_pptx-1.0.2-py3-none-any.whl", hash = "sha256:160838e0b8565a8b1f67947675886e9fea18aa5e795db7ae531606d68e785cba"}, {file = "python_pptx-1.0.2.tar.gz", hash = "sha256:479a8af0eaf0f0d76b6f00b0887732874ad2e3188230315290cd1f9dd9cc7095"}, @@ -7984,6 +8779,8 @@ version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" +groups = ["main", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, @@ -7995,6 +8792,7 @@ version = "308" description = "Python for Window Extensions" optional = false python-versions = "*" +groups = ["main", "vdb"] files = [ {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, @@ -8015,6 +8813,7 @@ files = [ {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, ] +markers = {main = "(python_version == \"3.11\" or python_version >= \"3.12\") and (platform_system == \"Windows\" or sys_platform == \"win32\")", vdb = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_system == \"Windows\""} [[package]] name = "pyxlsb" @@ -8022,6 +8821,8 @@ version = "1.0.10" description = "Excel 2007-2010 Binary Workbook (xlsb) parser" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyxlsb-1.0.10-py2.py3-none-any.whl", hash = "sha256:87c122a9a622e35ca5e741d2e541201d28af00fb46bec492cfa9586890b120b4"}, {file = "pyxlsb-1.0.10.tar.gz", hash = "sha256:8062d1ea8626d3f1980e8b1cfe91a4483747449242ecb61013bc2df85435f685"}, @@ -8033,6 +8834,8 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["main", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -8095,6 +8898,8 @@ version = "1.7.3" description = "Client library for the Qdrant vector search engine" optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "qdrant_client-1.7.3-py3-none-any.whl", hash = "sha256:b062420ba55eb847652c7d2a26404fb1986bea13aa785763024013f96a7a915c"}, {file = "qdrant_client-1.7.3.tar.gz", hash = "sha256:7b809be892cdc5137ae80ea3335da40c06499ad0b0072b5abc6bad79da1d29fc"}, @@ -8121,6 +8926,8 @@ version = "7.4.2" description = "QR Code image generator" optional = false python-versions = ">=3.7" +groups = ["tools"] 
+markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "qrcode-7.4.2-py3-none-any.whl", hash = "sha256:581dca7a029bcb2deef5d01068e39093e80ef00b4a61098a2182eac59d01643a"}, {file = "qrcode-7.4.2.tar.gz", hash = "sha256:9dd969454827e127dbd93696b20747239e6d540e082937c90f14ac95b30f5845"}, @@ -8144,6 +8951,8 @@ version = "0.2.2" description = "Various BM25 algorithms for document ranking" optional = false python-versions = "*" +groups = ["indirect"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rank_bm25-0.2.2-py3-none-any.whl", hash = "sha256:7bd4a95571adadfc271746fa146a4bcfd89c0cf731e49c3d1ad863290adbe8ae"}, {file = "rank_bm25-0.2.2.tar.gz", hash = "sha256:096ccef76f8188563419aaf384a02f0ea459503fdf77901378d4fd9d87e5e51d"}, @@ -8161,6 +8970,8 @@ version = "3.11.0" description = "rapid fuzzy string matching" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rapidfuzz-3.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb8a54543d16ab1b69e2c5ed96cabbff16db044a50eddfc028000138ca9ddf33"}, {file = "rapidfuzz-3.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:231c8b2efbd7f8d2ecd1ae900363ba168b8870644bb8f2b5aa96e4a7573bde19"}, @@ -8261,6 +9072,8 @@ version = "0.2.0" description = "Python wrapper for Mozilla's Readability.js" optional = false python-versions = ">=3.6.0" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "readabilipy-0.2.0-py3-none-any.whl", hash = "sha256:0050853cd6ab012ac75bb4d8f06427feb7dc32054da65060da44654d049802d0"}, {file = "readabilipy-0.2.0.tar.gz", hash = "sha256:098bf347b19f362042fb6c08864ad776588bf844ac2261fb230f7f9c250fdae5"}, @@ -8283,6 +9096,8 @@ version = "2.1.0" description = "" optional = false python-versions = "<4.0,>=3.9" +groups = ["storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "realtime-2.1.0-py3-none-any.whl", hash = "sha256:e2d4f28bb2a08c1cf80e40fbf31e6116544ad29d67dd4093093e511ad738708c"}, {file = "realtime-2.1.0.tar.gz", hash = "sha256:ca3ae6be47667a3cf3a307fec982ec1bf60313c38a8e29f016ab0380b76d7adb"}, @@ -8300,6 +9115,8 @@ version = "5.0.8" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "redis-5.0.8-py3-none-any.whl", hash = "sha256:56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4"}, {file = "redis-5.0.8.tar.gz", hash = "sha256:0c5b10d387568dfe0698c6fad6615750c24170e548ca2deac10c649d463e9870"}, @@ -8319,6 +9136,8 @@ version = "0.35.1" description = "JSON Referencing + Python" optional = false python-versions = ">=3.8" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, @@ -8334,6 +9153,8 @@ version = "2024.11.6" description = "Alternative regular expression module, to replace re." 
optional = false python-versions = ">=3.8" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, @@ -8437,6 +9258,8 @@ version = "0.22.0" description = "Python client for Replicate" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "replicate-0.22.0-py3-none-any.whl", hash = "sha256:a11e20e9589981a96bee6f3817494b5cc29735a108c71aff4515a81863ad9996"}, {file = "replicate-0.22.0.tar.gz", hash = "sha256:cab48c15ede619d5aa7d023a241626d504c70ea2b7db5792ebfb5ae9fa373cbc"}, @@ -8457,6 +9280,8 @@ version = "2.31.0" description = "Python HTTP for Humans." optional = false python-versions = ">=3.7" +groups = ["main", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, @@ -8478,6 +9303,8 @@ version = "2.1.0" description = "File transport adapter for Requests" optional = false python-versions = "*" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "requests_file-2.1.0-py2.py3-none-any.whl", hash = "sha256:cf270de5a4c5874e84599fc5778303d496c10ae5e870bfa378818f35d21bda5c"}, {file = "requests_file-2.1.0.tar.gz", hash = "sha256:0f549a3f3b0699415ac04d167e9cb39bccfb730cb832b4d20be3d9867356e658"}, @@ -8492,6 +9319,8 @@ version = "2.0.0" description = "OAuthlib authentication support for Requests." optional = false python-versions = ">=3.4" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, @@ -8510,6 +9339,8 @@ version = "1.0.0" description = "A utility belt for advanced users of python-requests" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, @@ -8524,6 +9355,8 @@ version = "0.7.2" description = "Resend Python SDK" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "resend-0.7.2-py2.py3-none-any.whl", hash = "sha256:4f16711e11b007da7f8826283af6cdc34c99bd77c1dfad92afe9466a90d06c61"}, {file = "resend-0.7.2.tar.gz", hash = "sha256:bb10522a5ef1235b6cc2d74902df39c4863ac12b89dc48b46dd5c6f980574622"}, @@ -8538,6 +9371,8 @@ version = "0.9.2" description = "Easy to use retry decorator." 
optional = false python-versions = "*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"}, {file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"}, @@ -8553,6 +9388,8 @@ version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" +groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, @@ -8571,6 +9408,8 @@ version = "0.22.3" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, @@ -8683,6 +9522,8 @@ version = "4.9" description = "Pure-Python RSA implementation" optional = false python-versions = ">=3.6,<4" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, @@ -8693,29 +9534,31 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruff" -version = "0.8.6" +version = "0.9.2" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" +groups = ["lint"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "ruff-0.8.6-py3-none-linux_armv6l.whl", hash = "sha256:defed167955d42c68b407e8f2e6f56ba52520e790aba4ca707a9c88619e580e3"}, - {file = "ruff-0.8.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:54799ca3d67ae5e0b7a7ac234baa657a9c1784b48ec954a094da7c206e0365b1"}, - {file = "ruff-0.8.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e88b8f6d901477c41559ba540beeb5a671e14cd29ebd5683903572f4b40a9807"}, - {file = "ruff-0.8.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0509e8da430228236a18a677fcdb0c1f102dd26d5520f71f79b094963322ed25"}, - {file = "ruff-0.8.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91a7ddb221779871cf226100e677b5ea38c2d54e9e2c8ed847450ebbdf99b32d"}, - {file = "ruff-0.8.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:248b1fb3f739d01d528cc50b35ee9c4812aa58cc5935998e776bf8ed5b251e75"}, - {file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:bc3c083c50390cf69e7e1b5a5a7303898966be973664ec0c4a4acea82c1d4315"}, - {file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52d587092ab8df308635762386f45f4638badb0866355b2b86760f6d3c076188"}, - {file = "ruff-0.8.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61323159cf21bc3897674e5adb27cd9e7700bab6b84de40d7be28c3d46dc67cf"}, - {file = "ruff-0.8.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ae4478b1471fc0c44ed52a6fb787e641a2ac58b1c1f91763bafbc2faddc5117"}, - {file = "ruff-0.8.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0c000a471d519b3e6cfc9c6680025d923b4ca140ce3e4612d1a2ef58e11f11fe"}, - {file = "ruff-0.8.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9257aa841e9e8d9b727423086f0fa9a86b6b420fbf4bf9e1465d1250ce8e4d8d"}, - {file = "ruff-0.8.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:45a56f61b24682f6f6709636949ae8cc82ae229d8d773b4c76c09ec83964a95a"}, - {file = "ruff-0.8.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:496dd38a53aa173481a7d8866bcd6451bd934d06976a2505028a50583e001b76"}, - {file = "ruff-0.8.6-py3-none-win32.whl", hash = "sha256:e169ea1b9eae61c99b257dc83b9ee6c76f89042752cb2d83486a7d6e48e8f764"}, - {file = "ruff-0.8.6-py3-none-win_amd64.whl", hash = "sha256:f1d70bef3d16fdc897ee290d7d20da3cbe4e26349f62e8a0274e7a3f4ce7a905"}, - {file = "ruff-0.8.6-py3-none-win_arm64.whl", hash = "sha256:7d7fc2377a04b6e04ffe588caad613d0c460eb2ecba4c0ccbbfe2bc973cbc162"}, - {file = "ruff-0.8.6.tar.gz", hash = "sha256:dcad24b81b62650b0eb8814f576fc65cfee8674772a6e24c9b747911801eeaa5"}, + {file = "ruff-0.9.2-py3-none-linux_armv6l.whl", hash = "sha256:80605a039ba1454d002b32139e4970becf84b5fee3a3c3bf1c2af6f61a784347"}, + {file = "ruff-0.9.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b9aab82bb20afd5f596527045c01e6ae25a718ff1784cb92947bff1f83068b00"}, + {file = "ruff-0.9.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fbd337bac1cfa96be615f6efcd4bc4d077edbc127ef30e2b8ba2a27e18c054d4"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b35259b0cbf8daa22a498018e300b9bb0174c2bbb7bcba593935158a78054d"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b6a9701d1e371bf41dca22015c3f89769da7576884d2add7317ec1ec8cb9c3c"}, + {file = 
"ruff-0.9.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cc53e68b3c5ae41e8faf83a3b89f4a5d7b2cb666dff4b366bb86ed2a85b481f"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8efd9da7a1ee314b910da155ca7e8953094a7c10d0c0a39bfde3fcfd2a015684"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3292c5a22ea9a5f9a185e2d131dc7f98f8534a32fb6d2ee7b9944569239c648d"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a605fdcf6e8b2d39f9436d343d1f0ff70c365a1e681546de0104bef81ce88df"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c547f7f256aa366834829a08375c297fa63386cbe5f1459efaf174086b564247"}, + {file = "ruff-0.9.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d18bba3d3353ed916e882521bc3e0af403949dbada344c20c16ea78f47af965e"}, + {file = "ruff-0.9.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b338edc4610142355ccf6b87bd356729b62bf1bc152a2fad5b0c7dc04af77bfe"}, + {file = "ruff-0.9.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:492a5e44ad9b22a0ea98cf72e40305cbdaf27fac0d927f8bc9e1df316dcc96eb"}, + {file = "ruff-0.9.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:af1e9e9fe7b1f767264d26b1075ac4ad831c7db976911fa362d09b2d0356426a"}, + {file = "ruff-0.9.2-py3-none-win32.whl", hash = "sha256:71cbe22e178c5da20e1514e1e01029c73dc09288a8028a5d3446e6bba87a5145"}, + {file = "ruff-0.9.2-py3-none-win_amd64.whl", hash = "sha256:c5e1d6abc798419cf46eed03f54f2e0c3adb1ad4b801119dedf23fcaf69b55b5"}, + {file = "ruff-0.9.2-py3-none-win_arm64.whl", hash = "sha256:a1b63fa24149918f8b37cef2ee6fff81f24f0d74b6f0bdc37bc3e1f2143e41c6"}, + {file = "ruff-0.9.2.tar.gz", hash = "sha256:b5eceb334d55fae5f316f783437392642ae18e16dcf4f1858d55d3c2a0f8f5d0"}, ] [[package]] @@ -8724,6 +9567,8 @@ version = "0.10.4" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e"}, {file = "s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7"}, @@ -8741,6 +9586,8 @@ version = "0.4.5" description = "" optional = false python-versions = ">=3.7" +groups = ["main", "indirect"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "safetensors-0.4.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a63eaccd22243c67e4f2b1c3e258b257effc4acd78f3b9d397edc8cf8f1298a7"}, {file = "safetensors-0.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:23fc9b4ec7b602915cbb4ec1a7c1ad96d2743c322f20ab709e2c35d1b66dad27"}, @@ -8873,6 +9720,8 @@ version = "2.231.0" description = "Open source library for training and deploying models on Amazon SageMaker." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sagemaker-2.231.0-py3-none-any.whl", hash = "sha256:5b6d84484a58c6ac8b22af42c6c5e0ea3c5f42d719345fe6aafba42f93635000"}, {file = "sagemaker-2.231.0.tar.gz", hash = "sha256:d49ee9c35725832dd9810708938af723201b831e82924a3a6ac1c4260a3d8239"}, @@ -8916,6 +9765,8 @@ version = "1.0.16" description = "An python package for sagemaker core functionalities" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sagemaker_core-1.0.16-py3-none-any.whl", hash = "sha256:603f70552c63d7a798b76749cad00a06af4b7362604a0f965d04b1c97f7a7128"}, {file = "sagemaker_core-1.0.16.tar.gz", hash = "sha256:a5e7325bb2d5ad84e9a34fa81ea9a6d36a3b6aa0f02bf9c356a7973476951def"}, @@ -8940,6 +9791,8 @@ version = "0.7.7" description = "Simple data validation library" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "schema-0.7.7-py2.py3-none-any.whl", hash = "sha256:5d976a5b50f36e74e2157b47097b60002bd4d42e65425fcc9c9befadb4255dde"}, {file = "schema-0.7.7.tar.gz", hash = "sha256:7da553abd2958a19dc2547c388cde53398b39196175a9be59ea1caf5ab0a1807"}, @@ -8951,6 +9804,8 @@ version = "1.5.2" description = "A set of python modules for machine learning and data mining" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "scikit_learn-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:299406827fb9a4f862626d0fe6c122f5f87f8910b86fe5daa4c32dcd742139b6"}, {file = "scikit_learn-1.5.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:2d4cad1119c77930b235579ad0dc25e65c917e756fe80cab96aa3b9428bd3fb0"}, @@ -9001,6 +9856,8 @@ version = "1.15.0" description = "Fundamental algorithms for scientific computing in Python" optional = false python-versions = ">=3.10" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "scipy-1.15.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:aeac60d3562a7bf2f35549bdfdb6b1751c50590f55ce7322b4b2fc821dc27fca"}, {file = "scipy-1.15.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5abbdc6ede5c5fed7910cf406a948e2c0869231c0db091593a6b2fa78be77e5d"}, @@ -9058,6 +9915,8 @@ version = "1.44.1" description = "Python client for Sentry (https://sentry.io)" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sentry-sdk-1.44.1.tar.gz", hash = "sha256:24e6a53eeabffd2f95d952aa35ca52f0f4201d17f820ac9d3ff7244c665aaf68"}, {file = "sentry_sdk-1.44.1-py2.py3-none-any.whl", hash = "sha256:5f75eb91d8ab6037c754a87b8501cc581b2827e923682f593bed3539ce5b3999"}, @@ -9108,6 +9967,8 @@ version = "75.7.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" +groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "setuptools-75.7.0-py3-none-any.whl", hash = "sha256:84fb203f278ebcf5cd08f97d3fb96d3fbed4b629d500b29ad60d11e00769b183"}, {file = "setuptools-75.7.0.tar.gz", hash = "sha256:886ff7b16cd342f1d1defc16fc98c9ce3fde69e087a4e1983d7ab634e5f41f4f"}, @@ -9128,6 +9989,8 @@ version = "1.0.0" description = "Py3k port of 
sgmllib." optional = false python-versions = "*" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"}, ] @@ -9138,6 +10001,8 @@ version = "2.0.6" description = "Manipulation and analysis of geometric objects" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "shapely-2.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29a34e068da2d321e926b5073539fd2a1d4429a2c656bd63f0bd4c8f5b236d0b"}, {file = "shapely-2.0.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c84c3f53144febf6af909d6b581bc05e8785d57e27f35ebaa5c1ab9baba13b"}, @@ -9196,6 +10061,8 @@ version = "1.5.4" description = "Tool to Detect Surrounding Shell" optional = false python-versions = ">=3.7" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, @@ -9207,6 +10074,8 @@ version = "1.1.0" description = "Simple WebSocket server and client for Python" optional = false python-versions = ">=3.6" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "simple_websocket-1.1.0-py3-none-any.whl", hash = "sha256:4af6069630a38ed6c561010f0e11a5bc0d4ca569b36306eb257cd9a192497c8c"}, {file = "simple_websocket-1.1.0.tar.gz", hash = "sha256:7939234e7aa067c534abdab3a9ed933ec9ce4691b0713c78acb195560aa52ae4"}, @@ -9225,6 +10094,8 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -9236,6 +10107,8 @@ version = "1.0.1" description = "SMDebug RulesConfig" optional = false python-versions = ">=2.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "smdebug_rulesconfig-1.0.1-py2.py3-none-any.whl", hash = "sha256:104da3e6931ecf879dfc687ca4bbb3bee5ea2bc27f4478e9dbb3ee3655f1ae61"}, {file = "smdebug_rulesconfig-1.0.1.tar.gz", hash = "sha256:7a19e6eb2e6bcfefbc07e4a86ef7a88f32495001a038bf28c7d8e77ab793fcd6"}, @@ -9247,6 +10120,8 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -9258,6 +10133,8 @@ version = "1.0.0" description = "Sans-I/O implementation of SOCKS4, SOCKS4A, and SOCKS5." 
optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "socksio-1.0.0-py3-none-any.whl", hash = "sha256:95dc1f15f9b34e8d7b16f06d74b8ccf48f609af32ab33c608d08761c5dcbb1f3"}, {file = "socksio-1.0.0.tar.gz", hash = "sha256:f88beb3da5b5c38b9890469de67d0cb0f9d494b78b106ca1845f96c10b91c4ac"}, @@ -9269,6 +10146,8 @@ version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." optional = false python-versions = ">=3.8" +groups = ["main", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, @@ -9280,6 +10159,8 @@ version = "2.0.35" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" +groups = ["main", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b"}, {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90"}, @@ -9367,6 +10248,8 @@ version = "0.5.3" description = "A non-validating SQL parser." optional = false python-versions = ">=3.8" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca"}, {file = "sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272"}, @@ -9382,6 +10265,8 @@ version = "0.41.0" description = "The little ASGI library that shines." optional = false python-versions = ">=3.8" +groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "starlette-0.41.0-py3-none-any.whl", hash = "sha256:a0193a3c413ebc9c78bff1c3546a45bb8c8bcb4a84cae8747d650a65bd37210a"}, {file = "starlette-0.41.0.tar.gz", hash = "sha256:39cbd8768b107d68bfe1ff1672b38a2c38b49777de46d2a592841d58e3bf7c2a"}, @@ -9399,6 +10284,8 @@ version = "0.8.2" description = "Supabase Storage client for Python." optional = false python-versions = "<4.0,>=3.9" +groups = ["storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "storage3-0.8.2-py3-none-any.whl", hash = "sha256:f2e995b18c77a2a9265d1a33047d43e4d6abb11eb3ca5067959f68281c305de3"}, {file = "storage3-0.8.2.tar.gz", hash = "sha256:db05d3fe8fb73bd30c814c4c4749664f37a5dfc78b629e8c058ef558c2b89f5a"}, @@ -9415,6 +10302,8 @@ version = "1.7.3" description = "Strict, typed YAML parser" optional = false python-versions = ">=3.7.0" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "strictyaml-1.7.3-py3-none-any.whl", hash = "sha256:fb5c8a4edb43bebb765959e420f9b3978d7f1af88c80606c03fb420888f5d1c7"}, {file = "strictyaml-1.7.3.tar.gz", hash = "sha256:22f854a5fcab42b5ddba8030a0e4be51ca89af0267961c8d6cfa86395586c407"}, @@ -9429,6 +10318,8 @@ version = "2.8.1" description = "Supabase client for Python." 
optional = false python-versions = "<4.0,>=3.9" +groups = ["storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "supabase-2.8.1-py3-none-any.whl", hash = "sha256:dfa8bef89b54129093521d5bba2136ff765baf67cd76d8ad0aa4984d61a7815c"}, {file = "supabase-2.8.1.tar.gz", hash = "sha256:711c70e6acd9e2ff48ca0dc0b1bb70c01c25378cc5189ec9f5ed9655b30bc41d"}, @@ -9449,6 +10340,8 @@ version = "0.6.2" description = "Library for Supabase Functions" optional = false python-versions = "<4.0,>=3.9" +groups = ["storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "supafunc-0.6.2-py3-none-any.whl", hash = "sha256:101b30616b0a1ce8cf938eca1df362fa4cf1deacb0271f53ebbd674190fb0da5"}, {file = "supafunc-0.6.2.tar.gz", hash = "sha256:c7dfa20db7182f7fe4ae436e94e05c06cd7ed98d697fed75d68c7b9792822adc"}, @@ -9463,6 +10356,8 @@ version = "1.13.3" description = "Computer algebra system (CAS) in Python" optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sympy-1.13.3-py3-none-any.whl", hash = "sha256:54612cf55a62755ee71824ce692986f23c88ffa77207b30c1368eda4a7060f73"}, {file = "sympy-1.13.3.tar.gz", hash = "sha256:b27fd2c6530e0ab39e275fc9b683895367e51d5da91baa8d3d64db2565fec4d9"}, @@ -9480,6 +10375,8 @@ version = "0.9.0" description = "Pretty-print tabular data" optional = false python-versions = ">=3.7" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, @@ -9494,6 +10391,8 @@ version = "3.0.0" description = "Traceback serialization library." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tblib-3.0.0-py3-none-any.whl", hash = "sha256:80a6c77e59b55e83911e1e607c649836a69c103963c5f28a46cbeef44acf8129"}, {file = "tblib-3.0.0.tar.gz", hash = "sha256:93622790a0a29e04f0346458face1e144dc4d32f493714c6c3dff82a4adb77e6"}, @@ -9505,6 +10404,8 @@ version = "1.3.2" description = "Tencent VectorDB Python SDK" optional = false python-versions = ">=3" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tcvectordb-1.3.2-py3-none-any.whl", hash = "sha256:c4b6922d5df4cf14fcd3e61220d9374d1d53ec7270c254216ae35f8a752908f3"}, {file = "tcvectordb-1.3.2.tar.gz", hash = "sha256:2772f5871a69744ffc7c970b321312d626078533a721de3c744059a81aab419e"}, @@ -9520,6 +10421,8 @@ version = "9.0.0" description = "Retry code until it succeeds" optional = false python-versions = ">=3.8" +groups = ["main", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, @@ -9535,6 +10438,8 @@ version = "3.0.1298" description = "Tencent Cloud Common SDK for Python" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tencentcloud-sdk-python-common-3.0.1298.tar.gz", hash = "sha256:0f0f182410c1ceda5764ff8bcbef27aa6139caf1c5f5985d94ec731a41c8a59f"}, {file = "tencentcloud_sdk_python_common-3.0.1298-py2.py3-none-any.whl", hash = "sha256:c80929a0ff57ebee4ceec749dc82d5f2d1105b888e55175a7e9c722afc3a5d7a"}, @@ -9549,6 +10454,8 @@ version = "3.0.1298" description = "Tencent Cloud Hunyuan SDK for Python" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tencentcloud-sdk-python-hunyuan-3.0.1298.tar.gz", hash = "sha256:c3d86a577de02046d25682a3804955453555fa641082bb8765238460bded3f03"}, {file = "tencentcloud_sdk_python_hunyuan-3.0.1298-py2.py3-none-any.whl", hash = "sha256:f01e33318b6a4152ac88c500fda77f2cda1864eeca000cdd29c41e4f92f8de65"}, @@ -9563,6 +10470,8 @@ version = "2.5.0" description = "ANSI color formatting for output in terminal" optional = false python-versions = ">=3.9" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "termcolor-2.5.0-py3-none-any.whl", hash = "sha256:37b17b5fc1e604945c2642c872a3764b5d547a48009871aea3edd3afa180afb8"}, {file = "termcolor-2.5.0.tar.gz", hash = "sha256:998d8d27da6d48442e8e1f016119076b690d962507531df4890fcd2db2ef8a6f"}, @@ -9577,6 +10486,8 @@ version = "3.5.0" description = "threadpoolctl" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, @@ -9588,6 +10499,8 @@ version = "0.0.9" description = "A Python client for TiDB Vector" optional = false python-versions = "<4.0,>=3.8.1" +groups = ["vdb"] +markers = "python_version == \"3.11\" or 
python_version >= \"3.12\"" files = [ {file = "tidb_vector-0.0.9-py3-none-any.whl", hash = "sha256:db060ee1c981326d3882d0810e0b8b57811f278668f9381168997b360c4296c2"}, {file = "tidb_vector-0.0.9.tar.gz", hash = "sha256:e10680872532808e1bcffa7a92dd2b05bb65d63982f833edb3c6cd590dec7709"}, @@ -9605,6 +10518,8 @@ version = "0.8.0" description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tiktoken-0.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b07e33283463089c81ef1467180e3e00ab00d46c2c4bbcef0acab5f771d6695e"}, {file = "tiktoken-0.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9269348cb650726f44dd3bbb3f9110ac19a8dcc8f54949ad3ef652ca22a38e21"}, @@ -9652,6 +10567,8 @@ version = "0.3" description = "Very compact Japanese tokenizer" optional = false python-versions = "*" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tinysegmenter-0.3.tar.gz", hash = "sha256:ed1f6d2e806a4758a73be589754384cbadadc7e1a414c81a166fc9adf2d40c6d"}, ] @@ -9662,6 +10579,8 @@ version = "5.1.3" description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." optional = false python-versions = ">=3.9" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tldextract-5.1.3-py3-none-any.whl", hash = "sha256:78de310cc2ca018692de5ddf320f9d6bd7c5cf857d0fd4f2175f0cdf4440ea75"}, {file = "tldextract-5.1.3.tar.gz", hash = "sha256:d43c7284c23f5dc8a42fd0fee2abede2ff74cc622674e4cb07f514ab3330c338"}, @@ -9683,6 +10602,8 @@ version = "0.15.2" description = "" optional = false python-versions = ">=3.7" +groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tokenizers-0.15.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:52f6130c9cbf70544287575a985bf44ae1bda2da7e8c24e97716080593638012"}, {file = "tokenizers-0.15.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:054c1cc9c6d68f7ffa4e810b3d5131e0ba511b6e4be34157aa08ee54c2f8d9ee"}, @@ -9810,6 +10731,8 @@ version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, @@ -9821,6 +10744,8 @@ version = "2.7.2" description = "Volc TOS (Tinder Object Storage) SDK" optional = false python-versions = "*" +groups = ["storage"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tos-2.7.2.tar.gz", hash = "sha256:3c31257716785bca7b2cac51474ff32543cda94075a7b7aff70d769c15c7b7ed"}, ] @@ -9838,6 +10763,8 @@ version = "4.67.1" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" +groups = ["main", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tqdm-4.67.1-py3-none-any.whl", hash = 
"sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, @@ -9859,6 +10786,8 @@ version = "4.35.2" description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" optional = false python-versions = ">=3.8.0" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "transformers-4.35.2-py3-none-any.whl", hash = "sha256:9dfa76f8692379544ead84d98f537be01cd1070de75c74efb13abcbc938fbe2f"}, {file = "transformers-4.35.2.tar.gz", hash = "sha256:2d125e197d77b0cdb6c9201df9fa7e2101493272e448b9fba9341c695bee2f52"}, @@ -9927,6 +10856,8 @@ version = "9.0.5" description = "Twilio API client and TwiML generator" optional = false python-versions = ">=3.7.0" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "twilio-9.0.5-py2.py3-none-any.whl", hash = "sha256:5e09e910b9368f50f23cb3c3dd5ba77164d80a81e9d97db955cbac322deb2a4e"}, {file = "twilio-9.0.5.tar.gz", hash = "sha256:e9b5727943584d25d618fe502f0100fc5283215f31c863f80b5c64581b4702b0"}, @@ -9944,6 +10875,8 @@ version = "0.15.1" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." optional = false python-versions = ">=3.7" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "typer-0.15.1-py3-none-any.whl", hash = "sha256:7994fb7b8155b64d3402518560648446072864beefd44aa2dc36972a5972e847"}, {file = "typer-0.15.1.tar.gz", hash = "sha256:a0588c0a7fa68a1978a069818657778f86abe6ff5ea6abf472f940a08bfe4f0a"}, @@ -9961,6 +10894,8 @@ version = "2024.2.0.20241221" description = "Typing stubs for pytz" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_pytz-2024.2.0.20241221-py3-none-any.whl", hash = "sha256:8fc03195329c43637ed4f593663df721fef919b60a969066e22606edf0b53ad5"}, {file = "types_pytz-2024.2.0.20241221.tar.gz", hash = "sha256:06d7cde9613e9f7504766a0554a270c369434b50e00975b3a4a0f6eed0f2c1a9"}, @@ -9972,6 +10907,8 @@ version = "2.32.0.20241016" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, @@ -9986,6 +10923,8 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "lint", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -9997,6 +10936,8 @@ version = "0.9.0" description = "Runtime inspection utilities for typing module." 
optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, @@ -10012,6 +10953,8 @@ version = "2024.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" +groups = ["main", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, @@ -10023,6 +10966,8 @@ version = "5.10.0" description = "Ultra fast JSON encoder and decoder for Python" optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, @@ -10110,6 +11055,8 @@ version = "0.16.12" description = "A library that prepares raw documents for downstream ML tasks." optional = false python-versions = "<3.13,>=3.9.0" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "unstructured-0.16.12-py3-none-any.whl", hash = "sha256:bcac29ac1b38fba4228c5a1a7721d1aa7c48220f7c1dd43b563645c56e978c49"}, {file = "unstructured-0.16.12.tar.gz", hash = "sha256:c3133731c6edb9c2f474e62cb2b560cd0a8d578c4532ec14d8c0941e401770b0"}, @@ -10170,6 +11117,8 @@ version = "0.28.1" description = "Python Client SDK for Unstructured API" optional = false python-versions = "<4.0,>=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "unstructured_client-0.28.1-py3-none-any.whl", hash = "sha256:0112688908f544681a67abf314e0d2023dfa120c8e5d9fa6d31390b914a06d72"}, {file = "unstructured_client-0.28.1.tar.gz", hash = "sha256:aac11fe5dd6b8dfdbc15aad3205fe791a3834dac29bb9f499fd515643554f709"}, @@ -10194,6 +11143,8 @@ version = "0.6.0" description = "Serverless Vector SDK from Upstash" optional = false python-versions = "<4.0,>=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "upstash_vector-0.6.0-py3-none-any.whl", hash = "sha256:d0bdad7765b8a7f5c205b7a9c81ca4b9a4cee3ee4952afc7d5ea5fb76c3f3c3c"}, {file = "upstash_vector-0.6.0.tar.gz", hash = "sha256:a716ed4d0251362208518db8b194158a616d37d1ccbb1155f619df690599e39b"}, @@ -10208,6 +11159,8 @@ version = "4.1.1" description = "Implementation of RFC 6570 URI Templates" optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, @@ -10219,6 +11172,8 @@ version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, 
and more." optional = false python-versions = ">=3.9" +groups = ["main", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, @@ -10230,12 +11185,27 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "uuid6" +version = "2024.7.10" +description = "New time-based UUID formats which are suited for use as a database key" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "uuid6-2024.7.10-py3-none-any.whl", hash = "sha256:93432c00ba403751f722829ad21759ff9db051dea140bf81493271e8e4dd18b7"}, + {file = "uuid6-2024.7.10.tar.gz", hash = "sha256:2d29d7f63f593caaeea0e0d0dd0ad8129c9c663b29e19bdf882e864bedf18fb0"}, +] + [[package]] name = "uvicorn" version = "0.34.0" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.9" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4"}, {file = "uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9"}, @@ -10261,6 +11231,8 @@ version = "0.21.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = false python-versions = ">=3.8.0" +groups = ["vdb"] +markers = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_python_implementation != \"PyPy\" and (sys_platform != \"win32\" and sys_platform != \"cygwin\")" files = [ {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, @@ -10312,6 +11284,8 @@ version = "0.21.0" description = "Python Data Validation for Humans™" optional = false python-versions = ">=3.8,<4.0" +groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "validators-0.21.0-py3-none-any.whl", hash = "sha256:3470db6f2384c49727ee319afa2e97aec3f8fad736faa6067e0fd7f9eaf2c551"}, {file = "validators-0.21.0.tar.gz", hash = "sha256:245b98ab778ed9352a7269c6a8f6c2a839bed5b2a7e3e60273ce399d247dd4b3"}, @@ -10323,6 +11297,8 @@ version = "0.7.5" description = "Generate SQL queries from natural language" optional = false python-versions = ">=3.9" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "vanna-0.7.5-py3-none-any.whl", hash = "sha256:07458c7befa49de517a8760c2d80a13147278b484c515d49a906acc88edcb835"}, {file = "vanna-0.7.5.tar.gz", hash = "sha256:2fdffc58832898e4fc8e93c45b173424db59a22773b22ca348640161d391eacf"}, @@ -10384,6 +11360,8 @@ version = "5.1.0" description = "Python promises." 
optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc"}, {file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"}, @@ -10395,6 +11373,8 @@ version = "1.0.156" description = "Be Compatible with the Volcengine SDK for Python, The version of package dependencies has been modified. like pycryptodome, pytz." optional = false python-versions = "*" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "volcengine_compat-1.0.156-py3-none-any.whl", hash = "sha256:4abc149a7601ebad8fa2d28fab50c7945145cf74daecb71bca797b0bdc82c5a5"}, {file = "volcengine_compat-1.0.156.tar.gz", hash = "sha256:e357d096828e31a202dc6047bbc5bf6fff3f54a98cd35a99ab5f965ea741a267"}, @@ -10415,6 +11395,8 @@ version = "1.0.103" description = "Volcengine SDK for Python" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "volcengine-python-sdk-1.0.103.tar.gz", hash = "sha256:49fa8572802724972e1cb47a7e692b184b055f41b09099358c1a0fad1d146af5"}, ] @@ -10437,6 +11419,8 @@ version = "1.0.3" description = "Simple, modern and high performance file watching and code reload in python." optional = false python-versions = ">=3.9" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "watchfiles-1.0.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1da46bb1eefb5a37a8fb6fd52ad5d14822d67c498d99bda8754222396164ae42"}, {file = "watchfiles-1.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2b961b86cd3973f5822826017cad7f5a75795168cb645c3a6b30c349094e02e3"}, @@ -10520,6 +11504,8 @@ version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, @@ -10531,6 +11517,8 @@ version = "3.21.0" description = "A python native Weaviate client" optional = false python-versions = ">=3.8" +groups = ["vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "weaviate-client-3.21.0.tar.gz", hash = "sha256:ec94ac554883c765e94da8b2947c4f0fa4a0378ed3bbe9f3653df3a5b1745a6d"}, {file = "weaviate_client-3.21.0-py3-none-any.whl", hash = "sha256:420444ded7106fb000f4f8b2321b5f5fa2387825aa7a303d702accf61026f9d2"}, @@ -10551,6 +11539,8 @@ version = "0.5.1" description = "Character encoding aliases for legacy web content" optional = false python-versions = "*" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, @@ -10562,6 +11552,8 @@ version = "1.7.0" description = "WebSocket client for Python with low level API options" optional = false python-versions = ">=3.8" 
+groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, @@ -10578,6 +11570,8 @@ version = "13.1" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.8" +groups = ["storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "websockets-13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee"}, {file = "websockets-13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7"}, @@ -10673,6 +11667,8 @@ version = "3.1.3" description = "The comprehensive WSGI web application library." optional = false python-versions = ">=3.9" +groups = ["main", "tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, @@ -10690,6 +11686,8 @@ version = "1.4.0" description = "Wikipedia API for Python" optional = false python-versions = "*" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "wikipedia-1.4.0.tar.gz", hash = "sha256:db0fad1829fdd441b1852306e9856398204dc0786d2996dd2e0c8bb8e26133b2"}, ] @@ -10704,6 +11702,8 @@ version = "1.2.0" description = "A small Python utility to set file creation time on Windows" optional = false python-versions = ">=3.5" +groups = ["main"] +markers = "(python_version == \"3.11\" or python_version >= \"3.12\") and sys_platform == \"win32\"" files = [ {file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"}, {file = "win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"}, @@ -10718,6 +11718,8 @@ version = "1.17.0" description = "Module for decorators, wrappers and monkey patching." 
optional = false python-versions = ">=3.8" +groups = ["main", "storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "wrapt-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a0c23b8319848426f305f9cb0c98a6e32ee68a36264f45948ccf8e7d2b941f8"}, {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ca5f060e205f72bec57faae5bd817a1560fcfc4af03f414b08fa29106b7e2d"}, @@ -10792,6 +11794,8 @@ version = "1.2.0" description = "WebSockets state-machine based protocol implementation" optional = false python-versions = ">=3.7.0" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, @@ -10806,6 +11810,8 @@ version = "0.15.2" description = "Client for Xinference" optional = false python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "xinference-client-0.15.2.tar.gz", hash = "sha256:5c2259bb133148d1cc9bd2b8ec6eb8b5bbeba7f11d6252959f4e6cd79baa53ed"}, {file = "xinference_client-0.15.2-py3-none-any.whl", hash = "sha256:b6275adab695e75e75a33e21e0ad212488fc2d5a4d0f693d544c0e78469abbe3"}, @@ -10825,6 +11831,8 @@ version = "2.0.1" description = "Library for developers to extract data from Microsoft Excel (tm) .xls spreadsheet files" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "xlrd-2.0.1-py2.py3-none-any.whl", hash = "sha256:6a33ee89877bd9abc1158129f6e94be74e2679636b8a205b43b85206c3f0bbdd"}, {file = "xlrd-2.0.1.tar.gz", hash = "sha256:f72f148f54442c6b056bf931dbc34f986fd0c3b0b6b5a58d013c9aef274d0c88"}, @@ -10841,6 +11849,8 @@ version = "3.2.0" description = "A Python module for creating Excel XLSX files." 
optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "XlsxWriter-3.2.0-py3-none-any.whl", hash = "sha256:ecfd5405b3e0e228219bcaf24c2ca0915e012ca9464a14048021d21a995d490e"}, {file = "XlsxWriter-3.2.0.tar.gz", hash = "sha256:9977d0c661a72866a61f9f7a809e25ebbb0fb7036baa3b9fe74afcfca6b3cb8c"}, @@ -10852,6 +11862,8 @@ version = "0.14.2" description = "Makes working with XML feel like you are working with JSON" optional = false python-versions = ">=3.6" +groups = ["storage", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"}, {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"}, @@ -10863,6 +11875,8 @@ version = "1.18.3" description = "Yet another URL library" optional = false python-versions = ">=3.9" +groups = ["main", "storage", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, @@ -10959,6 +11973,8 @@ version = "0.2.51" description = "Download market data from Yahoo! Finance API" optional = false python-versions = "*" +groups = ["tools"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "yfinance-0.2.51-py2.py3-none-any.whl", hash = "sha256:d5cc7a970bb4bb43e4deee853514cbaa3c2b070a0dee6b2861c1ab5076f21dc1"}, {file = "yfinance-0.2.51.tar.gz", hash = "sha256:7902cc9b23699a51efa50f1cc7a965220a56beccc00d189f929b4c7c5c189a60"}, @@ -10987,6 +12003,8 @@ version = "0.6.3" description = "This is an python API which allows you to get the transcripts/subtitles for a given YouTube video. It also works for automatically generated subtitles, supports translating subtitles and it does not require a headless browser, like other selenium based solutions do!" 
optional = false python-versions = "<3.14,>=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "youtube_transcript_api-0.6.3-py3-none-any.whl", hash = "sha256:297a74c1863d9df88f6885229f33a7eda61493d73ecb13ec80e876b65423e9b4"}, {file = "youtube_transcript_api-0.6.3.tar.gz", hash = "sha256:4d1f6451ae508390a5279f98519efb45e091bf60d3cca5ea0bb122800ab6a011"}, @@ -11002,6 +12020,8 @@ version = "2.1.5.20250106" description = "A SDK library for accessing big model apis from ZhipuAI" optional = false python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "zhipuai-2.1.5.20250106-py3-none-any.whl", hash = "sha256:ca76095f32db501e36038fc1ac4b287b88ed90c4cdd28902d3b1a9365fff879b"}, {file = "zhipuai-2.1.5.20250106.tar.gz", hash = "sha256:45d391be336a210b360f126443f07882fa6d8184a148c46a8c7d0b7607d6d1f8"}, @@ -11020,6 +12040,8 @@ version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.9" +groups = ["main", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, @@ -11039,6 +12061,8 @@ version = "5.0" description = "Very basic event publishing system" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"}, {file = "zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"}, @@ -11057,6 +12081,8 @@ version = "7.2" description = "Interfaces for Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "zope.interface-7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce290e62229964715f1011c3dbeab7a4a1e4971fd6f31324c4519464473ef9f2"}, {file = "zope.interface-7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05b910a5afe03256b58ab2ba6288960a2892dfeef01336dc4be6f1b9ed02ab0a"}, @@ -11111,6 +12137,8 @@ version = "0.23.0" description = "Zstandard bindings for Python" optional = false python-versions = ">=3.8" +groups = ["main", "tools", "vdb"] +markers = "python_version == \"3.11\" or python_version >= \"3.12\"" files = [ {file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"}, {file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"}, @@ -11212,12 +12240,12 @@ files = [ ] [package.dependencies] -cffi = {version = ">=1.11", optional = true, markers = "platform_python_implementation == \"PyPy\" or extra == \"cffi\""} +cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""} [package.extras] cffi = ["cffi (>=1.11)"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = ">=3.11,<3.13" -content-hash = "907718f7ca775ad226c1f668f4bb6c6dbfa6cacc556fce43a8ad0b6f3c35095a" +content-hash = 
"fdc2199389f0e4b6d81b4b7fe2c1d303b1995643fe802ad3a28b196e68c258ae" diff --git a/api/pyproject.toml b/api/pyproject.toml index f8c6f599d1..12455a0e63 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,9 +1,10 @@ [project] name = "dify-api" requires-python = ">=3.11,<3.13" +dynamic = [ "dependencies" ] [build-system] -requires = ["poetry-core"] +requires = ["poetry-core>=2.0.0"] build-backend = "poetry.core.masonry.api" [tool.poetry] @@ -59,6 +60,7 @@ numpy = "~1.26.4" oci = "~2.135.1" openai = "~1.52.0" openpyxl = "~3.1.5" +opik = "~1.3.4" pandas = { version = "~2.2.2", extras = ["performance", "excel"] } pandas-stubs = "~2.2.3.241009" psycogreen = "~1.0.2" @@ -190,4 +192,4 @@ pytest-mock = "~3.14.0" optional = true [tool.poetry.group.lint.dependencies] dotenv-linter = "~0.5.0" -ruff = "~0.8.1" +ruff = "~0.9.2" diff --git a/api/services/app_dsl_service.py b/api/services/app_dsl_service.py index f81ce8393e..15119247f8 100644 --- a/api/services/app_dsl_service.py +++ b/api/services/app_dsl_service.py @@ -1,7 +1,7 @@ import logging import uuid from enum import StrEnum -from typing import Optional, cast +from typing import Optional from urllib.parse import urlparse from uuid import uuid4 @@ -139,15 +139,6 @@ class AppDslService: status=ImportStatus.FAILED, error="Empty content from url", ) - - try: - content = cast(bytes, content).decode("utf-8") - except UnicodeDecodeError as e: - return Import( - id=import_id, - status=ImportStatus.FAILED, - error=f"Error decoding content: {e}", - ) except Exception as e: return Import( id=import_id, diff --git a/api/services/audio_service.py b/api/services/audio_service.py index f4178a69a4..294dfe4c8c 100644 --- a/api/services/audio_service.py +++ b/api/services/audio_service.py @@ -82,7 +82,7 @@ class AudioService: from app import app from extensions.ext_database import db - def invoke_tts(text_content: str, app_model, voice: Optional[str] = None): + def invoke_tts(text_content: str, app_model: App, voice: Optional[str] = None): with app.app_context(): if app_model.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}: workflow = app_model.workflow @@ -95,6 +95,8 @@ class AudioService: voice = features_dict["text_to_speech"].get("voice") if voice is None else voice else: + if app_model.app_model_config is None: + raise ValueError("AppModelConfig not found") text_to_speech_dict = app_model.app_model_config.text_to_speech_dict if not text_to_speech_dict.get("enabled"): diff --git a/api/services/billing_service.py b/api/services/billing_service.py index 9d9dd8a368..0d50a2aa8c 100644 --- a/api/services/billing_service.py +++ b/api/services/billing_service.py @@ -19,14 +19,6 @@ class BillingService: billing_info = cls._send_request("GET", "/subscription/info", params=params) return billing_info - @classmethod - def get_knowledge_rate_limit(cls, tenant_id: str): - params = {"tenant_id": tenant_id} - - knowledge_rate_limit = cls._send_request("GET", "/subscription/knowledge-rate-limit", params=params) - - return knowledge_rate_limit.get("limit", 10) - @classmethod def get_subscription(cls, plan: str, interval: str, prefilled_email: str = "", tenant_id: str = ""): params = {"plan": plan, "interval": interval, "prefilled_email": prefilled_email, "tenant_id": tenant_id} diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index dac0a6a772..c405933736 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -4,6 +4,7 @@ import logging import random import time import uuid +from collections 
import Counter from typing import Any, Optional from flask_login import current_user # type: ignore @@ -221,8 +222,7 @@ class DatasetService: ) except LLMBadRequestError: raise ValueError( - "No Embedding Model available. Please configure a valid provider " - "in the Settings -> Model Provider." + "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider." ) except ProviderTokenNotInitError as ex: raise ValueError(f"The dataset in unavailable, due to: {ex.description}") @@ -859,7 +859,7 @@ class DocumentService: position = DocumentService.get_documents_position(dataset.id) document_ids = [] duplicate_document_ids = [] - if knowledge_config.data_source.info_list.data_source_type == "upload_file": + if knowledge_config.data_source.info_list.data_source_type == "upload_file": # type: ignore upload_file_list = knowledge_config.data_source.info_list.file_info_list.file_ids # type: ignore for file_id in upload_file_list: file = ( @@ -901,7 +901,7 @@ class DocumentService: document = DocumentService.build_document( dataset, dataset_process_rule.id, # type: ignore - knowledge_config.data_source.info_list.data_source_type, + knowledge_config.data_source.info_list.data_source_type, # type: ignore knowledge_config.doc_form, knowledge_config.doc_language, data_source_info, @@ -916,8 +916,8 @@ class DocumentService: document_ids.append(document.id) documents.append(document) position += 1 - elif knowledge_config.data_source.info_list.data_source_type == "notion_import": - notion_info_list = knowledge_config.data_source.info_list.notion_info_list + elif knowledge_config.data_source.info_list.data_source_type == "notion_import": # type: ignore + notion_info_list = knowledge_config.data_source.info_list.notion_info_list # type: ignore if not notion_info_list: raise ValueError("No notion info list found.") exist_page_ids = [] @@ -956,7 +956,7 @@ class DocumentService: document = DocumentService.build_document( dataset, dataset_process_rule.id, # type: ignore - knowledge_config.data_source.info_list.data_source_type, + knowledge_config.data_source.info_list.data_source_type, # type: ignore knowledge_config.doc_form, knowledge_config.doc_language, data_source_info, @@ -976,8 +976,8 @@ class DocumentService: # delete not selected documents if len(exist_document) > 0: clean_notion_document_task.delay(list(exist_document.values()), dataset.id) - elif knowledge_config.data_source.info_list.data_source_type == "website_crawl": - website_info = knowledge_config.data_source.info_list.website_info_list + elif knowledge_config.data_source.info_list.data_source_type == "website_crawl": # type: ignore + website_info = knowledge_config.data_source.info_list.website_info_list # type: ignore if not website_info: raise ValueError("No website info list found.") urls = website_info.urls @@ -996,7 +996,7 @@ class DocumentService: document = DocumentService.build_document( dataset, dataset_process_rule.id, # type: ignore - knowledge_config.data_source.info_list.data_source_type, + knowledge_config.data_source.info_list.data_source_type, # type: ignore knowledge_config.doc_form, knowledge_config.doc_language, data_source_info, @@ -1195,20 +1195,20 @@ class DocumentService: if features.billing.enabled: count = 0 - if knowledge_config.data_source.info_list.data_source_type == "upload_file": + if knowledge_config.data_source.info_list.data_source_type == "upload_file": # type: ignore upload_file_list = ( - knowledge_config.data_source.info_list.file_info_list.file_ids - if 
knowledge_config.data_source.info_list.file_info_list + knowledge_config.data_source.info_list.file_info_list.file_ids # type: ignore + if knowledge_config.data_source.info_list.file_info_list # type: ignore else [] ) count = len(upload_file_list) - elif knowledge_config.data_source.info_list.data_source_type == "notion_import": - notion_info_list = knowledge_config.data_source.info_list.notion_info_list + elif knowledge_config.data_source.info_list.data_source_type == "notion_import": # type: ignore + notion_info_list = knowledge_config.data_source.info_list.notion_info_list # type: ignore if notion_info_list: for notion_info in notion_info_list: count = count + len(notion_info.pages) - elif knowledge_config.data_source.info_list.data_source_type == "website_crawl": - website_info = knowledge_config.data_source.info_list.website_info_list + elif knowledge_config.data_source.info_list.data_source_type == "website_crawl": # type: ignore + website_info = knowledge_config.data_source.info_list.website_info_list # type: ignore if website_info: count = len(website_info.urls) batch_upload_limit = int(dify_config.BATCH_UPLOAD_LIMIT) @@ -1239,7 +1239,7 @@ class DocumentService: dataset = Dataset( tenant_id=tenant_id, name="", - data_source_type=knowledge_config.data_source.info_list.data_source_type, + data_source_type=knowledge_config.data_source.info_list.data_source_type, # type: ignore indexing_technique=knowledge_config.indexing_technique, created_by=account.id, embedding_model=knowledge_config.embedding_model, @@ -1611,8 +1611,11 @@ class SegmentService: segment.answer = args.answer segment.word_count += len(args.answer) if args.answer else 0 word_count_change = segment.word_count - word_count_change + keyword_changed = False if args.keywords: - segment.keywords = args.keywords + if Counter(segment.keywords) != Counter(args.keywords): + segment.keywords = args.keywords + keyword_changed = True segment.enabled = True segment.disabled_at = None segment.disabled_by = None @@ -1623,13 +1626,6 @@ class SegmentService: document.word_count = max(0, document.word_count + word_count_change) db.session.add(document) # update segment index task - if args.enabled: - VectorService.create_segments_vector( - [args.keywords] if args.keywords else None, - [segment], - dataset, - document.doc_form, - ) if document.doc_form == IndexType.PARENT_CHILD_INDEX and args.regenerate_child_chunks: # regenerate child chunks # get embedding model instance @@ -1662,6 +1658,14 @@ class SegmentService: VectorService.generate_child_chunks( segment, document, dataset, embedding_model_instance, processing_rule, True ) + elif document.doc_form in (IndexType.PARAGRAPH_INDEX, IndexType.QA_INDEX): + if args.enabled or keyword_changed: + VectorService.create_segments_vector( + [args.keywords] if args.keywords else None, + [segment], + dataset, + document.doc_form, + ) else: segment_hash = helper.generate_text_hash(content) tokens = 0 diff --git a/api/services/entities/knowledge_entities/knowledge_entities.py b/api/services/entities/knowledge_entities/knowledge_entities.py index 76d9c28812..8d6a246b64 100644 --- a/api/services/entities/knowledge_entities/knowledge_entities.py +++ b/api/services/entities/knowledge_entities/knowledge_entities.py @@ -97,7 +97,7 @@ class KnowledgeConfig(BaseModel): original_document_id: Optional[str] = None duplicate: bool = True indexing_technique: Literal["high_quality", "economy"] - data_source: DataSource + data_source: Optional[DataSource] = None process_rule: Optional[ProcessRule] = None 
retrieval_model: Optional[RetrievalModel] = None doc_form: str = "text_model" diff --git a/api/services/external_knowledge_service.py b/api/services/external_knowledge_service.py index 898624066b..8916a951c7 100644 --- a/api/services/external_knowledge_service.py +++ b/api/services/external_knowledge_service.py @@ -155,7 +155,7 @@ class ExternalDatasetService: if custom_parameters: for parameter in custom_parameters: if parameter.get("required", False) and not process_parameter.get(parameter.get("name")): - raise ValueError(f'{parameter.get("name")} is required') + raise ValueError(f"{parameter.get('name')} is required") @staticmethod def process_external_api( diff --git a/api/services/feature_service.py b/api/services/feature_service.py index 52cfe4f2cb..b9261d19d7 100644 --- a/api/services/feature_service.py +++ b/api/services/feature_service.py @@ -41,7 +41,6 @@ class FeatureModel(BaseModel): members: LimitationModel = LimitationModel(size=0, limit=1) apps: LimitationModel = LimitationModel(size=0, limit=10) vector_space: LimitationModel = LimitationModel(size=0, limit=5) - knowledge_rate_limit: int = 10 annotation_quota_limit: LimitationModel = LimitationModel(size=0, limit=10) documents_upload_quota: LimitationModel = LimitationModel(size=0, limit=50) docs_processing: str = "standard" @@ -53,11 +52,6 @@ class FeatureModel(BaseModel): model_config = ConfigDict(protected_namespaces=()) -class KnowledgeRateLimitModel(BaseModel): - enabled: bool = False - limit: int = 10 - - class SystemFeatureModel(BaseModel): sso_enforced_for_signin: bool = False sso_enforced_for_signin_protocol: str = "" @@ -85,14 +79,6 @@ class FeatureService: return features - @classmethod - def get_knowledge_rate_limit(cls, tenant_id: str): - knowledge_rate_limit = KnowledgeRateLimitModel() - if dify_config.BILLING_ENABLED and tenant_id: - knowledge_rate_limit.enabled = True - knowledge_rate_limit.limit = BillingService.get_knowledge_rate_limit(tenant_id) - return knowledge_rate_limit - @classmethod def get_system_features(cls) -> SystemFeatureModel: system_features = SystemFeatureModel() @@ -158,9 +144,6 @@ class FeatureService: if "model_load_balancing_enabled" in billing_info: features.model_load_balancing_enabled = billing_info["model_load_balancing_enabled"] - if "knowledge_rate_limit" in billing_info: - features.knowledge_rate_limit = billing_info["knowledge_rate_limit"]["limit"] - @classmethod def _fulfill_params_from_enterprise(cls, features): enterprise_info = EnterpriseService.get_info() diff --git a/api/services/ops_service.py b/api/services/ops_service.py index fc1e08518b..78340d2bcc 100644 --- a/api/services/ops_service.py +++ b/api/services/ops_service.py @@ -59,6 +59,15 @@ class OpsService: except Exception: new_decrypt_tracing_config.update({"project_url": "https://smith.langchain.com/"}) + if tracing_provider == "opik" and ( + "project_url" not in decrypt_tracing_config or not decrypt_tracing_config.get("project_url") + ): + try: + project_url = OpsTraceManager.get_trace_config_project_url(decrypt_tracing_config, tracing_provider) + new_decrypt_tracing_config.update({"project_url": project_url}) + except Exception: + new_decrypt_tracing_config.update({"project_url": "https://www.comet.com/opik/"}) + trace_config_data.tracing_config = new_decrypt_tracing_config return trace_config_data.to_dict() @@ -92,7 +101,7 @@ class OpsService: if tracing_provider == "langfuse": project_key = OpsTraceManager.get_trace_config_project_key(tracing_config, tracing_provider) project_url = 
"{host}/project/{key}".format(host=tracing_config.get("host"), key=project_key) - elif tracing_provider == "langsmith": + elif tracing_provider in ("langsmith", "opik"): project_url = OpsTraceManager.get_trace_config_project_url(tracing_config, tracing_provider) else: project_url = None diff --git a/api/tasks/batch_create_segment_to_index_task.py b/api/tasks/batch_create_segment_to_index_task.py index dbef6b708e..e2d2392797 100644 --- a/api/tasks/batch_create_segment_to_index_task.py +++ b/api/tasks/batch_create_segment_to_index_task.py @@ -5,7 +5,8 @@ import uuid import click from celery import shared_task # type: ignore -from sqlalchemy import func +from sqlalchemy import func, select +from sqlalchemy.orm import Session from core.model_manager import ModelManager from core.model_runtime.entities.model_entities import ModelType @@ -18,7 +19,12 @@ from services.vector_service import VectorService @shared_task(queue="dataset") def batch_create_segment_to_index_task( - job_id: str, content: list, dataset_id: str, document_id: str, tenant_id: str, user_id: str + job_id: str, + content: list, + dataset_id: str, + document_id: str, + tenant_id: str, + user_id: str, ): """ Async batch create segment to index @@ -37,71 +43,80 @@ def batch_create_segment_to_index_task( indexing_cache_key = "segment_batch_import_{}".format(job_id) try: - dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() - if not dataset: - raise ValueError("Dataset not exist.") + with Session(db.engine) as session: + dataset = session.get(Dataset, dataset_id) + if not dataset: + raise ValueError("Dataset not exist.") - dataset_document = db.session.query(Document).filter(Document.id == document_id).first() - if not dataset_document: - raise ValueError("Document not exist.") + dataset_document = session.get(Document, document_id) + if not dataset_document: + raise ValueError("Document not exist.") - if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed": - raise ValueError("Document is not available.") - document_segments = [] - embedding_model = None - if dataset.indexing_technique == "high_quality": - model_manager = ModelManager() - embedding_model = model_manager.get_model_instance( - tenant_id=dataset.tenant_id, - provider=dataset.embedding_model_provider, - model_type=ModelType.TEXT_EMBEDDING, - model=dataset.embedding_model, + if ( + not dataset_document.enabled + or dataset_document.archived + or dataset_document.indexing_status != "completed" + ): + raise ValueError("Document is not available.") + document_segments = [] + embedding_model = None + if dataset.indexing_technique == "high_quality": + model_manager = ModelManager() + embedding_model = model_manager.get_model_instance( + tenant_id=dataset.tenant_id, + provider=dataset.embedding_model_provider, + model_type=ModelType.TEXT_EMBEDDING, + model=dataset.embedding_model, + ) + word_count_change = 0 + segments_to_insert: list[str] = [] + max_position_stmt = select(func.max(DocumentSegment.position)).where( + DocumentSegment.document_id == dataset_document.id ) - word_count_change = 0 - segments_to_insert: list[str] = [] # Explicitly type hint the list as List[str] - for segment in content: - content_str = segment["content"] - doc_id = str(uuid.uuid4()) - segment_hash = helper.generate_text_hash(content_str) - # calc embedding use tokens - tokens = embedding_model.get_text_embedding_num_tokens(texts=[content_str]) if embedding_model else 0 - max_position = ( - 
db.session.query(func.max(DocumentSegment.position)) - .filter(DocumentSegment.document_id == dataset_document.id) - .scalar() - ) - segment_document = DocumentSegment( - tenant_id=tenant_id, - dataset_id=dataset_id, - document_id=document_id, - index_node_id=doc_id, - index_node_hash=segment_hash, - position=max_position + 1 if max_position else 1, - content=content_str, - word_count=len(content_str), - tokens=tokens, - created_by=user_id, - indexing_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None), - status="completed", - completed_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None), - ) - if dataset_document.doc_form == "qa_model": - segment_document.answer = segment["answer"] - segment_document.word_count += len(segment["answer"]) - word_count_change += segment_document.word_count - db.session.add(segment_document) - document_segments.append(segment_document) - segments_to_insert.append(str(segment)) # Cast to string if needed - # update document word count - dataset_document.word_count += word_count_change - db.session.add(dataset_document) - # add index to db - VectorService.create_segments_vector(None, document_segments, dataset, dataset_document.doc_form) - db.session.commit() + max_position = (session.scalar(max_position_stmt) or 0) + 1 + for segment in content: + content_str = segment["content"] + doc_id = str(uuid.uuid4()) + segment_hash = helper.generate_text_hash(content_str) + # calc embedding use tokens + tokens = embedding_model.get_text_embedding_num_tokens(texts=[content_str]) if embedding_model else 0 + segment_document = DocumentSegment( + tenant_id=tenant_id, + dataset_id=dataset_id, + document_id=document_id, + index_node_id=doc_id, + index_node_hash=segment_hash, + position=max_position, + content=content_str, + word_count=len(content_str), + tokens=tokens, + created_by=user_id, + indexing_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + status="completed", + completed_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + ) + max_position += 1 + if dataset_document.doc_form == "qa_model": + segment_document.answer = segment["answer"] + segment_document.word_count += len(segment["answer"]) + word_count_change += segment_document.word_count + session.add(segment_document) + document_segments.append(segment_document) + segments_to_insert.append(str(segment)) # Cast to string if needed + # update document word count + dataset_document.word_count += word_count_change + session.add(dataset_document) + # add index to db + VectorService.create_segments_vector(None, document_segments, dataset, dataset_document.doc_form) + session.commit() + redis_client.setex(indexing_cache_key, 600, "completed") end_at = time.perf_counter() logging.info( - click.style("Segment batch created job: {} latency: {}".format(job_id, end_at - start_at), fg="green") + click.style( + "Segment batch created job: {} latency: {}".format(job_id, end_at - start_at), + fg="green", + ) ) except Exception as e: logging.exception("Segments batch created index failed") diff --git a/api/tests/artifact_tests/dependencies/test_dependencies_sorted.py b/api/tests/artifact_tests/dependencies/test_dependencies_sorted.py index 57fba31763..0ec0783112 100644 --- a/api/tests/artifact_tests/dependencies/test_dependencies_sorted.py +++ b/api/tests/artifact_tests/dependencies/test_dependencies_sorted.py @@ -44,6 +44,6 @@ def test_duplicated_dependency_crossing_groups() -> None: dependency_names = list(dependencies.keys()) all_dependency_names.extend(dependency_names)
expected_all_dependency_names = set(all_dependency_names) - assert sorted(expected_all_dependency_names) == sorted( - all_dependency_names - ), "Duplicated dependencies crossing groups are found" + assert sorted(expected_all_dependency_names) == sorted(all_dependency_names), ( + "Duplicated dependencies crossing groups are found" + ) diff --git a/api/tests/integration_tests/vdb/opensearch/test_opensearch.py b/api/tests/integration_tests/vdb/opensearch/test_opensearch.py index 2666ce2e1e..35eed75c2f 100644 --- a/api/tests/integration_tests/vdb/opensearch/test_opensearch.py +++ b/api/tests/integration_tests/vdb/opensearch/test_opensearch.py @@ -89,9 +89,9 @@ class TestOpenSearchVector: print("Actual document ID:", hits_by_vector[0].metadata["document_id"] if hits_by_vector else "No hits") assert len(hits_by_vector) > 0, f"Expected at least one hit, got {len(hits_by_vector)}" - assert ( - hits_by_vector[0].metadata["document_id"] == self.example_doc_id - ), f"Expected document ID {self.example_doc_id}, got {hits_by_vector[0].metadata['document_id']}" + assert hits_by_vector[0].metadata["document_id"] == self.example_doc_id, ( + f"Expected document ID {self.example_doc_id}, got {hits_by_vector[0].metadata['document_id']}" + ) def test_get_ids_by_metadata_field(self): mock_response = {"hits": {"total": {"value": 1}, "hits": [{"_id": "mock_id"}]}} diff --git a/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py b/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py index 7e979bcaa8..74af5eb56b 100644 --- a/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py +++ b/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py @@ -438,9 +438,9 @@ def test_fetch_prompt_messages__basic(faker, llm_node, model_config): # Verify the result assert len(prompt_messages) == len(scenario.expected_messages), f"Scenario failed: {scenario.description}" - assert ( - prompt_messages == scenario.expected_messages - ), f"Message content mismatch in scenario: {scenario.description}" + assert prompt_messages == scenario.expected_messages, ( + f"Message content mismatch in scenario: {scenario.description}" + ) def test_handle_list_messages_basic(llm_node): diff --git a/api/tests/unit_tests/services/workflow/test_workflow_converter.py b/api/tests/unit_tests/services/workflow/test_workflow_converter.py index 805d92dfc9..0a09167349 100644 --- a/api/tests/unit_tests/services/workflow/test_workflow_converter.py +++ b/api/tests/unit_tests/services/workflow/test_workflow_converter.py @@ -401,8 +401,7 @@ def test__convert_to_llm_node_for_workflow_advanced_completion_model(default_var prompt_template = PromptTemplateEntity( prompt_type=PromptTemplateEntity.PromptType.ADVANCED, advanced_completion_prompt_template=AdvancedCompletionPromptTemplateEntity( - prompt="You are a helpful assistant named {{name}}.\n\nContext:\n{{#context#}}\n\n" - "Human: hi\nAssistant: ", + prompt="You are a helpful assistant named {{name}}.\n\nContext:\n{{#context#}}\n\nHuman: hi\nAssistant: ", role_prefix=AdvancedCompletionPromptTemplateEntity.RolePrefixEntity(user="Human", assistant="Assistant"), ), ) diff --git a/dev/reformat b/dev/reformat index 94a7f3e6fe..82f96b8e8f 100755 --- a/dev/reformat +++ b/dev/reformat @@ -9,10 +9,10 @@ if ! command -v ruff &> /dev/null || ! 
command -v dotenv-linter &> /dev/null; th fi # run ruff linter -poetry run -C api ruff check --fix ./api +poetry run -C api ruff check --fix ./ # run ruff formatter -poetry run -C api ruff format ./api +poetry run -C api ruff format ./ # run dotenv-linter linter -poetry run -C api dotenv-linter ./api/.env.example ./web/.env.example +poetry run -P api dotenv-linter ./api/.env.example ./web/.env.example diff --git a/dev/sync-poetry b/dev/sync-poetry index 23d5d79e90..766382da01 100755 --- a/dev/sync-poetry +++ b/dev/sync-poetry @@ -12,7 +12,7 @@ if [ $? -ne 0 ]; then # update poetry.lock # refreshing lockfile only without updating locked versions echo "poetry.lock is outdated, refreshing without updating locked versions ..." - poetry lock -C api --no-update + poetry lock -C api else echo "poetry.lock is ready." fi diff --git a/docker-legacy/docker-compose.yaml b/docker-legacy/docker-compose.yaml index c8bf382bcd..6e4c8a748e 100644 --- a/docker-legacy/docker-compose.yaml +++ b/docker-legacy/docker-compose.yaml @@ -2,7 +2,7 @@ version: '3' services: # API service api: - image: langgenius/dify-api:0.15.0 + image: langgenius/dify-api:0.15.1 restart: always environment: # Startup mode, 'api' starts the API server. @@ -227,7 +227,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:0.15.0 + image: langgenius/dify-api:0.15.1 restart: always environment: CONSOLE_WEB_URL: '' @@ -397,7 +397,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:0.15.0 + image: langgenius/dify-web:0.15.1 restart: always environment: # The base URL of console application api server, refers to the Console base URL of WEB service if console domain is diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index 6d70f14424..d24e7c181f 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:0.15.0 + image: langgenius/dify-api:0.15.1 restart: always environment: # Use the shared environment variables. @@ -25,7 +25,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:0.15.0 + image: langgenius/dify-api:0.15.1 restart: always environment: # Use the shared environment variables. @@ -47,7 +47,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:0.15.0 + image: langgenius/dify-web:0.15.1 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -57,6 +57,7 @@ services: TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000} CSP_WHITELIST: ${CSP_WHITELIST:-} TOP_K_MAX_VALUE: ${TOP_K_MAX_VALUE:-} + INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: ${INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH:-} # The postgres database. db: diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 173a88bc4c..21e72a4cd6 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -393,7 +393,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:0.15.0 + image: langgenius/dify-api:0.15.1 restart: always environment: # Use the shared environment variables. @@ -416,7 +416,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:0.15.0 + image: langgenius/dify-api:0.15.1 restart: always environment: # Use the shared environment variables. 
@@ -438,7 +438,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:0.15.0 + image: langgenius/dify-web:0.15.1 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -448,6 +448,7 @@ services: TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000} CSP_WHITELIST: ${CSP_WHITELIST:-} TOP_K_MAX_VALUE: ${TOP_K_MAX_VALUE:-} + INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: ${INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH:-} # The postgres database. db: diff --git a/web/.env.example b/web/.env.example index 2decef02fa..e2117ddfd8 100644 --- a/web/.env.example +++ b/web/.env.example @@ -28,3 +28,6 @@ NEXT_PUBLIC_CSP_WHITELIST= # The maximum number of top-k value for RAG. NEXT_PUBLIC_TOP_K_MAX_VALUE=10 + +# The maximum number of tokens for segmentation +NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000 diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx index 8e3d8f9ec6..17f46c258d 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx @@ -5,7 +5,7 @@ import { useTranslation } from 'react-i18next' import { useBoolean } from 'ahooks' import TracingIcon from './tracing-icon' import ProviderPanel from './provider-panel' -import type { LangFuseConfig, LangSmithConfig } from './type' +import type { LangFuseConfig, LangSmithConfig, OpikConfig } from './type' import { TracingProvider } from './type' import ProviderConfigModal from './provider-config-modal' import Indicator from '@/app/components/header/indicator' @@ -23,7 +23,8 @@ export type PopupProps = { onChooseProvider: (provider: TracingProvider) => void langSmithConfig: LangSmithConfig | null langFuseConfig: LangFuseConfig | null - onConfigUpdated: (provider: TracingProvider, payload: LangSmithConfig | LangFuseConfig) => void + opikConfig: OpikConfig | null + onConfigUpdated: (provider: TracingProvider, payload: LangSmithConfig | LangFuseConfig | OpikConfig) => void onConfigRemoved: (provider: TracingProvider) => void } @@ -36,6 +37,7 @@ const ConfigPopup: FC = ({ onChooseProvider, langSmithConfig, langFuseConfig, + opikConfig, onConfigUpdated, onConfigRemoved, }) => { @@ -59,7 +61,7 @@ const ConfigPopup: FC = ({ } }, [onChooseProvider]) - const handleConfigUpdated = useCallback((payload: LangSmithConfig | LangFuseConfig) => { + const handleConfigUpdated = useCallback((payload: LangSmithConfig | LangFuseConfig | OpikConfig) => { onConfigUpdated(currentProvider!, payload) hideConfigModal() }, [currentProvider, hideConfigModal, onConfigUpdated]) @@ -69,8 +71,8 @@ const ConfigPopup: FC = ({ hideConfigModal() }, [currentProvider, hideConfigModal, onConfigRemoved]) - const providerAllConfigured = langSmithConfig && langFuseConfig - const providerAllNotConfigured = !langSmithConfig && !langFuseConfig + const providerAllConfigured = langSmithConfig && langFuseConfig && opikConfig + const providerAllNotConfigured = !langSmithConfig && !langFuseConfig && !opikConfig const switchContent = ( = ({ onConfig={handleOnConfig(TracingProvider.langSmith)} isChosen={chosenProvider === TracingProvider.langSmith} onChoose={handleOnChoose(TracingProvider.langSmith)} + key="langSmith-provider-panel" /> ) @@ -102,9 +105,61 @@ const ConfigPopup: FC = ({ onConfig={handleOnConfig(TracingProvider.langfuse)} isChosen={chosenProvider === TracingProvider.langfuse} 
onChoose={handleOnChoose(TracingProvider.langfuse)} + key="langfuse-provider-panel" /> ) + const opikPanel = ( + + ) + + const configuredProviderPanel = () => { + const configuredPanels: ProviderPanel[] = [] + + if (langSmithConfig) + configuredPanels.push(langSmithPanel) + + if (langFuseConfig) + configuredPanels.push(langfusePanel) + + if (opikConfig) + configuredPanels.push(opikPanel) + + return configuredPanels + } + + const moreProviderPanel = () => { + const notConfiguredPanels: ProviderPanel[] = [] + + if (!langSmithConfig) + notConfiguredPanels.push(langSmithPanel) + + if (!langFuseConfig) + notConfiguredPanels.push(langfusePanel) + + if (!opikConfig) + notConfiguredPanels.push(opikPanel) + + return notConfiguredPanels + } + + const configuredProviderConfig = () => { + if (currentProvider === TracingProvider.langSmith) + return langSmithConfig + if (currentProvider === TracingProvider.langfuse) + return langFuseConfig + return opikConfig + } + return (
@@ -146,18 +201,19 @@ const ConfigPopup: FC = ({
{langSmithPanel} {langfusePanel} + {opikPanel}
) : ( <>
{t(`${I18N_PREFIX}.configProviderTitle.configured`)}
-
- {langSmithConfig ? langSmithPanel : langfusePanel} +
+ {configuredProviderPanel()}
{t(`${I18N_PREFIX}.configProviderTitle.moreProvider`)}
-
- {!langSmithConfig ? langSmithPanel : langfusePanel} +
+ {moreProviderPanel()}
)} @@ -167,7 +223,7 @@ const ConfigPopup: FC = ({ { }) } const inUseTracingProvider: TracingProvider | null = tracingStatus?.tracing_provider || null - const InUseProviderIcon = inUseTracingProvider === TracingProvider.langSmith ? LangsmithIcon : LangfuseIcon + + const InUseProviderIcon + = inUseTracingProvider === TracingProvider.langSmith + ? LangsmithIcon + : inUseTracingProvider === TracingProvider.langfuse + ? LangfuseIcon + : inUseTracingProvider === TracingProvider.opik + ? OpikIcon + : null const [langSmithConfig, setLangSmithConfig] = useState(null) const [langFuseConfig, setLangFuseConfig] = useState(null) - const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig) + const [opikConfig, setOpikConfig] = useState(null) + const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig || opikConfig) const fetchTracingConfig = async () => { const { tracing_config: langSmithConfig, has_not_configured: langSmithHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.langSmith }) @@ -83,6 +92,9 @@ const Panel: FC = () => { const { tracing_config: langFuseConfig, has_not_configured: langFuseHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.langfuse }) if (!langFuseHasNotConfig) setLangFuseConfig(langFuseConfig as LangFuseConfig) + const { tracing_config: opikConfig, has_not_configured: OpikHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.opik }) + if (!OpikHasNotConfig) + setOpikConfig(opikConfig as OpikConfig) } const handleTracingConfigUpdated = async (provider: TracingProvider) => { @@ -90,15 +102,19 @@ const Panel: FC = () => { const { tracing_config } = await doFetchTracingConfig({ appId, provider }) if (provider === TracingProvider.langSmith) setLangSmithConfig(tracing_config as LangSmithConfig) - else + else if (provider === TracingProvider.langSmith) setLangFuseConfig(tracing_config as LangFuseConfig) + else if (provider === TracingProvider.opik) + setOpikConfig(tracing_config as OpikConfig) } const handleTracingConfigRemoved = (provider: TracingProvider) => { if (provider === TracingProvider.langSmith) setLangSmithConfig(null) - else + else if (provider === TracingProvider.langSmith) setLangFuseConfig(null) + else if (provider === TracingProvider.opik) + setOpikConfig(null) if (provider === inUseTracingProvider) { handleTracingStatusChange({ enabled: false, @@ -167,6 +183,7 @@ const Panel: FC = () => { onChooseProvider={handleChooseProvider} langSmithConfig={langSmithConfig} langFuseConfig={langFuseConfig} + opikConfig={opikConfig} onConfigUpdated={handleTracingConfigUpdated} onConfigRemoved={handleTracingConfigRemoved} controlShowPopup={controlShowPopup} diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx index e7ecd2f4ce..b813e9b134 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx @@ -4,7 +4,7 @@ import React, { useCallback, useState } from 'react' import { useTranslation } from 'react-i18next' import { useBoolean } from 'ahooks' import Field from './field' -import type { LangFuseConfig, LangSmithConfig } from './type' +import type { LangFuseConfig, LangSmithConfig, OpikConfig } from './type' import { TracingProvider } from './type' import { docURL } from './config' import { 
@@ -21,10 +21,10 @@ import Toast from '@/app/components/base/toast' type Props = { appId: string type: TracingProvider - payload?: LangSmithConfig | LangFuseConfig | null + payload?: LangSmithConfig | LangFuseConfig | OpikConfig | null onRemoved: () => void onCancel: () => void - onSaved: (payload: LangSmithConfig | LangFuseConfig) => void + onSaved: (payload: LangSmithConfig | LangFuseConfig | OpikConfig) => void onChosen: (provider: TracingProvider) => void } @@ -42,6 +42,13 @@ const langFuseConfigTemplate = { host: '', } +const opikConfigTemplate = { + api_key: '', + project: '', + url: '', + workspace: '', +} + const ProviderConfigModal: FC = ({ appId, type, @@ -55,14 +62,17 @@ const ProviderConfigModal: FC = ({ const isEdit = !!payload const isAdd = !isEdit const [isSaving, setIsSaving] = useState(false) - const [config, setConfig] = useState((() => { + const [config, setConfig] = useState((() => { if (isEdit) return payload if (type === TracingProvider.langSmith) return langSmithConfigTemplate - return langFuseConfigTemplate + else if (type === TracingProvider.langfuse) + return langFuseConfigTemplate + + return opikConfigTemplate })()) const [isShowRemoveConfirm, { setTrue: showRemoveConfirm, @@ -111,6 +121,10 @@ const ProviderConfigModal: FC = ({ errorMessage = t('common.errorMsg.fieldRequired', { field: 'Host' }) } + if (type === TracingProvider.opik) { + const postData = config as OpikConfig + } + return errorMessage }, [config, t, type]) const handleSave = useCallback(async () => { @@ -215,6 +229,38 @@ const ProviderConfigModal: FC = ({ /> )} + {type === TracingProvider.opik && ( + <> + + + + + + )}
diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-panel.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-panel.tsx index 6e5046ecf8..34e5bbeb0f 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-panel.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-panel.tsx @@ -4,7 +4,7 @@ import React, { useCallback } from 'react' import { useTranslation } from 'react-i18next' import { TracingProvider } from './type' import cn from '@/utils/classnames' -import { LangfuseIconBig, LangsmithIconBig } from '@/app/components/base/icons/src/public/tracing' +import { LangfuseIconBig, LangsmithIconBig, OpikIconBig } from '@/app/components/base/icons/src/public/tracing' import { Settings04 } from '@/app/components/base/icons/src/vender/line/general' import { Eye as View } from '@/app/components/base/icons/src/vender/solid/general' @@ -24,6 +24,7 @@ const getIcon = (type: TracingProvider) => { return ({ [TracingProvider.langSmith]: LangsmithIconBig, [TracingProvider.langfuse]: LangfuseIconBig, + [TracingProvider.opik]: OpikIconBig, })[type] } diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts index e07cf37c9d..982d01ffb3 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts @@ -1,6 +1,7 @@ export enum TracingProvider { langSmith = 'langsmith', langfuse = 'langfuse', + opik = 'opik', } export type LangSmithConfig = { @@ -14,3 +15,10 @@ export type LangFuseConfig = { secret_key: string host: string } + +export type OpikConfig = { + api_key: string + project: string + workspace: string + url: string +} diff --git a/web/app/(commonLayout)/datasets/Container.tsx b/web/app/(commonLayout)/datasets/Container.tsx index c39d9c5dbf..f484d30a3d 100644 --- a/web/app/(commonLayout)/datasets/Container.tsx +++ b/web/app/(commonLayout)/datasets/Container.tsx @@ -82,7 +82,7 @@ const Container = () => { }, [currentWorkspace, router]) return ( -
+
= ({ - apiBaseUrl, -}) => { - const { locale } = useContext(I18n) +const Doc = ({ apiBaseUrl }: DocProps) => { + const { locale } = useContext(I18n) + const { t } = useTranslation() + const [toc, setToc] = useState>([]) + const [isTocExpanded, setIsTocExpanded] = useState(false) + + // Set initial TOC expanded state based on screen width useEffect(() => { - const hash = location.hash - if (hash) - document.querySelector(hash)?.scrollIntoView() + const mediaQuery = window.matchMedia('(min-width: 1280px)') + setIsTocExpanded(mediaQuery.matches) }, []) + // Extract TOC from article content + useEffect(() => { + const extractTOC = () => { + const article = document.querySelector('article') + if (article) { + const headings = article.querySelectorAll('h2') + const tocItems = Array.from(headings).map((heading) => { + const anchor = heading.querySelector('a') + if (anchor) { + return { + href: anchor.getAttribute('href') || '', + text: anchor.textContent || '', + } + } + return null + }).filter((item): item is { href: string; text: string } => item !== null) + setToc(tocItems) + } + } + + setTimeout(extractTOC, 0) + }, [locale]) + + // Handle TOC item click + const handleTocClick = (e: React.MouseEvent, item: { href: string; text: string }) => { + e.preventDefault() + const targetId = item.href.replace('#', '') + const element = document.getElementById(targetId) + if (element) { + const scrollContainer = document.querySelector('.scroll-container') + if (scrollContainer) { + const headerOffset = -40 + const elementTop = element.offsetTop - headerOffset + scrollContainer.scrollTo({ + top: elementTop, + behavior: 'smooth', + }) + } + } + } + return ( -
- { - locale !== LanguagesSupported[1] +
+
+ {isTocExpanded + ? ( + + ) + : ( + + )} +
+
+ {locale !== LanguagesSupported[1] ? : - } -
+ } +
+
) } diff --git a/web/app/(shareLayout)/layout.tsx b/web/app/(shareLayout)/layout.tsx index 259af2bc2d..94ac1deb0b 100644 --- a/web/app/(shareLayout)/layout.tsx +++ b/web/app/(shareLayout)/layout.tsx @@ -1,7 +1,6 @@ import React from 'react' import type { FC } from 'react' import type { Metadata } from 'next' -import GA, { GaType } from '@/app/components/base/ga' export const metadata: Metadata = { icons: 'data:,', // prevent browser from using default favicon @@ -12,7 +11,6 @@ const Layout: FC<{ }> = ({ children }) => { return (
- {children}
) diff --git a/web/app/account/account-page/AvatarWithEdit.tsx b/web/app/account/account-page/AvatarWithEdit.tsx new file mode 100644 index 0000000000..97f6ba8da6 --- /dev/null +++ b/web/app/account/account-page/AvatarWithEdit.tsx @@ -0,0 +1,122 @@ +'use client' + +import type { Area } from 'react-easy-crop' +import React, { useCallback, useState } from 'react' +import { useTranslation } from 'react-i18next' +import { useContext } from 'use-context-selector' +import { RiPencilLine } from '@remixicon/react' +import { updateUserProfile } from '@/service/common' +import { ToastContext } from '@/app/components/base/toast' +import ImageInput, { type OnImageInput } from '@/app/components/base/app-icon-picker/ImageInput' +import Modal from '@/app/components/base/modal' +import Divider from '@/app/components/base/divider' +import Button from '@/app/components/base/button' +import Avatar, { type AvatarProps } from '@/app/components/base/avatar' +import { useLocalFileUploader } from '@/app/components/base/image-uploader/hooks' +import type { ImageFile } from '@/types/app' +import getCroppedImg from '@/app/components/base/app-icon-picker/utils' +import { DISABLE_UPLOAD_IMAGE_AS_ICON } from '@/config' + +type InputImageInfo = { file: File } | { tempUrl: string; croppedAreaPixels: Area; fileName: string } +type AvatarWithEditProps = AvatarProps & { onSave?: () => void } + +const AvatarWithEdit = ({ onSave, ...props }: AvatarWithEditProps) => { + const { t } = useTranslation() + const { notify } = useContext(ToastContext) + + const [inputImageInfo, setInputImageInfo] = useState() + const [isShowAvatarPicker, setIsShowAvatarPicker] = useState(false) + const [uploading, setUploading] = useState(false) + + const handleImageInput: OnImageInput = useCallback(async (isCropped: boolean, fileOrTempUrl: string | File, croppedAreaPixels?: Area, fileName?: string) => { + setInputImageInfo( + isCropped + ? { tempUrl: fileOrTempUrl as string, croppedAreaPixels: croppedAreaPixels!, fileName: fileName! } + : { file: fileOrTempUrl as File }, + ) + }, [setInputImageInfo]) + + const handleSaveAvatar = useCallback(async (uploadedFileId: string) => { + try { + await updateUserProfile({ url: 'account/avatar', body: { avatar: uploadedFileId } }) + notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') }) + setIsShowAvatarPicker(false) + onSave?.() + } + catch (e) { + notify({ type: 'error', message: (e as Error).message }) + } + }, [notify, onSave, t]) + + const { handleLocalFileUpload } = useLocalFileUploader({ + limit: 3, + disabled: false, + onUpload: (imageFile: ImageFile) => { + if (imageFile.progress === 100) { + setUploading(false) + setInputImageInfo(undefined) + handleSaveAvatar(imageFile.fileId) + } + + // Error + if (imageFile.progress === -1) + setUploading(false) + }, + }) + + const handleSelect = useCallback(async () => { + if (!inputImageInfo) + return + setUploading(true) + if ('file' in inputImageInfo) { + handleLocalFileUpload(inputImageInfo.file) + return + } + const blob = await getCroppedImg(inputImageInfo.tempUrl, inputImageInfo.croppedAreaPixels, inputImageInfo.fileName) + const file = new File([blob], inputImageInfo.fileName, { type: blob.type }) + handleLocalFileUpload(file) + }, [handleLocalFileUpload, inputImageInfo]) + + if (DISABLE_UPLOAD_IMAGE_AS_ICON) + return + + return ( + <> +
+
+ +
{ setIsShowAvatarPicker(true) }} + className="absolute inset-0 bg-black bg-opacity-50 rounded-full opacity-0 group-hover:opacity-100 transition-opacity cursor-pointer flex items-center justify-center" + > + + + +
+
+
+ + setIsShowAvatarPicker(false)} + > + + + +
+ + + +
+
+ + ) +} + +export default AvatarWithEdit diff --git a/web/app/account/account-page/index.tsx b/web/app/account/account-page/index.tsx index 4435019561..16d826a7c2 100644 --- a/web/app/account/account-page/index.tsx +++ b/web/app/account/account-page/index.tsx @@ -5,6 +5,7 @@ import { useTranslation } from 'react-i18next' import { useContext } from 'use-context-selector' import DeleteAccount from '../delete-account' import s from './index.module.css' +import AvatarWithEdit from './AvatarWithEdit' import Collapse from '@/app/components/header/account-setting/collapse' import type { IItem } from '@/app/components/header/account-setting/collapse' import Modal from '@/app/components/base/modal' @@ -13,7 +14,6 @@ import { updateUserProfile } from '@/service/common' import { useAppContext } from '@/context/app-context' import { ToastContext } from '@/app/components/base/toast' import AppIcon from '@/app/components/base/app-icon' -import Avatar from '@/app/components/base/avatar' import { IS_CE_EDITION } from '@/config' import Input from '@/app/components/base/input' @@ -133,7 +133,7 @@ export default function AccountPage() {

{t('common.account.myAccount')}

- +

{userProfile.name}

{userProfile.email}

diff --git a/web/app/account/avatar.tsx b/web/app/account/avatar.tsx index 8fdecc07bf..47e8e75747 100644 --- a/web/app/account/avatar.tsx +++ b/web/app/account/avatar.tsx @@ -45,7 +45,7 @@ export default function AppSelector() { ${open && 'bg-components-panel-bg-blur'} `} > - +
{userProfile.name}
{userProfile.email}
- +
diff --git a/web/app/components/app/configuration/debug/debug-with-multiple-model/chat-item.tsx b/web/app/components/app/configuration/debug/debug-with-multiple-model/chat-item.tsx index 1144c323d1..119db34b16 100644 --- a/web/app/components/app/configuration/debug/debug-with-multiple-model/chat-item.tsx +++ b/web/app/components/app/configuration/debug/debug-with-multiple-model/chat-item.tsx @@ -149,7 +149,7 @@ const ChatItem: FC = ({ suggestedQuestions={suggestedQuestions} onSend={doSend} showPromptLog - questionIcon={} + questionIcon={} allToolIcons={allToolIcons} hideLogModal noSpacing diff --git a/web/app/components/app/configuration/debug/debug-with-single-model/index.tsx b/web/app/components/app/configuration/debug/debug-with-single-model/index.tsx index 2cbfe91f16..48e1e55de4 100644 --- a/web/app/components/app/configuration/debug/debug-with-single-model/index.tsx +++ b/web/app/components/app/configuration/debug/debug-with-single-model/index.tsx @@ -175,7 +175,7 @@ const DebugWithSingleModel = forwardRef} + questionIcon={} allToolIcons={allToolIcons} onAnnotationEdited={handleAnnotationEdited} onAnnotationAdded={handleAnnotationAdded} diff --git a/web/app/components/app/log/var-panel.tsx b/web/app/components/app/log/var-panel.tsx index 3ae4bfb5c6..eef1a1a4c0 100644 --- a/web/app/components/app/log/var-panel.tsx +++ b/web/app/components/app/log/var-panel.tsx @@ -39,7 +39,7 @@ const VarPanel: FC = ({ }
{!isCollapse && ( -
+
{varList.map(({ label, value }, index) => (
diff --git a/web/app/components/app/overview/settings/index.tsx b/web/app/components/app/overview/settings/index.tsx index e7cc4148ef..f9d13b9272 100644 --- a/web/app/components/app/overview/settings/index.tsx +++ b/web/app/components/app/overview/settings/index.tsx @@ -1,26 +1,33 @@ 'use client' import type { FC } from 'react' -import React, { useEffect, useState } from 'react' -import { ChevronRightIcon } from '@heroicons/react/20/solid' +import React, { useCallback, useEffect, useState } from 'react' +import { RiArrowRightSLine, RiCloseLine } from '@remixicon/react' import Link from 'next/link' import { Trans, useTranslation } from 'react-i18next' -import { useContextSelector } from 'use-context-selector' -import s from './style.module.css' +import { useContext, useContextSelector } from 'use-context-selector' +import { SparklesSoft } from '@/app/components/base/icons/src/public/common' import Modal from '@/app/components/base/modal' +import ActionButton from '@/app/components/base/action-button' import Button from '@/app/components/base/button' +import Divider from '@/app/components/base/divider' import Input from '@/app/components/base/input' import Textarea from '@/app/components/base/textarea' import AppIcon from '@/app/components/base/app-icon' import Switch from '@/app/components/base/switch' +import PremiumBadge from '@/app/components/base/premium-badge' import { SimpleSelect } from '@/app/components/base/select' import type { AppDetailResponse } from '@/models/app' import type { AppIconType, AppSSO, Language } from '@/types/app' import { useToastContext } from '@/app/components/base/toast' -import { languages } from '@/i18n/language' +import { LanguagesSupported, languages } from '@/i18n/language' import Tooltip from '@/app/components/base/tooltip' import AppContext, { useAppContext } from '@/context/app-context' +import { useProviderContext } from '@/context/provider-context' +import { useModalContext } from '@/context/modal-context' import type { AppIconSelection } from '@/app/components/base/app-icon-picker' import AppIconPicker from '@/app/components/base/app-icon-picker' +import I18n from '@/context/i18n' +import cn from '@/utils/classnames' export type ISettingsModalProps = { isChat: boolean @@ -84,6 +91,7 @@ const SettingsModal: FC = ({ chatColorTheme: chat_color_theme, chatColorThemeInverted: chat_color_theme_inverted, copyright, + copyrightSwitchValue: !!copyright, privacyPolicy: privacy_policy, customDisclaimer: custom_disclaimer, show_workflow_steps, @@ -93,6 +101,7 @@ const SettingsModal: FC = ({ const [language, setLanguage] = useState(default_language) const [saveLoading, setSaveLoading] = useState(false) const { t } = useTranslation() + const { locale } = useContext(I18n) const [showAppIconPicker, setShowAppIconPicker] = useState(false) const [appIcon, setAppIcon] = useState( @@ -100,7 +109,16 @@ const SettingsModal: FC = ({ ? { type: 'image', url: icon_url!, fileId: icon } : { type: 'emoji', icon, background: icon_background! 
}, ) - const isChatBot = appInfo.mode === 'chat' || appInfo.mode === 'advanced-chat' || appInfo.mode === 'agent-chat' + + const { enableBilling, plan } = useProviderContext() + const { setShowPricingModal, setShowAccountSettingModal } = useModalContext() + const isFreePlan = plan.type === 'sandbox' + const handlePlanClick = useCallback(() => { + if (isFreePlan) + setShowPricingModal() + else + setShowAccountSettingModal({ payload: 'billing' }) + }, [isFreePlan, setShowAccountSettingModal, setShowPricingModal]) useEffect(() => { setInputInfo({ @@ -109,6 +127,7 @@ const SettingsModal: FC = ({ chatColorTheme: chat_color_theme, chatColorThemeInverted: chat_color_theme_inverted, copyright, + copyrightSwitchValue: !!copyright, privacyPolicy: privacy_policy, customDisclaimer: custom_disclaimer, show_workflow_steps, @@ -158,7 +177,11 @@ const SettingsModal: FC = ({ chat_color_theme: inputInfo.chatColorTheme, chat_color_theme_inverted: inputInfo.chatColorThemeInverted, prompt_public: false, - copyright: inputInfo.copyright, + copyright: isFreePlan + ? '' + : inputInfo.copyrightSwitchValue + ? inputInfo.copyright + : '', privacy_policy: inputInfo.privacyPolicy, custom_disclaimer: inputInfo.customDisclaimer, icon_type: appIcon.type, @@ -192,141 +215,232 @@ const SettingsModal: FC = ({ return ( <> -
{t(`${prefixSettings}.webName`)}
-
- { setShowAppIconPicker(true) }} - className='cursor-pointer !mr-3 self-center' - iconType={appIcon.type} - icon={appIcon.type === 'image' ? appIcon.fileId : appIcon.icon} - background={appIcon.type === 'image' ? undefined : appIcon.background} - imageUrl={appIcon.type === 'image' ? appIcon.url : undefined} - /> - + {/* header */} +
+
+
{t(`${prefixSettings}.title`)}
+ + + +
+
+ {t(`${prefixSettings}.modalTip`)} + {t('common.operation.learnMore')} +
-
{t(`${prefixSettings}.webDesc`)}
-

{t(`${prefixSettings}.webDescTip`)}

-