diff --git a/.devcontainer/post_create_command.sh b/.devcontainer/post_create_command.sh index 79796393a2..d879876d8a 100755 --- a/.devcontainer/post_create_command.sh +++ b/.devcontainer/post_create_command.sh @@ -8,5 +8,6 @@ echo 'alias start-api="cd /workspaces/dify/api && poetry run python -m flask run echo 'alias start-worker="cd /workspaces/dify/api && poetry run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion"' >> ~/.bashrc echo 'alias start-web="cd /workspaces/dify/web && pnpm dev"' >> ~/.bashrc echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify up -d"' >> ~/.bashrc +echo 'alias stop-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify down"' >> ~/.bashrc source /home/vscode/.bashrc diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index f7221fb4d1..aed7fe8de3 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -37,6 +37,7 @@ jobs: - name: Ruff check if: steps.changed-files.outputs.any_changed == 'true' run: | + poetry run -C api ruff --version poetry run -C api ruff check ./api poetry run -C api ruff format --check ./api diff --git a/.github/workflows/vdb-tests.yml b/.github/workflows/vdb-tests.yml index 73af370063..146bee95f2 100644 --- a/.github/workflows/vdb-tests.yml +++ b/.github/workflows/vdb-tests.yml @@ -51,7 +51,7 @@ jobs: - name: Expose Service Ports run: sh .github/workflows/expose_service_ports.sh - - name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase) + - name: Set up Vector Stores (TiDB, Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase) uses: hoverkraft-tech/compose-action@v2.0.2 with: compose-file: | @@ -67,6 +67,7 @@ jobs: pgvector chroma elasticsearch + tidb - name: Test Vector Stores run: poetry run -C api bash dev/pytest/pytest_vdb.sh diff --git a/api/.env.example b/api/.env.example index 52cdd9ecb2..1ff6b3be8b 100644 --- a/api/.env.example +++ b/api/.env.example @@ -56,20 +56,36 @@ DB_DATABASE=dify # Storage configuration # use for store upload files, private keys... 
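Note: the .env hunk below swaps the default storage backend from the bespoke `local` driver to Apache OpenDAL. For orientation, here is a minimal, self-contained sketch of what the new `STORAGE_OPENDAL_SCHEME` / `OPENDAL_*` variables map to when handed to the `opendal` Python binding directly; this is not Dify's storage wrapper, and the root, bucket, and credential values are placeholders.

```python
import opendal

# fs scheme: files live under the directory named by OPENDAL_FS_ROOT
# (an absolute placeholder path is used here)
op = opendal.Operator("fs", root="/tmp/dify-storage")
op.write("upload_files/hello.txt", b"hello")
data = op.read("upload_files/hello.txt")  # b"hello"

# s3 scheme: the same Operator interface, configured from the OPENDAL_S3_* values
s3 = opendal.Operator(
    "s3",
    root="/",
    bucket="your-bucket-name",
    endpoint="https://s3.amazonaws.com",
    access_key_id="your-access-key",
    secret_access_key="your-secret-key",
    region="your-region",
)
```

The point of the switch is that one `Operator` interface covers every scheme OpenDAL supports, which is why the `fs` scheme can replace the now-deprecated `local` storage type.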
-# storage type: local, s3, aliyun-oss, azure-blob, baidu-obs, google-storage, huawei-obs, oci-storage, tencent-cos, volcengine-tos, supabase -STORAGE_TYPE=local -STORAGE_LOCAL_PATH=storage +# storage type: opendal, s3, aliyun-oss, azure-blob, baidu-obs, google-storage, huawei-obs, oci-storage, tencent-cos, volcengine-tos, supabase +STORAGE_TYPE=opendal + +# Apache OpenDAL storage configuration, refer to https://github.com/apache/opendal +STORAGE_OPENDAL_SCHEME=fs +# OpenDAL FS +OPENDAL_FS_ROOT=storage +# OpenDAL S3 +OPENDAL_S3_ROOT=/ +OPENDAL_S3_BUCKET=your-bucket-name +OPENDAL_S3_ENDPOINT=https://s3.amazonaws.com +OPENDAL_S3_ACCESS_KEY_ID=your-access-key +OPENDAL_S3_SECRET_ACCESS_KEY=your-secret-key +OPENDAL_S3_REGION=your-region +OPENDAL_S3_SERVER_SIDE_ENCRYPTION= + +# S3 Storage configuration S3_USE_AWS_MANAGED_IAM=false S3_ENDPOINT=https://your-bucket-name.storage.s3.clooudflare.com S3_BUCKET_NAME=your-bucket-name S3_ACCESS_KEY=your-access-key S3_SECRET_KEY=your-secret-key S3_REGION=your-region + # Azure Blob Storage configuration AZURE_BLOB_ACCOUNT_NAME=your-account-name AZURE_BLOB_ACCOUNT_KEY=your-account-key AZURE_BLOB_CONTAINER_NAME=yout-container-name AZURE_BLOB_ACCOUNT_URL=https://.blob.core.windows.net + # Aliyun oss Storage configuration ALIYUN_OSS_BUCKET_NAME=your-bucket-name ALIYUN_OSS_ACCESS_KEY=your-access-key @@ -79,6 +95,7 @@ ALIYUN_OSS_AUTH_VERSION=v1 ALIYUN_OSS_REGION=your-region # Don't start with '/'. OSS doesn't support leading slash in object names. ALIYUN_OSS_PATH=your-path + # Google Storage configuration GOOGLE_STORAGE_BUCKET_NAME=yout-bucket-name GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64=your-google-service-account-json-base64-string @@ -125,8 +142,8 @@ SUPABASE_URL=your-server-url WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,* CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,* - -# Vector database configuration, support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase +# Vector database configuration +# support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase VECTOR_STORE=weaviate # Weaviate configuration @@ -277,6 +294,7 @@ VIKINGDB_SOCKET_TIMEOUT=30 LINDORM_URL=http://ld-*******************-proxy-search-pub.lindorm.aliyuncs.com:30070 LINDORM_USERNAME=admin LINDORM_PASSWORD=admin +USING_UGC_INDEX=False # OceanBase Vector configuration OCEANBASE_VECTOR_HOST=127.0.0.1 @@ -381,6 +399,8 @@ LOG_FILE_BACKUP_COUNT=5 LOG_DATEFORMAT=%Y-%m-%d %H:%M:%S # Log Timezone LOG_TZ=UTC +# Log format +LOG_FORMAT=%(asctime)s,%(msecs)d %(levelname)-2s [%(filename)s:%(lineno)d] %(req_id)s %(message)s # Indexing configuration INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000 diff --git a/api/configs/app_config.py b/api/configs/app_config.py index 07ef6121cc..ac1ce9db10 100644 --- a/api/configs/app_config.py +++ b/api/configs/app_config.py @@ -1,11 +1,51 @@ -from pydantic_settings import SettingsConfigDict +import logging +from typing import Any -from configs.deploy import DeploymentConfig -from configs.enterprise import EnterpriseFeatureConfig -from configs.extra import ExtraServiceConfig -from configs.feature import FeatureConfig -from configs.middleware import MiddlewareConfig -from configs.packaging import PackagingInfo +from pydantic.fields import FieldInfo +from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict + +from .deploy 
import DeploymentConfig +from .enterprise import EnterpriseFeatureConfig +from .extra import ExtraServiceConfig +from .feature import FeatureConfig +from .middleware import MiddlewareConfig +from .packaging import PackagingInfo +from .remote_settings_sources import RemoteSettingsSource, RemoteSettingsSourceConfig, RemoteSettingsSourceName +from .remote_settings_sources.apollo import ApolloSettingsSource + +logger = logging.getLogger(__name__) + + +class RemoteSettingsSourceFactory(PydanticBaseSettingsSource): + def __init__(self, settings_cls: type[BaseSettings]): + super().__init__(settings_cls) + + def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]: + raise NotImplementedError + + def __call__(self) -> dict[str, Any]: + current_state = self.current_state + remote_source_name = current_state.get("REMOTE_SETTINGS_SOURCE_NAME") + if not remote_source_name: + return {} + + remote_source: RemoteSettingsSource | None = None + match remote_source_name: + case RemoteSettingsSourceName.APOLLO: + remote_source = ApolloSettingsSource(current_state) + case _: + logger.warning(f"Unsupported remote source: {remote_source_name}") + return {} + + d: dict[str, Any] = {} + + for field_name, field in self.settings_cls.model_fields.items(): + field_value, field_key, value_is_complex = remote_source.get_field_value(field, field_name) + field_value = remote_source.prepare_field_value(field_name, field, field_value, value_is_complex) + if field_value is not None: + d[field_key] = field_value + + return d class DifyConfig( @@ -19,6 +59,8 @@ class DifyConfig( MiddlewareConfig, # Extra service configs ExtraServiceConfig, + # Remote source configs + RemoteSettingsSourceConfig, # Enterprise feature configs # **Before using, please contact business@dify.ai by email to inquire about licensing matters.** EnterpriseFeatureConfig, @@ -35,3 +77,20 @@ class DifyConfig( # please consider to arrange it in the proper config group of existed or added # for better readability and maintainability. # Thanks for your concentration and consideration. 
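Note: the `settings_customise_sources` override added below is where the remote source plugs in. pydantic-settings consults the returned tuple in priority order (first wins), so with the ordering chosen here a value from Apollo overrides dotenv values but is itself overridden by real environment variables. A self-contained sketch of that mechanism under the same assumption; `StaticSource` and `DemoConfig` are illustrative stand-ins, not Dify code.

```python
from typing import Any

from pydantic.fields import FieldInfo
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource


class StaticSource(PydanticBaseSettingsSource):
    """Stands in for a remote config store; serves values from a fixed dict."""

    def __init__(self, settings_cls: type[BaseSettings], data: dict[str, Any]):
        super().__init__(settings_cls)
        self._data = data

    def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
        return self._data.get(field_name), field_name, False

    def __call__(self) -> dict[str, Any]:
        # Report only fields we actually hold, like the factory above,
        # which skips fields the remote source resolved to None.
        return {name: self._data[name] for name in self.settings_cls.model_fields if name in self._data}


class DemoConfig(BaseSettings):
    APP_NAME: str = "local-default"

    @classmethod
    def settings_customise_sources(
        cls, settings_cls, init_settings, env_settings, dotenv_settings, file_secret_settings
    ):
        # Earlier entries win: env vars beat the remote source, which beats dotenv files.
        remote = StaticSource(settings_cls, {"APP_NAME": "from-remote"})
        return (init_settings, env_settings, remote, dotenv_settings, file_secret_settings)


print(DemoConfig().APP_NAME)  # "from-remote", unless APP_NAME is set in the environment
```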
+ + @classmethod + def settings_customise_sources( + cls, + settings_cls: type[BaseSettings], + init_settings: PydanticBaseSettingsSource, + env_settings: PydanticBaseSettingsSource, + dotenv_settings: PydanticBaseSettingsSource, + file_secret_settings: PydanticBaseSettingsSource, + ) -> tuple[PydanticBaseSettingsSource, ...]: + return ( + init_settings, + env_settings, + RemoteSettingsSourceFactory(settings_cls), + dotenv_settings, + file_secret_settings, + ) diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py index 57cc805ebf..9265a48d9b 100644 --- a/api/configs/middleware/__init__.py +++ b/api/configs/middleware/__init__.py @@ -1,54 +1,69 @@ -from typing import Any, Optional +from typing import Any, Literal, Optional from urllib.parse import quote_plus from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt, computed_field from pydantic_settings import BaseSettings -from configs.middleware.cache.redis_config import RedisConfig -from configs.middleware.storage.aliyun_oss_storage_config import AliyunOSSStorageConfig -from configs.middleware.storage.amazon_s3_storage_config import S3StorageConfig -from configs.middleware.storage.azure_blob_storage_config import AzureBlobStorageConfig -from configs.middleware.storage.baidu_obs_storage_config import BaiduOBSStorageConfig -from configs.middleware.storage.google_cloud_storage_config import GoogleCloudStorageConfig -from configs.middleware.storage.huawei_obs_storage_config import HuaweiCloudOBSStorageConfig -from configs.middleware.storage.oci_storage_config import OCIStorageConfig -from configs.middleware.storage.supabase_storage_config import SupabaseStorageConfig -from configs.middleware.storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig -from configs.middleware.storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig -from configs.middleware.vdb.analyticdb_config import AnalyticdbConfig -from configs.middleware.vdb.baidu_vector_config import BaiduVectorDBConfig -from configs.middleware.vdb.chroma_config import ChromaConfig -from configs.middleware.vdb.couchbase_config import CouchbaseConfig -from configs.middleware.vdb.elasticsearch_config import ElasticsearchConfig -from configs.middleware.vdb.lindorm_config import LindormConfig -from configs.middleware.vdb.milvus_config import MilvusConfig -from configs.middleware.vdb.myscale_config import MyScaleConfig -from configs.middleware.vdb.oceanbase_config import OceanBaseVectorConfig -from configs.middleware.vdb.opensearch_config import OpenSearchConfig -from configs.middleware.vdb.oracle_config import OracleConfig -from configs.middleware.vdb.pgvector_config import PGVectorConfig -from configs.middleware.vdb.pgvectors_config import PGVectoRSConfig -from configs.middleware.vdb.qdrant_config import QdrantConfig -from configs.middleware.vdb.relyt_config import RelytConfig -from configs.middleware.vdb.tencent_vector_config import TencentVectorDBConfig -from configs.middleware.vdb.tidb_on_qdrant_config import TidbOnQdrantConfig -from configs.middleware.vdb.tidb_vector_config import TiDBVectorConfig -from configs.middleware.vdb.upstash_config import UpstashConfig -from configs.middleware.vdb.vikingdb_config import VikingDBConfig -from configs.middleware.vdb.weaviate_config import WeaviateConfig +from .cache.redis_config import RedisConfig +from .storage.aliyun_oss_storage_config import AliyunOSSStorageConfig +from .storage.amazon_s3_storage_config import S3StorageConfig +from .storage.azure_blob_storage_config import 
AzureBlobStorageConfig +from .storage.baidu_obs_storage_config import BaiduOBSStorageConfig +from .storage.google_cloud_storage_config import GoogleCloudStorageConfig +from .storage.huawei_obs_storage_config import HuaweiCloudOBSStorageConfig +from .storage.oci_storage_config import OCIStorageConfig +from .storage.opendal_storage_config import OpenDALStorageConfig +from .storage.supabase_storage_config import SupabaseStorageConfig +from .storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig +from .storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig +from .vdb.analyticdb_config import AnalyticdbConfig +from .vdb.baidu_vector_config import BaiduVectorDBConfig +from .vdb.chroma_config import ChromaConfig +from .vdb.couchbase_config import CouchbaseConfig +from .vdb.elasticsearch_config import ElasticsearchConfig +from .vdb.lindorm_config import LindormConfig +from .vdb.milvus_config import MilvusConfig +from .vdb.myscale_config import MyScaleConfig +from .vdb.oceanbase_config import OceanBaseVectorConfig +from .vdb.opensearch_config import OpenSearchConfig +from .vdb.oracle_config import OracleConfig +from .vdb.pgvector_config import PGVectorConfig +from .vdb.pgvectors_config import PGVectoRSConfig +from .vdb.qdrant_config import QdrantConfig +from .vdb.relyt_config import RelytConfig +from .vdb.tencent_vector_config import TencentVectorDBConfig +from .vdb.tidb_on_qdrant_config import TidbOnQdrantConfig +from .vdb.tidb_vector_config import TiDBVectorConfig +from .vdb.upstash_config import UpstashConfig +from .vdb.vikingdb_config import VikingDBConfig +from .vdb.weaviate_config import WeaviateConfig class StorageConfig(BaseSettings): - STORAGE_TYPE: str = Field( + STORAGE_TYPE: Literal[ + "opendal", + "s3", + "aliyun-oss", + "azure-blob", + "baidu-obs", + "google-storage", + "huawei-obs", + "oci-storage", + "tencent-cos", + "volcengine-tos", + "supabase", + "local", + ] = Field( description="Type of storage to use." - " Options: 'local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', 'google-storage', 'huawei-obs', " - "'oci-storage', 'tencent-cos', 'volcengine-tos', 'supabase'. Default is 'local'.", - default="local", + " Options: 'opendal', '(deprecated) local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', 'google-storage', " + "'huawei-obs', 'oci-storage', 'tencent-cos', 'volcengine-tos', 'supabase'. 
Default is 'opendal'.", + default="opendal", ) STORAGE_LOCAL_PATH: str = Field( description="Path for local storage when STORAGE_TYPE is set to 'local'.", default="storage", + deprecated=True, ) @@ -73,7 +88,7 @@ class KeywordStoreConfig(BaseSettings): ) -class DatabaseConfig: +class DatabaseConfig(BaseSettings): DB_HOST: str = Field( description="Hostname or IP address of the database server.", default="localhost", @@ -235,6 +250,7 @@ class MiddlewareConfig( GoogleCloudStorageConfig, HuaweiCloudOBSStorageConfig, OCIStorageConfig, + OpenDALStorageConfig, S3StorageConfig, SupabaseStorageConfig, TencentCloudCOSStorageConfig, diff --git a/api/configs/middleware/storage/baidu_obs_storage_config.py b/api/configs/middleware/storage/baidu_obs_storage_config.py index c511628a15..e7913b0acc 100644 --- a/api/configs/middleware/storage/baidu_obs_storage_config.py +++ b/api/configs/middleware/storage/baidu_obs_storage_config.py @@ -1,9 +1,10 @@ from typing import Optional -from pydantic import BaseModel, Field +from pydantic import Field +from pydantic_settings import BaseSettings -class BaiduOBSStorageConfig(BaseModel): +class BaiduOBSStorageConfig(BaseSettings): """ Configuration settings for Baidu Object Storage Service (OBS) """ diff --git a/api/configs/middleware/storage/huawei_obs_storage_config.py b/api/configs/middleware/storage/huawei_obs_storage_config.py index 3e9e7543ab..be983b5187 100644 --- a/api/configs/middleware/storage/huawei_obs_storage_config.py +++ b/api/configs/middleware/storage/huawei_obs_storage_config.py @@ -1,9 +1,10 @@ from typing import Optional -from pydantic import BaseModel, Field +from pydantic import Field +from pydantic_settings import BaseSettings -class HuaweiCloudOBSStorageConfig(BaseModel): +class HuaweiCloudOBSStorageConfig(BaseSettings): """ Configuration settings for Huawei Cloud Object Storage Service (OBS) """ diff --git a/api/configs/middleware/storage/opendal_storage_config.py b/api/configs/middleware/storage/opendal_storage_config.py new file mode 100644 index 0000000000..56a8d24edf --- /dev/null +++ b/api/configs/middleware/storage/opendal_storage_config.py @@ -0,0 +1,51 @@ +from enum import StrEnum +from typing import Literal + +from pydantic import Field +from pydantic_settings import BaseSettings + + +class OpenDALScheme(StrEnum): + FS = "fs" + S3 = "s3" + + +class OpenDALStorageConfig(BaseSettings): + STORAGE_OPENDAL_SCHEME: str = Field( + default=OpenDALScheme.FS.value, + description="OpenDAL scheme.", + ) + # FS + OPENDAL_FS_ROOT: str = Field( + default="storage", + description="Root path for local storage.", + ) + # S3 + OPENDAL_S3_ROOT: str = Field( + default="/", + description="Root path for S3 storage.", + ) + OPENDAL_S3_BUCKET: str = Field( + default="", + description="S3 bucket name.", + ) + OPENDAL_S3_ENDPOINT: str = Field( + default="https://s3.amazonaws.com", + description="S3 endpoint URL.", + ) + OPENDAL_S3_ACCESS_KEY_ID: str = Field( + default="", + description="S3 access key ID.", + ) + OPENDAL_S3_SECRET_ACCESS_KEY: str = Field( + default="", + description="S3 secret access key.", + ) + OPENDAL_S3_REGION: str = Field( + default="", + description="S3 region.", + ) + OPENDAL_S3_SERVER_SIDE_ENCRYPTION: Literal["aws:kms", ""] = Field( + default="", + description="S3 server-side encryption.", + ) diff --git a/api/configs/middleware/storage/supabase_storage_config.py b/api/configs/middleware/storage/supabase_storage_config.py index a3e905b21c..dcf7c20cf9 100644 --- a/api/configs/middleware/storage/supabase_storage_config.py +++ 
b/api/configs/middleware/storage/supabase_storage_config.py @@ -1,9 +1,10 @@ from typing import Optional -from pydantic import BaseModel, Field +from pydantic import Field +from pydantic_settings import BaseSettings -class SupabaseStorageConfig(BaseModel): +class SupabaseStorageConfig(BaseSettings): """ Configuration settings for Supabase Object Storage Service """ diff --git a/api/configs/middleware/storage/volcengine_tos_storage_config.py b/api/configs/middleware/storage/volcengine_tos_storage_config.py index 89ea885002..06c3ae4d3e 100644 --- a/api/configs/middleware/storage/volcengine_tos_storage_config.py +++ b/api/configs/middleware/storage/volcengine_tos_storage_config.py @@ -1,9 +1,10 @@ from typing import Optional -from pydantic import BaseModel, Field +from pydantic import Field +from pydantic_settings import BaseSettings -class VolcengineTOSStorageConfig(BaseModel): +class VolcengineTOSStorageConfig(BaseSettings): """ Configuration settings for Volcengine Tinder Object Storage (TOS) """ diff --git a/api/configs/middleware/vdb/analyticdb_config.py b/api/configs/middleware/vdb/analyticdb_config.py index 53cfaae43e..cb8dc7d724 100644 --- a/api/configs/middleware/vdb/analyticdb_config.py +++ b/api/configs/middleware/vdb/analyticdb_config.py @@ -1,9 +1,10 @@ from typing import Optional -from pydantic import BaseModel, Field, PositiveInt +from pydantic import Field, PositiveInt +from pydantic_settings import BaseSettings -class AnalyticdbConfig(BaseModel): +class AnalyticdbConfig(BaseSettings): """ Configuration for connecting to Alibaba Cloud AnalyticDB for PostgreSQL. Refer to the following documentation for details on obtaining credentials: diff --git a/api/configs/middleware/vdb/couchbase_config.py b/api/configs/middleware/vdb/couchbase_config.py index 391089ec6e..b81cbf8959 100644 --- a/api/configs/middleware/vdb/couchbase_config.py +++ b/api/configs/middleware/vdb/couchbase_config.py @@ -1,9 +1,10 @@ from typing import Optional -from pydantic import BaseModel, Field +from pydantic import Field +from pydantic_settings import BaseSettings -class CouchbaseConfig(BaseModel): +class CouchbaseConfig(BaseSettings): """ Couchbase configs """ diff --git a/api/configs/middleware/vdb/lindorm_config.py b/api/configs/middleware/vdb/lindorm_config.py index 0f6c652806..95e1d1cfca 100644 --- a/api/configs/middleware/vdb/lindorm_config.py +++ b/api/configs/middleware/vdb/lindorm_config.py @@ -21,3 +21,14 @@ class LindormConfig(BaseSettings): description="Lindorm password", default=None, ) + DEFAULT_INDEX_TYPE: Optional[str] = Field( + description="Lindorm Vector Index Type, hnsw or flat is available in dify", + default="hnsw", + ) + DEFAULT_DISTANCE_TYPE: Optional[str] = Field( + description="Vector Distance Type, support l2, cosinesimil, innerproduct", default="l2" + ) + USING_UGC_INDEX: Optional[bool] = Field( + description="Using UGC index will store the same type of Index in a single index but can retrieve separately.", + default=False, + ) diff --git a/api/configs/middleware/vdb/myscale_config.py b/api/configs/middleware/vdb/myscale_config.py index 5896c19d27..b5bf98b3aa 100644 --- a/api/configs/middleware/vdb/myscale_config.py +++ b/api/configs/middleware/vdb/myscale_config.py @@ -1,7 +1,8 @@ -from pydantic import BaseModel, Field, PositiveInt +from pydantic import Field, PositiveInt +from pydantic_settings import BaseSettings -class MyScaleConfig(BaseModel): +class MyScaleConfig(BaseSettings): """ Configuration settings for MyScale vector database """ diff --git 
a/api/configs/middleware/vdb/vikingdb_config.py b/api/configs/middleware/vdb/vikingdb_config.py index 3e718481dc..aba49ff670 100644 --- a/api/configs/middleware/vdb/vikingdb_config.py +++ b/api/configs/middleware/vdb/vikingdb_config.py @@ -1,9 +1,10 @@ from typing import Optional -from pydantic import BaseModel, Field +from pydantic import Field +from pydantic_settings import BaseSettings -class VikingDBConfig(BaseModel): +class VikingDBConfig(BaseSettings): """ Configuration for connecting to Volcengine VikingDB. Refer to the following documentation for details on obtaining credentials: diff --git a/api/configs/packaging/__init__.py b/api/configs/packaging/__init__.py index a2703ccb94..0c2ccd826e 100644 --- a/api/configs/packaging/__init__.py +++ b/api/configs/packaging/__init__.py @@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings): CURRENT_VERSION: str = Field( description="Dify version", - default="0.13.1", + default="0.13.2", ) COMMIT_SHA: str = Field( diff --git a/api/configs/remote_settings_sources/__init__.py b/api/configs/remote_settings_sources/__init__.py new file mode 100644 index 0000000000..4f3878d13b --- /dev/null +++ b/api/configs/remote_settings_sources/__init__.py @@ -0,0 +1,17 @@ +from typing import Optional + +from pydantic import Field + +from .apollo import ApolloSettingsSourceInfo +from .base import RemoteSettingsSource +from .enums import RemoteSettingsSourceName + + +class RemoteSettingsSourceConfig(ApolloSettingsSourceInfo): + REMOTE_SETTINGS_SOURCE_NAME: RemoteSettingsSourceName | str = Field( + description="name of remote config source", + default="", + ) + + +__all__ = ["RemoteSettingsSource", "RemoteSettingsSourceConfig", "RemoteSettingsSourceName"] diff --git a/api/configs/remote_settings_sources/apollo/__init__.py b/api/configs/remote_settings_sources/apollo/__init__.py new file mode 100644 index 0000000000..f02f7dc9ff --- /dev/null +++ b/api/configs/remote_settings_sources/apollo/__init__.py @@ -0,0 +1,55 @@ +from collections.abc import Mapping +from typing import Any, Optional + +from pydantic import Field +from pydantic.fields import FieldInfo +from pydantic_settings import BaseSettings + +from configs.remote_settings_sources.base import RemoteSettingsSource + +from .client import ApolloClient + + +class ApolloSettingsSourceInfo(BaseSettings): + """ + Apollo remote settings source information + """ + + APOLLO_APP_ID: Optional[str] = Field( + description="apollo app_id", + default=None, + ) + + APOLLO_CLUSTER: Optional[str] = Field( + description="apollo cluster", + default=None, + ) + + APOLLO_CONFIG_URL: Optional[str] = Field( + description="apollo config url", + default=None, + ) + + APOLLO_NAMESPACE: Optional[str] = Field( + description="apollo namespace", + default=None, + ) + + +class ApolloSettingsSource(RemoteSettingsSource): + def __init__(self, configs: Mapping[str, Any]): + self.client = ApolloClient( + app_id=configs["APOLLO_APP_ID"], + cluster=configs["APOLLO_CLUSTER"], + config_url=configs["APOLLO_CONFIG_URL"], + start_hot_update=False, + _notification_map={configs["APOLLO_NAMESPACE"]: -1}, + ) + self.namespace = configs["APOLLO_NAMESPACE"] + self.remote_configs = self.client.get_all_dicts(self.namespace) + + def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]: + if not isinstance(self.remote_configs, dict): + raise ValueError(f"remote configs is not dict, but {type(self.remote_configs)}") + field_value = self.remote_configs.get(field_name) + return field_value, field_name, False diff --git 
a/api/configs/remote_settings_sources/apollo/client.py b/api/configs/remote_settings_sources/apollo/client.py new file mode 100644 index 0000000000..d1f6781ed3 --- /dev/null +++ b/api/configs/remote_settings_sources/apollo/client.py @@ -0,0 +1,303 @@ +import hashlib +import json +import logging +import os +import threading +import time +from pathlib import Path + +from .python_3x import http_request, makedirs_wrapper +from .utils import ( + CONFIGURATIONS, + NAMESPACE_NAME, + NOTIFICATION_ID, + get_value_from_dict, + init_ip, + no_key_cache_key, + signature, + url_encode_wrapper, +) + +logger = logging.getLogger(__name__) + + +class ApolloClient: + def __init__( + self, + config_url, + app_id, + cluster="default", + secret="", + start_hot_update=True, + change_listener=None, + _notification_map=None, + ): + # Core routing parameters + self.config_url = config_url + self.cluster = cluster + self.app_id = app_id + + # Non-core parameters + self.ip = init_ip() + self.secret = secret + + # Check the parameter variables + + # Private control variables + self._cycle_time = 5 + self._stopping = False + self._cache = {} + self._no_key = {} + self._hash = {} + self._pull_timeout = 75 + self._cache_file_path = os.path.expanduser("~") + "/.dify/config/remote-settings/apollo/cache/" + self._long_poll_thread = None + self._change_listener = change_listener # "add" "delete" "update" + if _notification_map is None: + _notification_map = {"application": -1} + self._notification_map = _notification_map + self.last_release_key = None + # Private startup method + self._path_checker() + if start_hot_update: + self._start_hot_update() + + # start the heartbeat thread + heartbeat = threading.Thread(target=self._heart_beat) + heartbeat.daemon = True + heartbeat.start() + + def get_json_from_net(self, namespace="application"): + url = "{}/configs/{}/{}/{}?releaseKey={}&ip={}".format( + self.config_url, self.app_id, self.cluster, namespace, "", self.ip + ) + try: + code, body = http_request(url, timeout=3, headers=self._sign_headers(url)) + if code == 200: + if not body: + logger.error(f"get_json_from_net load configs failed, body is {body}") + return None + data = json.loads(body) + data = data["configurations"] + return_data = {CONFIGURATIONS: data} + return return_data + else: + return None + except Exception: + logger.exception("an error occurred in get_json_from_net") + return None + + def get_value(self, key, default_val=None, namespace="application"): + try: + # read the in-memory configuration + namespace_cache = self._cache.get(namespace) + val = get_value_from_dict(namespace_cache, key) + if val is not None: + return val + + no_key = no_key_cache_key(namespace, key) + if no_key in self._no_key: + return default_val + + # read the network configuration + namespace_data = self.get_json_from_net(namespace) + val = get_value_from_dict(namespace_data, key) + if val is not None: + self._update_cache_and_file(namespace_data, namespace) + return val + + # read the file configuration + namespace_cache = self._get_local_cache(namespace) + val = get_value_from_dict(namespace_cache, key) + if val is not None: + self._update_cache_and_file(namespace_cache, namespace) + return val + + # If all of them are not obtained, the default value is returned + # and the local cache is set to None + self._set_local_cache_none(namespace, key) + return default_val + except Exception: + logger.exception("get_value has error, [key is %s], [namespace is %s]", key, namespace) + return default_val + + # Record that the key is absent from this namespace, and do not cache the caller's default value: + # a later call may pass a different default, and a cached one would silently override it. + def _set_local_cache_none(self, namespace, key): + no_key = no_key_cache_key(namespace, key) + self._no_key[no_key] = key + + def _start_hot_update(self): + self._long_poll_thread = threading.Thread(target=self._listener) + # The long-poll thread is a daemon thread, so it exits automatically + # when the main thread exits. + self._long_poll_thread.daemon = True + self._long_poll_thread.start() + + def stop(self): + self._stopping = True + logger.info("Stopping listener...") + + # Invoke the registered change listener; log and swallow any exception it raises + def _call_listener(self, namespace, old_kv, new_kv): + if self._change_listener is None: + return + if old_kv is None: + old_kv = {} + if new_kv is None: + new_kv = {} + try: + for key in old_kv: + new_value = new_kv.get(key) + old_value = old_kv.get(key) + if new_value is None: + # If new_value is None, the key and its value were deleted. + self._change_listener("delete", namespace, key, old_value) + continue + if new_value != old_value: + self._change_listener("update", namespace, key, new_value) + continue + for key in new_kv: + new_value = new_kv.get(key) + old_value = old_kv.get(key) + if old_value is None: + self._change_listener("add", namespace, key, new_value) + except BaseException as e: + logger.warning(str(e)) + + def _path_checker(self): + if not os.path.isdir(self._cache_file_path): + makedirs_wrapper(self._cache_file_path) + + # update the local cache and file cache + def _update_cache_and_file(self, namespace_data, namespace="application"): + # update the local cache + self._cache[namespace] = namespace_data + # update the file cache + new_string = json.dumps(namespace_data) + new_hash = hashlib.md5(new_string.encode("utf-8")).hexdigest() + if self._hash.get(namespace) == new_hash: + pass + else: + file_path = Path(self._cache_file_path) / f"{self.app_id}_configuration_{namespace}.txt" + file_path.write_text(new_string) + self._hash[namespace] = new_hash + + # get the configuration from the local file + def _get_local_cache(self, namespace="application"): + cache_file_path = os.path.join(self._cache_file_path, f"{self.app_id}_configuration_{namespace}.txt") + if os.path.isfile(cache_file_path): + with open(cache_file_path) as f: + result = json.loads(f.readline()) + return result + return {} + + def _long_poll(self): + notifications = [] + for key in self._cache: + namespace_data = self._cache[key] + notification_id = -1 + if NOTIFICATION_ID in namespace_data: + notification_id = self._cache[key][NOTIFICATION_ID] + notifications.append({NAMESPACE_NAME: key, NOTIFICATION_ID: notification_id}) + try: + # nothing cached yet, so there is nothing to poll for + if len(notifications) == 0: + return + url = "{}/notifications/v2".format(self.config_url) + params = { + "appId": self.app_id, + "cluster": self.cluster, + "notifications": json.dumps(notifications, ensure_ascii=False), + } + param_str = url_encode_wrapper(params) + url = url + "?" 
+ param_str + code, body = http_request(url, self._pull_timeout, headers=self._sign_headers(url)) + http_code = code + if http_code == 304: + logger.debug("No change, loop...") + return + if http_code == 200: + if not body: + logger.error(f"_long_poll load configs failed, body is {body}") + return + data = json.loads(body) + for entry in data: + namespace = entry[NAMESPACE_NAME] + n_id = entry[NOTIFICATION_ID] + logger.info("%s has changes: notificationId=%d", namespace, n_id) + self._get_net_and_set_local(namespace, n_id, call_change=True) + return + else: + logger.warning("Sleep...") + except Exception as e: + logger.warning(str(e)) + + def _get_net_and_set_local(self, namespace, n_id, call_change=False): + namespace_data = self.get_json_from_net(namespace) + if not namespace_data: + return + namespace_data[NOTIFICATION_ID] = n_id + old_namespace = self._cache.get(namespace) + self._update_cache_and_file(namespace_data, namespace) + if self._change_listener is not None and call_change and old_namespace: + old_kv = old_namespace.get(CONFIGURATIONS) + new_kv = namespace_data.get(CONFIGURATIONS) + self._call_listener(namespace, old_kv, new_kv) + + def _listener(self): + logger.info("start long_poll") + while not self._stopping: + self._long_poll() + time.sleep(self._cycle_time) + logger.info("stopped, long_poll") + + # add a signature to the headers when a secret is configured + def _sign_headers(self, url): + headers = {} + if self.secret == "": + return headers + uri = url[len(self.config_url) : len(url)] + time_unix_now = str(int(round(time.time() * 1000))) + headers["Authorization"] = "Apollo " + self.app_id + ":" + signature(time_unix_now, uri, self.secret) + headers["Timestamp"] = time_unix_now + return headers + + def _heart_beat(self): + while not self._stopping: + for namespace in self._notification_map: + self._do_heart_beat(namespace) + time.sleep(60 * 10) # 10 minutes + + def _do_heart_beat(self, namespace): + url = "{}/configs/{}/{}/{}?ip={}".format(self.config_url, self.app_id, self.cluster, namespace, self.ip) + try: + code, body = http_request(url, timeout=3, headers=self._sign_headers(url)) + if code == 200: + if not body: + logger.error(f"_do_heart_beat load configs failed, body is {body}") + return None + data = json.loads(body) + if self.last_release_key == data["releaseKey"]: + return None + self.last_release_key = data["releaseKey"] + data = data["configurations"] + self._update_cache_and_file(data, namespace) + else: + return None + except Exception: + logger.exception("an error occurred in _do_heart_beat") + return None + + def get_all_dicts(self, namespace): + namespace_data = self._cache.get(namespace) + if namespace_data is None: + net_namespace_data = self.get_json_from_net(namespace) + if not net_namespace_data: + return namespace_data + namespace_data = net_namespace_data.get(CONFIGURATIONS) + if namespace_data: + self._update_cache_and_file(namespace_data, namespace) + return namespace_data diff --git a/api/configs/remote_settings_sources/apollo/python_3x.py b/api/configs/remote_settings_sources/apollo/python_3x.py new file mode 100644 index 0000000000..6a5f381991 --- /dev/null +++ b/api/configs/remote_settings_sources/apollo/python_3x.py @@ -0,0 +1,41 @@ +import logging +import os +import ssl +import urllib.request +from urllib import parse +from urllib.error import HTTPError + +# Create an SSL context that allows for a lower level of security +ssl_context = ssl.create_default_context() +ssl_context.set_ciphers("HIGH:!DH:!aNULL") +ssl_context.check_hostname = False 
+ssl_context.verify_mode = ssl.CERT_NONE + +# Create an opener object and pass in a custom SSL context +opener = urllib.request.build_opener(urllib.request.HTTPSHandler(context=ssl_context)) + +urllib.request.install_opener(opener) + +logger = logging.getLogger(__name__) + + +def http_request(url, timeout, headers={}): + try: + request = urllib.request.Request(url, headers=headers) + res = urllib.request.urlopen(request, timeout=timeout) + body = res.read().decode("utf-8") + return res.code, body + except HTTPError as e: + if e.code == 304: + logger.warning("http_request error, code is 304, maybe you should check the secret") + return 304, None + logger.warning("http_request error, code is %d, msg is %s", e.code, e.msg) + raise e + + +def url_encode(params): + return parse.urlencode(params) + + +def makedirs_wrapper(path): + os.makedirs(path, exist_ok=True) diff --git a/api/configs/remote_settings_sources/apollo/utils.py b/api/configs/remote_settings_sources/apollo/utils.py new file mode 100644 index 0000000000..6136112e03 --- /dev/null +++ b/api/configs/remote_settings_sources/apollo/utils.py @@ -0,0 +1,51 @@ +import hashlib +import socket + +from .python_3x import url_encode + +# define constants +CONFIGURATIONS = "configurations" +NOTIFICATION_ID = "notificationId" +NAMESPACE_NAME = "namespaceName" + + +# sign the timestamp and uri with the secret key +def signature(timestamp, uri, secret): + import base64 + import hmac + + string_to_sign = "" + timestamp + "\n" + uri + hmac_code = hmac.new(secret.encode(), string_to_sign.encode(), hashlib.sha1).digest() + return base64.b64encode(hmac_code).decode() + + +def url_encode_wrapper(params): + return url_encode(params) + + +def no_key_cache_key(namespace, key): + return "{}{}{}".format(namespace, len(namespace), key) + + +# Return the value for the key if it exists in the namespace cache, or None if it does not +def get_value_from_dict(namespace_cache, key): + if namespace_cache: + kv_data = namespace_cache.get(CONFIGURATIONS) + if kv_data is None: + return None + if key in kv_data: + return kv_data[key] + return None + + +def init_ip(): + ip = "" + s = None + try: + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.connect(("8.8.8.8", 53)) + ip = s.getsockname()[0] + finally: + if s: + s.close() + return ip diff --git a/api/configs/remote_settings_sources/base.py b/api/configs/remote_settings_sources/base.py new file mode 100644 index 0000000000..a96ffdfb4b --- /dev/null +++ b/api/configs/remote_settings_sources/base.py @@ -0,0 +1,15 @@ +from collections.abc import Mapping +from typing import Any + +from pydantic.fields import FieldInfo + + +class RemoteSettingsSource: + def __init__(self, configs: Mapping[str, Any]): + pass + + def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]: + raise NotImplementedError + + def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool) -> Any: + return value diff --git a/api/configs/remote_settings_sources/enums.py b/api/configs/remote_settings_sources/enums.py new file mode 100644 index 0000000000..3081f2950f --- /dev/null +++ b/api/configs/remote_settings_sources/enums.py @@ -0,0 +1,5 @@ +from enum import StrEnum + + +class RemoteSettingsSourceName(StrEnum): + APOLLO = "apollo" diff --git a/api/constants/__init__.py b/api/constants/__init__.py index 05795e11d7..4500ef4306 100644 --- a/api/constants/__init__.py +++ b/api/constants/__init__.py @@ -14,11 +14,11 @@ AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS]) if dify_config.ETL_TYPE == 
"Unstructured": - DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "pdf", "html", "htm", "xlsx", "xls"] + DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls"] DOCUMENT_EXTENSIONS.extend(("docx", "csv", "eml", "msg", "pptx", "xml", "epub")) if dify_config.UNSTRUCTURED_API_URL: DOCUMENT_EXTENSIONS.append("ppt") DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS]) else: - DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "pdf", "html", "htm", "xlsx", "xls", "docx", "csv"] + DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls", "docx", "csv"] DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS]) diff --git a/api/controllers/console/explore/installed_app.py b/api/controllers/console/explore/installed_app.py index b60c4e176b..3de179164d 100644 --- a/api/controllers/console/explore/installed_app.py +++ b/api/controllers/console/explore/installed_app.py @@ -1,5 +1,6 @@ from datetime import UTC, datetime +from flask import request from flask_login import current_user from flask_restful import Resource, inputs, marshal_with, reqparse from sqlalchemy import and_ @@ -20,8 +21,17 @@ class InstalledAppsListApi(Resource): @account_initialization_required @marshal_with(installed_app_list_fields) def get(self): + app_id = request.args.get("app_id", default=None, type=str) current_tenant_id = current_user.current_tenant_id - installed_apps = db.session.query(InstalledApp).filter(InstalledApp.tenant_id == current_tenant_id).all() + + if app_id: + installed_apps = ( + db.session.query(InstalledApp) + .filter(and_(InstalledApp.tenant_id == current_tenant_id, InstalledApp.app_id == app_id)) + .all() + ) + else: + installed_apps = db.session.query(InstalledApp).filter(InstalledApp.tenant_id == current_tenant_id).all() current_user.role = TenantService.get_user_role(current_user, current_user.current_tenant) installed_apps = [ diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py index 9ecda2126d..2cd6dcda3b 100644 --- a/api/controllers/console/workspace/tool_providers.py +++ b/api/controllers/console/workspace/tool_providers.py @@ -368,6 +368,7 @@ class ToolWorkflowProviderCreateApi(Resource): description=args["description"], parameters=args["parameters"], privacy_policy=args["privacy_policy"], + labels=args["labels"], ) diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py index bd4fd9cd3b..6200299d21 100644 --- a/api/core/app/apps/advanced_chat/app_generator.py +++ b/api/core/app/apps/advanced_chat/app_generator.py @@ -3,7 +3,7 @@ import logging import threading import uuid from collections.abc import Generator, Mapping -from typing import Any, Optional, Union +from typing import Any, Literal, Optional, Union, overload from flask import Flask, current_app from pydantic import ValidationError @@ -36,6 +36,29 @@ logger = logging.getLogger(__name__) class AdvancedChatAppGenerator(MessageBasedAppGenerator): _dialogue_count: int + @overload + def generate( + self, + app_model: App, + workflow: Workflow, + user: Union[Account, EndUser], + args: Mapping[str, Any], + invoke_from: InvokeFrom, + streaming: Literal[True], + ) -> Generator[str, None, None]: ... + + @overload + def generate( + self, + app_model: App, + workflow: Workflow, + user: Union[Account, EndUser], + args: Mapping[str, Any], + invoke_from: InvokeFrom, + streaming: Literal[False], + ) -> Mapping[str, Any]: ... 
+ + @overload def generate( self, app_model: App, @@ -44,7 +67,17 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): args: Mapping[str, Any], invoke_from: InvokeFrom, streaming: bool = True, - ) -> Mapping[str, Any] | Generator[str, None, None]: + ) -> Union[Mapping[str, Any], Generator[str, None, None]]: ... + + def generate( + self, + app_model: App, + workflow: Workflow, + user: Union[Account, EndUser], + args: Mapping[str, Any], + invoke_from: InvokeFrom, + streaming: bool = True, + ): """ Generate App response. diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py index cd12690e28..32a23a7fdb 100644 --- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py +++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py @@ -19,6 +19,7 @@ from core.app.entities.queue_entities import ( QueueIterationNextEvent, QueueIterationStartEvent, QueueMessageReplaceEvent, + QueueNodeExceptionEvent, QueueNodeFailedEvent, QueueNodeInIterationFailedEvent, QueueNodeStartedEvent, @@ -31,6 +32,7 @@ from core.app.entities.queue_entities import ( QueueStopEvent, QueueTextChunkEvent, QueueWorkflowFailedEvent, + QueueWorkflowPartialSuccessEvent, QueueWorkflowStartedEvent, QueueWorkflowSucceededEvent, ) @@ -127,7 +129,6 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc self._conversation_name_generate_thread = None self._recorded_files: list[Mapping[str, Any]] = [] - self.total_tokens: int = 0 def process(self): """ @@ -318,7 +319,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc if response: yield response - elif isinstance(event, QueueNodeFailedEvent | QueueNodeInIterationFailedEvent): + elif isinstance(event, QueueNodeFailedEvent | QueueNodeInIterationFailedEvent | QueueNodeExceptionEvent): workflow_node_execution = self._handle_workflow_node_execution_failed(event) response = self._workflow_node_finish_to_stream_response( @@ -361,8 +362,6 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc if not workflow_run: raise Exception("Workflow run not initialized.") - # FIXME for issue #11221 quick fix maybe have a better solution - self.total_tokens += event.metadata.get("total_tokens", 0) if event.metadata else 0 yield self._workflow_iteration_completed_to_stream_response( task_id=self._application_generate_entity.task_id, workflow_run=workflow_run, event=event ) @@ -376,7 +375,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc workflow_run = self._handle_workflow_run_success( workflow_run=workflow_run, start_at=graph_runtime_state.start_at, - total_tokens=graph_runtime_state.total_tokens or self.total_tokens, + total_tokens=graph_runtime_state.total_tokens, total_steps=graph_runtime_state.node_run_steps, outputs=event.outputs, conversation_id=self._conversation.id, @@ -387,6 +386,29 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc task_id=self._application_generate_entity.task_id, workflow_run=workflow_run ) + self._queue_manager.publish(QueueAdvancedChatMessageEndEvent(), PublishFrom.TASK_PIPELINE) + elif isinstance(event, QueueWorkflowPartialSuccessEvent): + if not workflow_run: + raise Exception("Workflow run not initialized.") + + if not graph_runtime_state: + raise Exception("Graph runtime state not initialized.") + + workflow_run = self._handle_workflow_run_partial_success( + workflow_run=workflow_run, + start_at=graph_runtime_state.start_at, + 
total_tokens=graph_runtime_state.total_tokens, + total_steps=graph_runtime_state.node_run_steps, + outputs=event.outputs, + exceptions_count=event.exceptions_count, + conversation_id=None, + trace_manager=trace_manager, + ) + + yield self._workflow_finish_to_stream_response( + task_id=self._application_generate_entity.task_id, workflow_run=workflow_run + ) + self._queue_manager.publish(QueueAdvancedChatMessageEndEvent(), PublishFrom.TASK_PIPELINE) elif isinstance(event, QueueWorkflowFailedEvent): if not workflow_run: @@ -404,6 +426,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc error=event.error, conversation_id=self._conversation.id, trace_manager=trace_manager, + exceptions_count=event.exceptions_count, ) yield self._workflow_finish_to_stream_response( diff --git a/api/core/app/apps/agent_chat/app_generator.py b/api/core/app/apps/agent_chat/app_generator.py index b659c18556..b391169e3d 100644 --- a/api/core/app/apps/agent_chat/app_generator.py +++ b/api/core/app/apps/agent_chat/app_generator.py @@ -2,7 +2,7 @@ import logging import threading import uuid from collections.abc import Generator, Mapping -from typing import Any, Union +from typing import Any, Literal, Union, overload from flask import Flask, current_app from pydantic import ValidationError @@ -28,6 +28,39 @@ logger = logging.getLogger(__name__) class AgentChatAppGenerator(MessageBasedAppGenerator): + @overload + def generate( + self, + *, + app_model: App, + user: Union[Account, EndUser], + args: Mapping[str, Any], + invoke_from: InvokeFrom, + streaming: Literal[True], + ) -> Generator[str, None, None]: ... + + @overload + def generate( + self, + *, + app_model: App, + user: Union[Account, EndUser], + args: Mapping[str, Any], + invoke_from: InvokeFrom, + streaming: Literal[False], + ) -> Mapping[str, Any]: ... + + @overload + def generate( + self, + *, + app_model: App, + user: Union[Account, EndUser], + args: Mapping[str, Any], + invoke_from: InvokeFrom, + streaming: bool, + ) -> Mapping[str, Any] | Generator[str, None, None]: ... + def generate( self, *, @@ -36,7 +69,7 @@ class AgentChatAppGenerator(MessageBasedAppGenerator): args: Mapping[str, Any], invoke_from: InvokeFrom, streaming: bool = True, - ) -> Mapping[str, Any] | Generator[str, None, None]: + ): """ Generate App response. diff --git a/api/core/app/apps/chat/app_generator.py b/api/core/app/apps/chat/app_generator.py index 6a9e162388..5b8debaaae 100644 --- a/api/core/app/apps/chat/app_generator.py +++ b/api/core/app/apps/chat/app_generator.py @@ -1,7 +1,7 @@ import logging import threading import uuid -from collections.abc import Generator +from collections.abc import Generator, Mapping from typing import Any, Literal, Union, overload from flask import Flask, current_app @@ -34,9 +34,9 @@ class ChatAppGenerator(MessageBasedAppGenerator): self, app_model: App, user: Union[Account, EndUser], - args: Any, + args: Mapping[str, Any], invoke_from: InvokeFrom, - stream: Literal[True] = True, + streaming: Literal[True], ) -> Generator[str, None, None]: ... @overload @@ -44,19 +44,29 @@ class ChatAppGenerator(MessageBasedAppGenerator): self, app_model: App, user: Union[Account, EndUser], - args: Any, + args: Mapping[str, Any], invoke_from: InvokeFrom, - stream: Literal[False] = False, - ) -> dict: ... + streaming: Literal[False], + ) -> Mapping[str, Any]: ... 
+ + @overload + def generate( + self, + app_model: App, + user: Union[Account, EndUser], + args: Mapping[str, Any], + invoke_from: InvokeFrom, + streaming: bool, + ) -> Union[Mapping[str, Any], Generator[str, None, None]]: ... def generate( self, app_model: App, user: Union[Account, EndUser], - args: Any, + args: Mapping[str, Any], invoke_from: InvokeFrom, streaming: bool = True, - ) -> Union[dict, Generator[str, None, None]]: + ): """ Generate App response. diff --git a/api/core/app/apps/completion/app_generator.py b/api/core/app/apps/completion/app_generator.py index 324e837a1c..14fd33dd39 100644 --- a/api/core/app/apps/completion/app_generator.py +++ b/api/core/app/apps/completion/app_generator.py @@ -1,7 +1,7 @@ import logging import threading import uuid -from collections.abc import Generator +from collections.abc import Generator, Mapping from typing import Any, Literal, Union, overload from flask import Flask, current_app @@ -34,9 +34,9 @@ class CompletionAppGenerator(MessageBasedAppGenerator): self, app_model: App, user: Union[Account, EndUser], - args: dict, + args: Mapping[str, Any], invoke_from: InvokeFrom, - stream: Literal[True] = True, + streaming: Literal[True], ) -> Generator[str, None, None]: ... @overload @@ -44,14 +44,29 @@ class CompletionAppGenerator(MessageBasedAppGenerator): self, app_model: App, user: Union[Account, EndUser], - args: dict, + args: Mapping[str, Any], invoke_from: InvokeFrom, - stream: Literal[False] = False, - ) -> dict: ... + streaming: Literal[False], + ) -> Mapping[str, Any]: ... + + @overload + def generate( + self, + app_model: App, + user: Union[Account, EndUser], + args: Mapping[str, Any], + invoke_from: InvokeFrom, + streaming: bool, + ) -> Mapping[str, Any] | Generator[str, None, None]: ... def generate( - self, app_model: App, user: Union[Account, EndUser], args: Any, invoke_from: InvokeFrom, streaming: bool = True - ) -> Union[dict, Generator[str, None, None]]: + self, + app_model: App, + user: Union[Account, EndUser], + args: Mapping[str, Any], + invoke_from: InvokeFrom, + streaming: bool = True, + ): """ Generate App response. diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py index 7acf05326e..dc4ee9e566 100644 --- a/api/core/app/apps/workflow/app_generator.py +++ b/api/core/app/apps/workflow/app_generator.py @@ -3,7 +3,7 @@ import logging import threading import uuid from collections.abc import Generator, Mapping, Sequence -from typing import Any, Optional, Union +from typing import Any, Literal, Optional, Union, overload from flask import Flask, current_app from pydantic import ValidationError @@ -30,6 +30,35 @@ logger = logging.getLogger(__name__) class WorkflowAppGenerator(BaseAppGenerator): + @overload + def generate( + self, + *, + app_model: App, + workflow: Workflow, + user: Account | EndUser, + args: Mapping[str, Any], + invoke_from: InvokeFrom, + streaming: Literal[True], + call_depth: int = 0, + workflow_thread_pool_id: Optional[str] = None, + ) -> Generator[str, None, None]: ... + + @overload + def generate( + self, + *, + app_model: App, + workflow: Workflow, + user: Account | EndUser, + args: Mapping[str, Any], + invoke_from: InvokeFrom, + streaming: Literal[False], + call_depth: int = 0, + workflow_thread_pool_id: Optional[str] = None, + ) -> Mapping[str, Any]: ... 
+ + @overload def generate( self, *, @@ -41,7 +70,20 @@ class WorkflowAppGenerator(BaseAppGenerator): streaming: bool = True, call_depth: int = 0, workflow_thread_pool_id: Optional[str] = None, - ) -> Mapping[str, Any] | Generator[str, None, None]: + ) -> Mapping[str, Any] | Generator[str, None, None]: ... + + def generate( + self, + *, + app_model: App, + workflow: Workflow, + user: Account | EndUser, + args: Mapping[str, Any], + invoke_from: InvokeFrom, + streaming: bool = True, + call_depth: int = 0, + workflow_thread_pool_id: Optional[str] = None, + ): files: Sequence[Mapping[str, Any]] = args.get("files") or [] # parse files diff --git a/api/core/app/apps/workflow/app_queue_manager.py b/api/core/app/apps/workflow/app_queue_manager.py index 76371f800b..349b8eb51b 100644 --- a/api/core/app/apps/workflow/app_queue_manager.py +++ b/api/core/app/apps/workflow/app_queue_manager.py @@ -6,6 +6,7 @@ from core.app.entities.queue_entities import ( QueueMessageEndEvent, QueueStopEvent, QueueWorkflowFailedEvent, + QueueWorkflowPartialSuccessEvent, QueueWorkflowSucceededEvent, WorkflowQueueMessage, ) @@ -34,7 +35,8 @@ class WorkflowAppQueueManager(AppQueueManager): | QueueErrorEvent | QueueMessageEndEvent | QueueWorkflowSucceededEvent - | QueueWorkflowFailedEvent, + | QueueWorkflowFailedEvent + | QueueWorkflowPartialSuccessEvent, ): self.stop_listen() diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py index 9966a1a9d1..2330229f43 100644 --- a/api/core/app/apps/workflow/generate_task_pipeline.py +++ b/api/core/app/apps/workflow/generate_task_pipeline.py @@ -15,6 +15,7 @@ from core.app.entities.queue_entities import ( QueueIterationCompletedEvent, QueueIterationNextEvent, QueueIterationStartEvent, + QueueNodeExceptionEvent, QueueNodeFailedEvent, QueueNodeInIterationFailedEvent, QueueNodeStartedEvent, @@ -26,6 +27,7 @@ from core.app.entities.queue_entities import ( QueueStopEvent, QueueTextChunkEvent, QueueWorkflowFailedEvent, + QueueWorkflowPartialSuccessEvent, QueueWorkflowStartedEvent, QueueWorkflowSucceededEvent, ) @@ -106,7 +108,6 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa self._task_state = WorkflowTaskState() self._wip_workflow_node_executions = {} - self.total_tokens: int = 0 def process(self) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]: """ @@ -258,36 +259,36 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa workflow_node_execution = self._handle_node_execution_start(workflow_run=workflow_run, event=event) - response = self._workflow_node_start_to_stream_response( + node_start_response = self._workflow_node_start_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, workflow_node_execution=workflow_node_execution, ) - if response: - yield response + if node_start_response: + yield node_start_response elif isinstance(event, QueueNodeSucceededEvent): workflow_node_execution = self._handle_workflow_node_execution_success(event) - response = self._workflow_node_finish_to_stream_response( + node_success_response = self._workflow_node_finish_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, workflow_node_execution=workflow_node_execution, ) - if response: - yield response - elif isinstance(event, QueueNodeFailedEvent | QueueNodeInIterationFailedEvent): + if node_success_response: + yield node_success_response + elif isinstance(event, 
QueueNodeFailedEvent | QueueNodeInIterationFailedEvent | QueueNodeExceptionEvent): workflow_node_execution = self._handle_workflow_node_execution_failed(event) - response = self._workflow_node_finish_to_stream_response( + node_failed_response = self._workflow_node_finish_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, workflow_node_execution=workflow_node_execution, ) - if response: - yield response + if node_failed_response: + yield node_failed_response elif isinstance(event, QueueParallelBranchRunStartedEvent): if not workflow_run: raise Exception("Workflow run not initialized.") @@ -320,8 +321,6 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa if not workflow_run: raise Exception("Workflow run not initialized.") - # FIXME for issue #11221 quick fix maybe have a better solution - self.total_tokens += event.metadata.get("total_tokens", 0) if event.metadata else 0 yield self._workflow_iteration_completed_to_stream_response( task_id=self._application_generate_entity.task_id, workflow_run=workflow_run, event=event ) @@ -335,7 +334,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa workflow_run = self._handle_workflow_run_success( workflow_run=workflow_run, start_at=graph_runtime_state.start_at, - total_tokens=graph_runtime_state.total_tokens or self.total_tokens, + total_tokens=graph_runtime_state.total_tokens, total_steps=graph_runtime_state.node_run_steps, outputs=event.outputs, conversation_id=None, @@ -345,6 +344,30 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa # save workflow app log self._save_workflow_app_log(workflow_run) + yield self._workflow_finish_to_stream_response( + task_id=self._application_generate_entity.task_id, workflow_run=workflow_run + ) + elif isinstance(event, QueueWorkflowPartialSuccessEvent): + if not workflow_run: + raise Exception("Workflow run not initialized.") + + if not graph_runtime_state: + raise Exception("Graph runtime state not initialized.") + + workflow_run = self._handle_workflow_run_partial_success( + workflow_run=workflow_run, + start_at=graph_runtime_state.start_at, + total_tokens=graph_runtime_state.total_tokens, + total_steps=graph_runtime_state.node_run_steps, + outputs=event.outputs, + exceptions_count=event.exceptions_count, + conversation_id=None, + trace_manager=trace_manager, + ) + + # save workflow app log + self._save_workflow_app_log(workflow_run) + yield self._workflow_finish_to_stream_response( task_id=self._application_generate_entity.task_id, workflow_run=workflow_run ) @@ -354,7 +377,6 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa if not graph_runtime_state: raise Exception("Graph runtime state not initialized.") - workflow_run = self._handle_workflow_run_failed( workflow_run=workflow_run, start_at=graph_runtime_state.start_at, @@ -366,6 +388,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa error=event.error if isinstance(event, QueueWorkflowFailedEvent) else event.get_stop_reason(), conversation_id=None, trace_manager=trace_manager, + exceptions_count=event.exceptions_count if isinstance(event, QueueWorkflowFailedEvent) else 0, ) # save workflow app log diff --git a/api/core/app/apps/workflow_app_runner.py b/api/core/app/apps/workflow_app_runner.py index 3d46b8bab0..97c2cc5bb9 100644 --- a/api/core/app/apps/workflow_app_runner.py +++ b/api/core/app/apps/workflow_app_runner.py @@ -8,6 +8,7 @@ from 
core.app.entities.queue_entities import ( QueueIterationCompletedEvent, QueueIterationNextEvent, QueueIterationStartEvent, + QueueNodeExceptionEvent, QueueNodeFailedEvent, QueueNodeInIterationFailedEvent, QueueNodeStartedEvent, @@ -18,6 +19,7 @@ from core.app.entities.queue_entities import ( QueueRetrieverResourcesEvent, QueueTextChunkEvent, QueueWorkflowFailedEvent, + QueueWorkflowPartialSuccessEvent, QueueWorkflowStartedEvent, QueueWorkflowSucceededEvent, ) @@ -25,6 +27,7 @@ from core.workflow.entities.variable_pool import VariablePool from core.workflow.graph_engine.entities.event import ( GraphEngineEvent, GraphRunFailedEvent, + GraphRunPartialSucceededEvent, GraphRunStartedEvent, GraphRunSucceededEvent, IterationRunFailedEvent, @@ -32,6 +35,7 @@ from core.workflow.graph_engine.entities.event import ( IterationRunStartedEvent, IterationRunSucceededEvent, NodeInIterationFailedEvent, + NodeRunExceptionEvent, NodeRunFailedEvent, NodeRunRetrieverResourceEvent, NodeRunStartedEvent, @@ -176,8 +180,12 @@ class WorkflowBasedAppRunner(AppRunner): ) elif isinstance(event, GraphRunSucceededEvent): self._publish_event(QueueWorkflowSucceededEvent(outputs=event.outputs)) + elif isinstance(event, GraphRunPartialSucceededEvent): + self._publish_event( + QueueWorkflowPartialSuccessEvent(outputs=event.outputs, exceptions_count=event.exceptions_count) + ) elif isinstance(event, GraphRunFailedEvent): - self._publish_event(QueueWorkflowFailedEvent(error=event.error)) + self._publish_event(QueueWorkflowFailedEvent(error=event.error, exceptions_count=event.exceptions_count)) elif isinstance(event, NodeRunStartedEvent): self._publish_event( QueueNodeStartedEvent( @@ -253,6 +261,36 @@ class WorkflowBasedAppRunner(AppRunner): in_iteration_id=event.in_iteration_id, ) ) + elif isinstance(event, NodeRunExceptionEvent): + self._publish_event( + QueueNodeExceptionEvent( + node_execution_id=event.id, + node_id=event.node_id, + node_type=event.node_type, + node_data=event.node_data, + parallel_id=event.parallel_id, + parallel_start_node_id=event.parallel_start_node_id, + parent_parallel_id=event.parent_parallel_id, + parent_parallel_start_node_id=event.parent_parallel_start_node_id, + start_at=event.route_node_state.start_at, + inputs=event.route_node_state.node_run_result.inputs + if event.route_node_state.node_run_result + else {}, + process_data=event.route_node_state.node_run_result.process_data + if event.route_node_state.node_run_result + else {}, + outputs=event.route_node_state.node_run_result.outputs + if event.route_node_state.node_run_result + else {}, + error=event.route_node_state.node_run_result.error + if event.route_node_state.node_run_result and event.route_node_state.node_run_result.error + else "Unknown error", + execution_metadata=event.route_node_state.node_run_result.metadata + if event.route_node_state.node_run_result + else {}, + in_iteration_id=event.in_iteration_id, + ) + ) elif isinstance(event, NodeInIterationFailedEvent): self._publish_event( QueueNodeInIterationFailedEvent( diff --git a/api/core/app/entities/queue_entities.py b/api/core/app/entities/queue_entities.py index 15543638fc..5b2036c7f9 100644 --- a/api/core/app/entities/queue_entities.py +++ b/api/core/app/entities/queue_entities.py @@ -2,7 +2,7 @@ from datetime import datetime from enum import Enum, StrEnum from typing import Any, Optional -from pydantic import BaseModel, field_validator +from pydantic import BaseModel from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk from 
core.workflow.entities.node_entities import NodeRunMetadataKey
@@ -25,12 +25,14 @@ class QueueEvent(StrEnum):
     WORKFLOW_STARTED = "workflow_started"
     WORKFLOW_SUCCEEDED = "workflow_succeeded"
     WORKFLOW_FAILED = "workflow_failed"
+    WORKFLOW_PARTIAL_SUCCEEDED = "workflow_partial_succeeded"
     ITERATION_START = "iteration_start"
     ITERATION_NEXT = "iteration_next"
     ITERATION_COMPLETED = "iteration_completed"
     NODE_STARTED = "node_started"
     NODE_SUCCEEDED = "node_succeeded"
     NODE_FAILED = "node_failed"
+    NODE_EXCEPTION = "node_exception"
     RETRIEVER_RESOURCES = "retriever_resources"
     ANNOTATION_REPLY = "annotation_reply"
     AGENT_THOUGHT = "agent_thought"
@@ -113,18 +115,6 @@ class QueueIterationNextEvent(AppQueueEvent):
     output: Optional[Any] = None  # output for the current iteration
     duration: Optional[float] = None

-    @field_validator("output", mode="before")
-    @classmethod
-    def set_output(cls, v):
-        """
-        Set output
-        """
-        if v is None:
-            return None
-        if isinstance(v, int | float | str | bool | dict | list):
-            return v
-        raise ValueError("output must be a valid type")
-

 class QueueIterationCompletedEvent(AppQueueEvent):
     """
@@ -249,6 +239,17 @@ class QueueWorkflowFailedEvent(AppQueueEvent):
     event: QueueEvent = QueueEvent.WORKFLOW_FAILED
     error: str
+    exceptions_count: int
+
+
+class QueueWorkflowPartialSuccessEvent(AppQueueEvent):
+    """
+    QueueWorkflowPartialSuccessEvent entity
+    """
+
+    event: QueueEvent = QueueEvent.WORKFLOW_PARTIAL_SUCCEEDED
+    exceptions_count: int
+    outputs: Optional[dict[str, Any]] = None

 class QueueNodeStartedEvent(AppQueueEvent):
@@ -343,6 +344,37 @@ class QueueNodeInIterationFailedEvent(AppQueueEvent):
     error: str

+class QueueNodeExceptionEvent(AppQueueEvent):
+    """
+    QueueNodeExceptionEvent entity
+    """
+
+    event: QueueEvent = QueueEvent.NODE_EXCEPTION
+
+    node_execution_id: str
+    node_id: str
+    node_type: NodeType
+    node_data: BaseNodeData
+    parallel_id: Optional[str] = None
+    """parallel id if node is in parallel"""
+    parallel_start_node_id: Optional[str] = None
+    """parallel start node id if node is in parallel"""
+    parent_parallel_id: Optional[str] = None
+    """parent parallel id if node is in parallel"""
+    parent_parallel_start_node_id: Optional[str] = None
+    """parent parallel start node id if node is in parallel"""
+    in_iteration_id: Optional[str] = None
+    """iteration id if node is in iteration"""
+    start_at: datetime
+
+    inputs: Optional[dict[str, Any]] = None
+    process_data: Optional[dict[str, Any]] = None
+    outputs: Optional[dict[str, Any]] = None
+    execution_metadata: Optional[dict[NodeRunMetadataKey, Any]] = None
+
+    error: str
+
+
 class QueueNodeFailedEvent(AppQueueEvent):
     """
     QueueNodeFailedEvent entity
diff --git a/api/core/app/entities/task_entities.py b/api/core/app/entities/task_entities.py
index 03cc6941a8..7fe06b3af8 100644
--- a/api/core/app/entities/task_entities.py
+++ b/api/core/app/entities/task_entities.py
@@ -213,6 +213,7 @@ class WorkflowFinishStreamResponse(StreamResponse):
     created_by: Optional[dict] = None
     created_at: int
     finished_at: int
+    exceptions_count: Optional[int] = 0
     files: Optional[Sequence[Mapping[str, Any]]] = []

     event: StreamEvent = StreamEvent.WORKFLOW_FINISHED
diff --git a/api/core/app/features/rate_limiting/rate_limit.py b/api/core/app/features/rate_limiting/rate_limit.py
index 154a49ebda..8fe1d96b37 100644
--- a/api/core/app/features/rate_limiting/rate_limit.py
+++ b/api/core/app/features/rate_limiting/rate_limit.py
@@ -110,7 +110,7 @@ class RateLimitGenerator:
             raise StopIteration
         try:
             return next(self.generator)
-        except StopIteration:
+        except Exception:
             self.close()
             raise
diff --git a/api/core/app/task_pipeline/workflow_cycle_manage.py b/api/core/app/task_pipeline/workflow_cycle_manage.py
index 57a02f8bc8..d78f124e3a 100644
--- a/api/core/app/task_pipeline/workflow_cycle_manage.py
+++ b/api/core/app/task_pipeline/workflow_cycle_manage.py
@@ -12,6 +12,7 @@ from core.app.entities.queue_entities import (
     QueueIterationCompletedEvent,
     QueueIterationNextEvent,
     QueueIterationStartEvent,
+    QueueNodeExceptionEvent,
     QueueNodeFailedEvent,
     QueueNodeInIterationFailedEvent,
     QueueNodeStartedEvent,
@@ -164,6 +165,55 @@ class WorkflowCycleManage:

         return workflow_run

+    def _handle_workflow_run_partial_success(
+        self,
+        workflow_run: WorkflowRun,
+        start_at: float,
+        total_tokens: int,
+        total_steps: int,
+        outputs: Mapping[str, Any] | None = None,
+        exceptions_count: int = 0,
+        conversation_id: Optional[str] = None,
+        trace_manager: Optional[TraceQueueManager] = None,
+    ) -> WorkflowRun:
+        """
+        Workflow run partial success
+        :param workflow_run: workflow run
+        :param start_at: start time
+        :param total_tokens: total tokens
+        :param total_steps: total steps
+        :param outputs: outputs
+        :param conversation_id: conversation id
+        :return:
+        """
+        workflow_run = self._refetch_workflow_run(workflow_run.id)
+
+        outputs = WorkflowEntry.handle_special_values(outputs)
+
+        workflow_run.status = WorkflowRunStatus.PARTIAL_SUCCESSED.value
+        workflow_run.outputs = json.dumps(outputs or {})
+        workflow_run.elapsed_time = time.perf_counter() - start_at
+        workflow_run.total_tokens = total_tokens
+        workflow_run.total_steps = total_steps
+        workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None)
+        workflow_run.exceptions_count = exceptions_count
+        db.session.commit()
+        db.session.refresh(workflow_run)
+
+        if trace_manager:
+            trace_manager.add_trace_task(
+                TraceTask(
+                    TraceTaskName.WORKFLOW_TRACE,
+                    workflow_run=workflow_run,
+                    conversation_id=conversation_id,
+                    user_id=trace_manager.user_id,
+                )
+            )
+
+        db.session.close()
+
+        return workflow_run
+
     def _handle_workflow_run_failed(
         self,
         workflow_run: WorkflowRun,
@@ -174,6 +224,7 @@ class WorkflowCycleManage:
         error: str,
         conversation_id: Optional[str] = None,
         trace_manager: Optional[TraceQueueManager] = None,
+        exceptions_count: int = 0,
     ) -> WorkflowRun:
         """
         Workflow run failed
@@ -193,7 +244,7 @@ class WorkflowCycleManage:
         workflow_run.total_tokens = total_tokens
         workflow_run.total_steps = total_steps
         workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None)
-
+        workflow_run.exceptions_count = exceptions_count
         db.session.commit()

         running_workflow_node_executions = (
@@ -318,7 +369,7 @@ class WorkflowCycleManage:
         return workflow_node_execution

     def _handle_workflow_node_execution_failed(
-        self, event: QueueNodeFailedEvent | QueueNodeInIterationFailedEvent
+        self, event: QueueNodeFailedEvent | QueueNodeInIterationFailedEvent | QueueNodeExceptionEvent
     ) -> WorkflowNodeExecution:
         """
         Workflow node execution failed
@@ -337,7 +388,11 @@ class WorkflowCycleManage:
         )
         db.session.query(WorkflowNodeExecution).filter(WorkflowNodeExecution.id == workflow_node_execution.id).update(
             {
-                WorkflowNodeExecution.status: WorkflowNodeExecutionStatus.FAILED.value,
+                WorkflowNodeExecution.status: (
+                    WorkflowNodeExecutionStatus.FAILED.value
+                    if not isinstance(event, QueueNodeExceptionEvent)
+                    else WorkflowNodeExecutionStatus.EXCEPTION.value
+                ),
                 WorkflowNodeExecution.error: event.error,
                 WorkflowNodeExecution.inputs: json.dumps(inputs) if inputs else None,
                 WorkflowNodeExecution.process_data: json.dumps(process_data) if
process_data else None, @@ -351,8 +406,11 @@ class WorkflowCycleManage: db.session.commit() db.session.close() process_data = WorkflowEntry.handle_special_values(event.process_data) - - workflow_node_execution.status = WorkflowNodeExecutionStatus.FAILED.value + workflow_node_execution.status = ( + WorkflowNodeExecutionStatus.FAILED.value + if not isinstance(event, QueueNodeExceptionEvent) + else WorkflowNodeExecutionStatus.EXCEPTION.value + ) workflow_node_execution.error = event.error workflow_node_execution.inputs = json.dumps(inputs) if inputs else None workflow_node_execution.process_data = json.dumps(process_data) if process_data else None @@ -433,6 +491,7 @@ class WorkflowCycleManage: created_at=int(workflow_run.created_at.timestamp()), finished_at=int(workflow_run.finished_at.timestamp()), files=self._fetch_files_from_node_outputs(workflow_run.outputs_dict), + exceptions_count=workflow_run.exceptions_count, ), ) @@ -483,7 +542,10 @@ class WorkflowCycleManage: def _workflow_node_finish_to_stream_response( self, - event: QueueNodeSucceededEvent | QueueNodeFailedEvent | QueueNodeInIterationFailedEvent, + event: QueueNodeSucceededEvent + | QueueNodeFailedEvent + | QueueNodeInIterationFailedEvent + | QueueNodeExceptionEvent, task_id: str, workflow_node_execution: WorkflowNodeExecution, ) -> Optional[NodeFinishStreamResponse]: diff --git a/api/core/file/file_manager.py b/api/core/file/file_manager.py index 6d8086435d..3b83683755 100644 --- a/api/core/file/file_manager.py +++ b/api/core/file/file_manager.py @@ -141,7 +141,7 @@ def _to_url(f: File, /): elif f.transfer_method == FileTransferMethod.LOCAL_FILE: if f.related_id is None: raise ValueError("Missing file related_id") - return helpers.get_signed_file_url(upload_file_id=f.related_id) + return f.remote_url or helpers.get_signed_file_url(upload_file_id=f.related_id) elif f.transfer_method == FileTransferMethod.TOOL_FILE: # add sign url if f.related_id is None or f.extension is None: diff --git a/api/core/helper/ssrf_proxy.py b/api/core/helper/ssrf_proxy.py index 5b0e9d807c..cbbf3ebe8c 100644 --- a/api/core/helper/ssrf_proxy.py +++ b/api/core/helper/ssrf_proxy.py @@ -24,6 +24,12 @@ BACKOFF_FACTOR = 0.5 STATUS_FORCELIST = [429, 500, 502, 503, 504] +class MaxRetriesExceededError(Exception): + """Raised when the maximum number of retries is exceeded.""" + + pass + + def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs): if "allow_redirects" in kwargs: allow_redirects = kwargs.pop("allow_redirects") @@ -66,7 +72,7 @@ def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs): if retries <= max_retries: time.sleep(BACKOFF_FACTOR * (2 ** (retries - 1))) - raise Exception(f"Reached maximum retries ({max_retries}) for URL {url}") + raise MaxRetriesExceededError(f"Reached maximum retries ({max_retries}) for URL {url}") def get(url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs): diff --git a/api/core/model_runtime/model_providers/bedrock/get_bedrock_client.py b/api/core/model_runtime/model_providers/bedrock/get_bedrock_client.py new file mode 100644 index 0000000000..a19ffbb20a --- /dev/null +++ b/api/core/model_runtime/model_providers/bedrock/get_bedrock_client.py @@ -0,0 +1,21 @@ +import boto3 +from botocore.config import Config + + +def get_bedrock_client(service_name, credentials=None): + client_config = Config(region_name=credentials["aws_region"]) + aws_access_key_id = credentials["aws_access_key_id"] + aws_secret_access_key = credentials["aws_secret_access_key"] + if aws_access_key_id and 
aws_secret_access_key: + # use aksk to call bedrock + client = boto3.client( + service_name=service_name, + config=client_config, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + ) + else: + # use iam without aksk to call + client = boto3.client(service_name=service_name, config=client_config) + + return client diff --git a/api/core/model_runtime/model_providers/bedrock/llm/llm.py b/api/core/model_runtime/model_providers/bedrock/llm/llm.py index e6e8a765ee..75ed7ad624 100644 --- a/api/core/model_runtime/model_providers/bedrock/llm/llm.py +++ b/api/core/model_runtime/model_providers/bedrock/llm/llm.py @@ -40,6 +40,7 @@ from core.model_runtime.errors.invoke import ( ) from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel +from core.model_runtime.model_providers.bedrock.get_bedrock_client import get_bedrock_client logger = logging.getLogger(__name__) ANTHROPIC_BLOCK_MODE_PROMPT = """You should always follow the instructions and output a valid {{block}} object. @@ -173,13 +174,7 @@ class BedrockLargeLanguageModel(LargeLanguageModel): :param stream: is stream response :return: full response or stream response chunk generator result """ - bedrock_client = boto3.client( - service_name="bedrock-runtime", - aws_access_key_id=credentials.get("aws_access_key_id"), - aws_secret_access_key=credentials.get("aws_secret_access_key"), - region_name=credentials["aws_region"], - ) - + bedrock_client = get_bedrock_client("bedrock-runtime", credentials) system, prompt_message_dicts = self._convert_converse_prompt_messages(prompt_messages) inference_config, additional_model_fields = self._convert_converse_api_model_parameters(model_parameters, stop) diff --git a/api/core/model_runtime/model_providers/bedrock/rerank/rerank.py b/api/core/model_runtime/model_providers/bedrock/rerank/rerank.py index 397f65e8c9..e134db646f 100644 --- a/api/core/model_runtime/model_providers/bedrock/rerank/rerank.py +++ b/api/core/model_runtime/model_providers/bedrock/rerank/rerank.py @@ -1,8 +1,5 @@ from typing import Optional -import boto3 -from botocore.config import Config - from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult from core.model_runtime.errors.invoke import ( InvokeAuthorizationError, @@ -14,6 +11,7 @@ from core.model_runtime.errors.invoke import ( ) from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.__base.rerank_model import RerankModel +from core.model_runtime.model_providers.bedrock.get_bedrock_client import get_bedrock_client class BedrockRerankModel(RerankModel): @@ -48,13 +46,7 @@ class BedrockRerankModel(RerankModel): return RerankResult(model=model, docs=docs) # initialize client - client_config = Config(region_name=credentials["aws_region"]) - bedrock_runtime = boto3.client( - service_name="bedrock-agent-runtime", - config=client_config, - aws_access_key_id=credentials.get("aws_access_key_id", ""), - aws_secret_access_key=credentials.get("aws_secret_access_key"), - ) + bedrock_runtime = get_bedrock_client("bedrock-agent-runtime", credentials) queries = [{"type": "TEXT", "textQuery": {"text": query}}] text_sources = [] for text in docs: diff --git a/api/core/model_runtime/model_providers/bedrock/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/bedrock/text_embedding/text_embedding.py index 2f998d8bda..5505797f76 100644 --- 
a/api/core/model_runtime/model_providers/bedrock/text_embedding/text_embedding.py +++ b/api/core/model_runtime/model_providers/bedrock/text_embedding/text_embedding.py @@ -3,8 +3,6 @@ import logging import time from typing import Optional -import boto3 -from botocore.config import Config from botocore.exceptions import ( ClientError, EndpointConnectionError, @@ -25,6 +23,7 @@ from core.model_runtime.errors.invoke import ( InvokeServerUnavailableError, ) from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel +from core.model_runtime.model_providers.bedrock.get_bedrock_client import get_bedrock_client logger = logging.getLogger(__name__) @@ -48,14 +47,7 @@ class BedrockTextEmbeddingModel(TextEmbeddingModel): :param input_type: input type :return: embeddings result """ - client_config = Config(region_name=credentials["aws_region"]) - - bedrock_runtime = boto3.client( - service_name="bedrock-runtime", - config=client_config, - aws_access_key_id=credentials.get("aws_access_key_id"), - aws_secret_access_key=credentials.get("aws_secret_access_key"), - ) + bedrock_runtime = get_bedrock_client("bedrock-runtime", credentials) embeddings = [] token_usage = 0 diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-2.0-flash-exp.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-2.0-flash-exp.yaml new file mode 100644 index 0000000000..bcd59623a7 --- /dev/null +++ b/api/core/model_runtime/model_providers/google/llm/gemini-2.0-flash-exp.yaml @@ -0,0 +1,39 @@ +model: gemini-2.0-flash-exp +label: + en_US: Gemini 2.0 Flash Exp +model_type: llm +features: + - agent-thought + - vision + - tool-call + - stream-tool-call + - document +model_properties: + mode: chat + context_size: 1048576 +parameter_rules: + - name: temperature + use_template: temperature + - name: top_p + use_template: top_p + - name: top_k + label: + zh_Hans: 取样数量 + en_US: Top k + type: int + help: + zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 + en_US: Only sample from the top K options for each subsequent token. + required: false + - name: max_output_tokens + use_template: max_tokens + default: 8192 + min: 1 + max: 8192 + - name: json_schema + use_template: json_schema +pricing: + input: '0.00' + output: '0.00' + unit: '0.000001' + currency: USD diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-exp-1206.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-exp-1206.yaml new file mode 100644 index 0000000000..1743d8b968 --- /dev/null +++ b/api/core/model_runtime/model_providers/google/llm/gemini-exp-1206.yaml @@ -0,0 +1,38 @@ +model: gemini-exp-1206 +label: + en_US: Gemini exp 1206 +model_type: llm +features: + - agent-thought + - vision + - tool-call + - stream-tool-call +model_properties: + mode: chat + context_size: 2097152 +parameter_rules: + - name: temperature + use_template: temperature + - name: top_p + use_template: top_p + - name: top_k + label: + zh_Hans: 取样数量 + en_US: Top k + type: int + help: + zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 + en_US: Only sample from the top K options for each subsequent token. 
+ required: false + - name: max_output_tokens + use_template: max_tokens + default: 8192 + min: 1 + max: 8192 + - name: json_schema + use_template: json_schema +pricing: + input: '0.00' + output: '0.00' + unit: '0.000001' + currency: USD diff --git a/api/core/model_runtime/model_providers/groq/llm/_position.yaml b/api/core/model_runtime/model_providers/groq/llm/_position.yaml index 0613b19f87..279c1bcbe5 100644 --- a/api/core/model_runtime/model_providers/groq/llm/_position.yaml +++ b/api/core/model_runtime/model_providers/groq/llm/_position.yaml @@ -1,4 +1,5 @@ - llama-3.1-405b-reasoning +- llama-3.3-70b-versatile - llama-3.1-70b-versatile - llama-3.1-8b-instant - llama3-70b-8192 diff --git a/api/core/model_runtime/model_providers/groq/llm/gemma-7b-it.yaml b/api/core/model_runtime/model_providers/groq/llm/gemma-7b-it.yaml new file mode 100644 index 0000000000..02f84e95f6 --- /dev/null +++ b/api/core/model_runtime/model_providers/groq/llm/gemma-7b-it.yaml @@ -0,0 +1,25 @@ +model: gemma-7b-it +label: + zh_Hans: Gemma 7B Instruction Tuned + en_US: Gemma 7B Instruction Tuned +model_type: llm +features: + - agent-thought +model_properties: + mode: chat + context_size: 8192 +parameter_rules: + - name: temperature + use_template: temperature + - name: top_p + use_template: top_p + - name: max_tokens + use_template: max_tokens + default: 512 + min: 1 + max: 8192 +pricing: + input: '0.05' + output: '0.1' + unit: '0.000001' + currency: USD diff --git a/api/core/model_runtime/model_providers/groq/llm/gemma2-9b-it.yaml b/api/core/model_runtime/model_providers/groq/llm/gemma2-9b-it.yaml new file mode 100644 index 0000000000..dad496f668 --- /dev/null +++ b/api/core/model_runtime/model_providers/groq/llm/gemma2-9b-it.yaml @@ -0,0 +1,25 @@ +model: gemma2-9b-it +label: + zh_Hans: Gemma 2 9B Instruction Tuned + en_US: Gemma 2 9B Instruction Tuned +model_type: llm +features: + - agent-thought +model_properties: + mode: chat + context_size: 8192 +parameter_rules: + - name: temperature + use_template: temperature + - name: top_p + use_template: top_p + - name: max_tokens + use_template: max_tokens + default: 512 + min: 1 + max: 8192 +pricing: + input: '0.05' + output: '0.1' + unit: '0.000001' + currency: USD diff --git a/api/core/model_runtime/model_providers/groq/llm/llama-3.1-70b-versatile.yaml b/api/core/model_runtime/model_providers/groq/llm/llama-3.1-70b-versatile.yaml index ab5f6ab05e..01323a1b8a 100644 --- a/api/core/model_runtime/model_providers/groq/llm/llama-3.1-70b-versatile.yaml +++ b/api/core/model_runtime/model_providers/groq/llm/llama-3.1-70b-versatile.yaml @@ -1,7 +1,8 @@ model: llama-3.1-70b-versatile +deprecated: true label: - zh_Hans: Llama-3.1-70b-versatile - en_US: Llama-3.1-70b-versatile + zh_Hans: Llama-3.1-70b-versatile (DEPRECATED) + en_US: Llama-3.1-70b-versatile (DEPRECATED) model_type: llm features: - agent-thought diff --git a/api/core/model_runtime/model_providers/groq/llm/llama-3.2-11b-text-preview.yaml b/api/core/model_runtime/model_providers/groq/llm/llama-3.2-11b-text-preview.yaml index 019d453723..3f30d81ae4 100644 --- a/api/core/model_runtime/model_providers/groq/llm/llama-3.2-11b-text-preview.yaml +++ b/api/core/model_runtime/model_providers/groq/llm/llama-3.2-11b-text-preview.yaml @@ -1,4 +1,5 @@ model: llama-3.2-11b-text-preview +deprecated: true label: zh_Hans: Llama 3.2 11B Text (Preview) en_US: Llama 3.2 11B Text (Preview) diff --git a/api/core/model_runtime/model_providers/groq/llm/llama-3.2-90b-text-preview.yaml 
b/api/core/model_runtime/model_providers/groq/llm/llama-3.2-90b-text-preview.yaml
index 3b34e7c079..0391a7c890 100644
--- a/api/core/model_runtime/model_providers/groq/llm/llama-3.2-90b-text-preview.yaml
+++ b/api/core/model_runtime/model_providers/groq/llm/llama-3.2-90b-text-preview.yaml
@@ -1,4 +1,5 @@
 model: llama-3.2-90b-text-preview
+deprecated: true
 label:
   zh_Hans: Llama 3.2 90B Text (Preview)
   en_US: Llama 3.2 90B Text (Preview)
diff --git a/api/core/model_runtime/model_providers/groq/llm/llama-3.3-70b-specdec.yaml b/api/core/model_runtime/model_providers/groq/llm/llama-3.3-70b-specdec.yaml
new file mode 100644
index 0000000000..bda9ec530a
--- /dev/null
+++ b/api/core/model_runtime/model_providers/groq/llm/llama-3.3-70b-specdec.yaml
@@ -0,0 +1,25 @@
+model: llama-3.3-70b-specdec
+label:
+  zh_Hans: Llama 3.3 70B Specdec
+  en_US: Llama 3.3 70B Specdec
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 131072
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+  - name: top_p
+    use_template: top_p
+  - name: max_tokens
+    use_template: max_tokens
+    default: 1024
+    min: 1
+    max: 32768
+pricing:
+  input: "0.05"
+  output: "0.1"
+  unit: "0.000001"
+  currency: USD
diff --git a/api/core/model_runtime/model_providers/groq/llm/llama-3.3-70b-versatile.yaml b/api/core/model_runtime/model_providers/groq/llm/llama-3.3-70b-versatile.yaml
new file mode 100644
index 0000000000..eb609f4db7
--- /dev/null
+++ b/api/core/model_runtime/model_providers/groq/llm/llama-3.3-70b-versatile.yaml
@@ -0,0 +1,25 @@
+model: llama-3.3-70b-versatile
+label:
+  zh_Hans: Llama 3.3 70B Versatile
+  en_US: Llama 3.3 70B Versatile
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 131072
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+  - name: top_p
+    use_template: top_p
+  - name: max_tokens
+    use_template: max_tokens
+    default: 1024
+    min: 1
+    max: 32768
+pricing:
+  input: "0.05"
+  output: "0.1"
+  unit: "0.000001"
+  currency: USD
diff --git a/api/core/model_runtime/model_providers/groq/llm/llama3-groq-70b-8192-tool-use-preview.yaml b/api/core/model_runtime/model_providers/groq/llm/llama3-groq-70b-8192-tool-use-preview.yaml
new file mode 100644
index 0000000000..32ccbf1f4d
--- /dev/null
+++ b/api/core/model_runtime/model_providers/groq/llm/llama3-groq-70b-8192-tool-use-preview.yaml
@@ -0,0 +1,25 @@
+model: llama3-groq-70b-8192-tool-use-preview
+label:
+  zh_Hans: Llama3-groq-70b-8192-tool-use (PREVIEW)
+  en_US: Llama3-groq-70b-8192-tool-use (PREVIEW)
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 8192
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+  - name: top_p
+    use_template: top_p
+  - name: max_tokens
+    use_template: max_tokens
+    default: 512
+    min: 1
+    max: 8192
+pricing:
+  input: '0.05'
+  output: '0.08'
+  unit: '0.000001'
+  currency: USD
diff --git a/api/core/model_runtime/model_providers/minimax/llm/llm.py b/api/core/model_runtime/model_providers/minimax/llm/llm.py
index ce7c00f7da..ca9b243c92 100644
--- a/api/core/model_runtime/model_providers/minimax/llm/llm.py
+++ b/api/core/model_runtime/model_providers/minimax/llm/llm.py
@@ -35,6 +35,7 @@ class MinimaxLargeLanguageModel(LargeLanguageModel):
     model_apis = {
         "abab7-chat-preview": MinimaxChatCompletionPro,
+        "abab6.5t-chat": MinimaxChatCompletionPro,
         "abab6.5s-chat": MinimaxChatCompletionPro,
"abab6.5-chat": MinimaxChatCompletionPro, "abab6-chat": MinimaxChatCompletionPro, diff --git a/api/core/model_runtime/model_providers/mistralai/llm/_position.yaml b/api/core/model_runtime/model_providers/mistralai/llm/_position.yaml index bdb06b7fff..5702797ac4 100644 --- a/api/core/model_runtime/model_providers/mistralai/llm/_position.yaml +++ b/api/core/model_runtime/model_providers/mistralai/llm/_position.yaml @@ -1,3 +1,5 @@ +- pixtral-large-latest +- pixtral-large-2411 - pixtral-12b-2409 - codestral-latest - mistral-embed diff --git a/api/core/model_runtime/model_providers/mistralai/llm/pixtral-12b-2409.yaml b/api/core/model_runtime/model_providers/mistralai/llm/pixtral-12b-2409.yaml index 0b002b49ca..9eb663bc31 100644 --- a/api/core/model_runtime/model_providers/mistralai/llm/pixtral-12b-2409.yaml +++ b/api/core/model_runtime/model_providers/mistralai/llm/pixtral-12b-2409.yaml @@ -5,6 +5,7 @@ label: model_type: llm features: - agent-thought + - vision model_properties: mode: chat context_size: 128000 @@ -21,7 +22,7 @@ parameter_rules: max: 1 - name: max_tokens use_template: max_tokens - default: 1024 + default: 8192 min: 1 max: 8192 - name: safe_prompt diff --git a/api/core/model_runtime/model_providers/mistralai/llm/pixtral-large-2411.yaml b/api/core/model_runtime/model_providers/mistralai/llm/pixtral-large-2411.yaml new file mode 100644 index 0000000000..606c9aa331 --- /dev/null +++ b/api/core/model_runtime/model_providers/mistralai/llm/pixtral-large-2411.yaml @@ -0,0 +1,52 @@ +model: pixtral-large-2411 +label: + zh_Hans: pixtral-large-2411 + en_US: pixtral-large-2411 +model_type: llm +features: + - agent-thought + - vision +model_properties: + mode: chat + context_size: 128000 +parameter_rules: + - name: temperature + use_template: temperature + default: 0.7 + min: 0 + max: 1 + - name: top_p + use_template: top_p + default: 1 + min: 0 + max: 1 + - name: max_tokens + use_template: max_tokens + default: 8192 + min: 1 + max: 8192 + - name: safe_prompt + default: false + type: boolean + help: + en_US: Whether to inject a safety prompt before all conversations. + zh_Hans: 是否开启提示词审查 + label: + en_US: SafePrompt + zh_Hans: 提示词审查 + - name: random_seed + type: int + help: + en_US: The seed to use for random sampling. If set, different calls will generate deterministic results. + zh_Hans: 当开启随机数种子以后,你可以通过指定一个固定的种子来使得回答结果更加稳定 + label: + en_US: RandomSeed + zh_Hans: 随机数种子 + default: 0 + min: 0 + max: 2147483647 +pricing: + input: '0.008' + output: '0.024' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/mistralai/llm/pixtral-large-latest.yaml b/api/core/model_runtime/model_providers/mistralai/llm/pixtral-large-latest.yaml new file mode 100644 index 0000000000..4f0ed5ae5d --- /dev/null +++ b/api/core/model_runtime/model_providers/mistralai/llm/pixtral-large-latest.yaml @@ -0,0 +1,52 @@ +model: pixtral-large-latest +label: + zh_Hans: pixtral-large-latest + en_US: pixtral-large-latest +model_type: llm +features: + - agent-thought + - vision +model_properties: + mode: chat + context_size: 128000 +parameter_rules: + - name: temperature + use_template: temperature + default: 0.7 + min: 0 + max: 1 + - name: top_p + use_template: top_p + default: 1 + min: 0 + max: 1 + - name: max_tokens + use_template: max_tokens + default: 8192 + min: 1 + max: 8192 + - name: safe_prompt + default: false + type: boolean + help: + en_US: Whether to inject a safety prompt before all conversations. 
+ zh_Hans: 是否开启提示词审查 + label: + en_US: SafePrompt + zh_Hans: 提示词审查 + - name: random_seed + type: int + help: + en_US: The seed to use for random sampling. If set, different calls will generate deterministic results. + zh_Hans: 当开启随机数种子以后,你可以通过指定一个固定的种子来使得回答结果更加稳定 + label: + en_US: RandomSeed + zh_Hans: 随机数种子 + default: 0 + min: 0 + max: 2147483647 +pricing: + input: '0.008' + output: '0.024' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/ollama/llm/llm.py b/api/core/model_runtime/model_providers/ollama/llm/llm.py index 094a674645..3ae728d4b3 100644 --- a/api/core/model_runtime/model_providers/ollama/llm/llm.py +++ b/api/core/model_runtime/model_providers/ollama/llm/llm.py @@ -181,9 +181,11 @@ class OllamaLargeLanguageModel(LargeLanguageModel): # prepare the payload for a simple ping to the model data = {"model": model, "stream": stream} - if "format" in model_parameters: - data["format"] = model_parameters["format"] - del model_parameters["format"] + if format_schema := model_parameters.pop("format", None): + try: + data["format"] = format_schema if format_schema == "json" else json.loads(format_schema) + except json.JSONDecodeError as e: + raise InvokeBadRequestError(f"Invalid format schema: {str(e)}") if "keep_alive" in model_parameters: data["keep_alive"] = model_parameters["keep_alive"] @@ -733,12 +735,12 @@ class OllamaLargeLanguageModel(LargeLanguageModel): ParameterRule( name="format", label=I18nObject(en_US="Format", zh_Hans="返回格式"), - type=ParameterType.STRING, + type=ParameterType.TEXT, + default="json", help=I18nObject( - en_US="the format to return a response in. Currently the only accepted value is json.", - zh_Hans="返回响应的格式。目前唯一接受的值是json。", + en_US="the format to return a response in. Format can be `json` or a JSON schema.", + zh_Hans="返回响应的格式。目前接受的值是字符串`json`或JSON schema.", ), - options=["json"], ), ], pricing=PriceConfig( diff --git a/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py b/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py index 26c090d30e..8e07d56f45 100644 --- a/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py +++ b/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py @@ -478,6 +478,10 @@ class OAIAPICompatLargeLanguageModel(_CommonOaiApiCompat, LargeLanguageModel): usage=usage, ) break + # handle the error here. 
for issue #11629 + if chunk_json.get("error") and chunk_json.get("choices") is None: + raise ValueError(chunk_json.get("error")) + if chunk_json: if u := chunk_json.get("usage"): usage = u diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/_position.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/_position.yaml index b52df3e4e3..8703a97edd 100644 --- a/api/core/model_runtime/model_providers/siliconflow/llm/_position.yaml +++ b/api/core/model_runtime/model_providers/siliconflow/llm/_position.yaml @@ -1,4 +1,5 @@ - Tencent/Hunyuan-A52B-Instruct +- Qwen/QwQ-32B-Preview - Qwen/Qwen2.5-72B-Instruct - Qwen/Qwen2.5-32B-Instruct - Qwen/Qwen2.5-14B-Instruct @@ -19,6 +20,7 @@ - 01-ai/Yi-1.5-6B-Chat - internlm/internlm2_5-20b-chat - internlm/internlm2_5-7b-chat +- meta-llama/Llama-3.3-70B-Instruct - meta-llama/Meta-Llama-3.1-405B-Instruct - meta-llama/Meta-Llama-3.1-70B-Instruct - meta-llama/Meta-Llama-3.1-8B-Instruct diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/meta-llama-3.3-70b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/meta-llama-3.3-70b-instruct.yaml new file mode 100644 index 0000000000..9373a8f4ca --- /dev/null +++ b/api/core/model_runtime/model_providers/siliconflow/llm/meta-llama-3.3-70b-instruct.yaml @@ -0,0 +1,53 @@ +model: meta-llama/Llama-3.3-70B-Instruct +label: + en_US: meta-llama/Llama-3.3-70B-Instruct +model_type: llm +features: + - agent-thought + - tool-call + - stream-tool-call +model_properties: + mode: chat + context_size: 32768 +parameter_rules: + - name: temperature + use_template: temperature + - name: max_tokens + use_template: max_tokens + type: int + default: 512 + min: 1 + max: 4096 + help: + zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 + en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. + - name: top_p + use_template: top_p + - name: top_k + label: + zh_Hans: 取样数量 + en_US: Top k + type: int + help: + zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 + en_US: Only sample from the top K options for each subsequent token. + required: false + - name: frequency_penalty + use_template: frequency_penalty + - name: response_format + label: + zh_Hans: 回复格式 + en_US: Response Format + type: string + help: + zh_Hans: 指定模型必须输出的格式 + en_US: specifying the format that the model must output + required: false + options: + - text + - json_object +pricing: + input: '4.13' + output: '4.13' + unit: '0.000001' + currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/qwen-qwq-32B-preview.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/qwen-qwq-32B-preview.yaml new file mode 100644 index 0000000000..c949de4d75 --- /dev/null +++ b/api/core/model_runtime/model_providers/siliconflow/llm/qwen-qwq-32B-preview.yaml @@ -0,0 +1,53 @@ +model: Qwen/QwQ-32B-Preview +label: + en_US: Qwen/QwQ-32B-Preview +model_type: llm +features: + - agent-thought + - tool-call + - stream-tool-call +model_properties: + mode: chat + context_size: 32768 +parameter_rules: + - name: temperature + use_template: temperature + - name: max_tokens + use_template: max_tokens + type: int + default: 512 + min: 1 + max: 4096 + help: + zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 + en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. 
+ - name: top_p + use_template: top_p + - name: top_k + label: + zh_Hans: 取样数量 + en_US: Top k + type: int + help: + zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 + en_US: Only sample from the top K options for each subsequent token. + required: false + - name: frequency_penalty + use_template: frequency_penalty + - name: response_format + label: + zh_Hans: 回复格式 + en_US: Response Format + type: string + help: + zh_Hans: 指定模型必须输出的格式 + en_US: specifying the format that the model must output + required: false + options: + - text + - json_object +pricing: + input: '1.26' + output: '1.26' + unit: '0.000001' + currency: RMB diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max-0809.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max-0809.yaml index 94b6666d05..5970fec5e6 100644 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max-0809.yaml +++ b/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max-0809.yaml @@ -59,8 +59,6 @@ parameter_rules: help: zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: response_format - use_template: response_format - name: repetition_penalty required: false type: float diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max.yaml index b6172c1cbc..c5c8dc5f7c 100644 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max.yaml +++ b/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-max.yaml @@ -59,8 +59,6 @@ parameter_rules: help: zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: response_format - use_template: response_format - name: repetition_penalty required: false type: float diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus-0201.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus-0201.yaml index 03cb039d15..9d9d6c6d11 100644 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus-0201.yaml +++ b/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus-0201.yaml @@ -58,8 +58,6 @@ parameter_rules: help: zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. 
- - name: response_format - use_template: response_format - name: repetition_penalty required: false type: float diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus-0809.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus-0809.yaml index 0be4b68f4f..2fab6db648 100644 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus-0809.yaml +++ b/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus-0809.yaml @@ -59,8 +59,6 @@ parameter_rules: help: zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: response_format - use_template: response_format - name: repetition_penalty required: false type: float diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus.yaml index 6c8a8121c6..61820ca853 100644 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus.yaml +++ b/api/core/model_runtime/model_providers/tongyi/llm/qwen-vl-plus.yaml @@ -59,8 +59,6 @@ parameter_rules: help: zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 en_US: The random number seed used when generating, the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. - - name: response_format - use_template: response_format - name: repetition_penalty required: false type: float diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-2.0-flash-exp.yaml b/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-2.0-flash-exp.yaml new file mode 100644 index 0000000000..bcd59623a7 --- /dev/null +++ b/api/core/model_runtime/model_providers/vertex_ai/llm/gemini-2.0-flash-exp.yaml @@ -0,0 +1,39 @@ +model: gemini-2.0-flash-exp +label: + en_US: Gemini 2.0 Flash Exp +model_type: llm +features: + - agent-thought + - vision + - tool-call + - stream-tool-call + - document +model_properties: + mode: chat + context_size: 1048576 +parameter_rules: + - name: temperature + use_template: temperature + - name: top_p + use_template: top_p + - name: top_k + label: + zh_Hans: 取样数量 + en_US: Top k + type: int + help: + zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 + en_US: Only sample from the top K options for each subsequent token. 
+ required: false + - name: max_output_tokens + use_template: max_tokens + default: 8192 + min: 1 + max: 8192 + - name: json_schema + use_template: json_schema +pricing: + input: '0.00' + output: '0.00' + unit: '0.000001' + currency: USD diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/llm.py b/api/core/model_runtime/model_providers/vertex_ai/llm/llm.py index 1469de6055..934195cc3d 100644 --- a/api/core/model_runtime/model_providers/vertex_ai/llm/llm.py +++ b/api/core/model_runtime/model_providers/vertex_ai/llm/llm.py @@ -104,13 +104,14 @@ class VertexAiLargeLanguageModel(LargeLanguageModel): """ # use Anthropic official SDK references # - https://github.com/anthropics/anthropic-sdk-python - service_account_info = json.loads(base64.b64decode(credentials["vertex_service_account_key"])) + service_account_key = credentials.get("vertex_service_account_key", "") project_id = credentials["vertex_project_id"] SCOPES = ["https://www.googleapis.com/auth/cloud-platform"] token = "" # get access token from service account credential - if service_account_info: + if service_account_key: + service_account_info = json.loads(base64.b64decode(service_account_key)) credentials = service_account.Credentials.from_service_account_info(service_account_info, scopes=SCOPES) request = google.auth.transport.requests.Request() credentials.refresh(request) @@ -478,10 +479,11 @@ class VertexAiLargeLanguageModel(LargeLanguageModel): if stop: config_kwargs["stop_sequences"] = stop - service_account_info = json.loads(base64.b64decode(credentials["vertex_service_account_key"])) + service_account_key = credentials.get("vertex_service_account_key", "") project_id = credentials["vertex_project_id"] location = credentials["vertex_location"] - if service_account_info: + if service_account_key: + service_account_info = json.loads(base64.b64decode(service_account_key)) service_accountSA = service_account.Credentials.from_service_account_info(service_account_info) aiplatform.init(credentials=service_accountSA, project=project_id, location=location) else: diff --git a/api/core/model_runtime/model_providers/vertex_ai/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/vertex_ai/text_embedding/text_embedding.py index 9cd0c78d99..eb54941e08 100644 --- a/api/core/model_runtime/model_providers/vertex_ai/text_embedding/text_embedding.py +++ b/api/core/model_runtime/model_providers/vertex_ai/text_embedding/text_embedding.py @@ -48,10 +48,11 @@ class VertexAiTextEmbeddingModel(_CommonVertexAi, TextEmbeddingModel): :param input_type: input type :return: embeddings result """ - service_account_info = json.loads(base64.b64decode(credentials["vertex_service_account_key"])) + service_account_key = credentials.get("vertex_service_account_key", "") project_id = credentials["vertex_project_id"] location = credentials["vertex_location"] - if service_account_info: + if service_account_key: + service_account_info = json.loads(base64.b64decode(service_account_key)) service_accountSA = service_account.Credentials.from_service_account_info(service_account_info) aiplatform.init(credentials=service_accountSA, project=project_id, location=location) else: @@ -100,10 +101,11 @@ class VertexAiTextEmbeddingModel(_CommonVertexAi, TextEmbeddingModel): :return: """ try: - service_account_info = json.loads(base64.b64decode(credentials["vertex_service_account_key"])) + service_account_key = credentials.get("vertex_service_account_key", "") project_id = credentials["vertex_project_id"] location = credentials["vertex_location"] 
- if service_account_info: + if service_account_key: + service_account_info = json.loads(base64.b64decode(service_account_key)) service_accountSA = service_account.Credentials.from_service_account_info(service_account_info) aiplatform.init(credentials=service_accountSA, project=project_id, location=location) else: diff --git a/api/core/model_runtime/model_providers/x/llm/grok-2-1212.yaml b/api/core/model_runtime/model_providers/x/llm/grok-2-1212.yaml new file mode 100644 index 0000000000..24ea716a98 --- /dev/null +++ b/api/core/model_runtime/model_providers/x/llm/grok-2-1212.yaml @@ -0,0 +1,66 @@ +model: grok-2-1212 +label: + en_US: grok-2-1212 +model_type: llm +features: + - agent-thought + - tool-call + - multi-tool-call + - stream-tool-call +model_properties: + mode: chat + context_size: 131072 +parameter_rules: + - name: temperature + label: + en_US: "Temperature" + zh_Hans: "采样温度" + type: float + default: 0.7 + min: 0.0 + max: 2.0 + precision: 1 + required: true + help: + en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." + zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" + + - name: top_p + label: + en_US: "Top P" + zh_Hans: "Top P" + type: float + default: 0.7 + min: 0.0 + max: 1.0 + precision: 1 + required: true + help: + en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." + zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" + + - name: frequency_penalty + use_template: frequency_penalty + label: + en_US: "Frequency Penalty" + zh_Hans: "频率惩罚" + type: float + default: 0 + min: 0 + max: 2.0 + precision: 1 + required: false + help: + en_US: "Number between 0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim." + zh_Hans: "介于0和2.0之间的数字。正值会根据新标记在文本中迄今为止的现有频率来惩罚它们,从而降低模型一字不差地重复同一句话的可能性。" + + - name: user + use_template: text + label: + en_US: "User" + zh_Hans: "用户" + type: string + required: false + help: + en_US: "Used to track and differentiate conversation requests from different users." 
+ zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/x/llm/grok-2-vision-1212.yaml b/api/core/model_runtime/model_providers/x/llm/grok-2-vision-1212.yaml new file mode 100644 index 0000000000..f224fa5757 --- /dev/null +++ b/api/core/model_runtime/model_providers/x/llm/grok-2-vision-1212.yaml @@ -0,0 +1,64 @@ +model: grok-2-vision-1212 +label: + en_US: grok-2-vision-1212 +model_type: llm +features: + - agent-thought + - vision +model_properties: + mode: chat + context_size: 8192 +parameter_rules: + - name: temperature + label: + en_US: "Temperature" + zh_Hans: "采样温度" + type: float + default: 0.7 + min: 0.0 + max: 2.0 + precision: 1 + required: true + help: + en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." + zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" + + - name: top_p + label: + en_US: "Top P" + zh_Hans: "Top P" + type: float + default: 0.7 + min: 0.0 + max: 1.0 + precision: 1 + required: true + help: + en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." + zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" + + - name: frequency_penalty + use_template: frequency_penalty + label: + en_US: "Frequency Penalty" + zh_Hans: "频率惩罚" + type: float + default: 0 + min: 0 + max: 2.0 + precision: 1 + required: false + help: + en_US: "Number between 0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim." + zh_Hans: "介于0和2.0之间的数字。正值会根据新标记在文本中迄今为止的现有频率来惩罚它们,从而降低模型一字不差地重复同一句话的可能性。" + + - name: user + use_template: text + label: + en_US: "User" + zh_Hans: "用户" + type: string + required: false + help: + en_US: "Used to track and differentiate conversation requests from different users." 
+ zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/x/llm/grok-beta.yaml b/api/core/model_runtime/model_providers/x/llm/grok-beta.yaml index bb71de2bad..7f722539d9 100644 --- a/api/core/model_runtime/model_providers/x/llm/grok-beta.yaml +++ b/api/core/model_runtime/model_providers/x/llm/grok-beta.yaml @@ -1,6 +1,6 @@ model: grok-beta label: - en_US: Grok Beta + en_US: grok-beta model_type: llm features: - agent-thought diff --git a/api/core/model_runtime/model_providers/x/llm/grok-vision-beta.yaml b/api/core/model_runtime/model_providers/x/llm/grok-vision-beta.yaml index 844f0520bc..1d81282534 100644 --- a/api/core/model_runtime/model_providers/x/llm/grok-vision-beta.yaml +++ b/api/core/model_runtime/model_providers/x/llm/grok-vision-beta.yaml @@ -1,6 +1,6 @@ model: grok-vision-beta label: - en_US: Grok Vision Beta + en_US: grok-vision-beta model_type: llm features: - agent-thought diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/glm_4v_flash.yaml b/api/core/model_runtime/model_providers/zhipuai/llm/glm_4v_flash.yaml new file mode 100644 index 0000000000..c2047b2cd3 --- /dev/null +++ b/api/core/model_runtime/model_providers/zhipuai/llm/glm_4v_flash.yaml @@ -0,0 +1,52 @@ +model: glm-4v-flash +label: + en_US: glm-4v-flash +model_type: llm +model_properties: + mode: chat + context_size: 2048 +features: + - vision +parameter_rules: + - name: temperature + use_template: temperature + default: 0.95 + min: 0.0 + max: 1.0 + help: + zh_Hans: 采样温度,控制输出的随机性,必须为正数取值范围是:(0.0,1.0],不能等于 0,默认值为 0.95 值越大,会使输出更随机,更具创造性;值越小,输出会更加稳定或确定建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 + en_US: Sampling temperature, controls the randomness of the output, must be a positive number. The value range is (0.0,1.0], which cannot be equal to 0. The default value is 0.95. The larger the value, the more random and creative the output will be; the smaller the value, The output will be more stable or certain. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. + - name: top_p + use_template: top_p + default: 0.6 + help: + zh_Hans: 用温度取样的另一种方法,称为核取样取值范围是:(0.0, 1.0) 开区间,不能等于 0 或 1,默认值为 0.7 模型考虑具有 top_p 概率质量tokens的结果例如:0.1 意味着模型解码器只考虑从前 10% 的概率的候选集中取 tokens 建议您根据应用场景调整 top_p 或 temperature 参数,但不要同时调整两个参数。 + en_US: Another method of temperature sampling is called kernel sampling. The value range is (0.0, 1.0) open interval, which cannot be equal to 0 or 1. The default value is 0.7. The model considers the results with top_p probability mass tokens. For example 0.1 means The model decoder only considers tokens from the candidate set with the top 10% probability. It is recommended that you adjust the top_p or temperature parameters according to the application scenario, but do not adjust both parameters at the same time. + - name: do_sample + label: + zh_Hans: 采样策略 + en_US: Sampling strategy + type: boolean + help: + zh_Hans: do_sample 为 true 时启用采样策略,do_sample 为 false 时采样策略 temperature、top_p 将不生效。默认值为 true。 + en_US: When `do_sample` is set to true, the sampling strategy is enabled. When `do_sample` is set to false, the sampling strategies such as `temperature` and `top_p` will not take effect. The default value is true. 
+ default: true + - name: max_tokens + use_template: max_tokens + default: 1024 + min: 1 + max: 1024 + - name: web_search + type: boolean + label: + zh_Hans: 联网搜索 + en_US: Web Search + default: false + help: + zh_Hans: 模型内置了互联网搜索服务,该参数控制模型在生成文本时是否参考使用互联网搜索结果。启用互联网搜索,模型会将搜索结果作为文本生成过程中的参考信息,但模型会基于其内部逻辑“自行判断”是否使用互联网搜索结果。 + en_US: The model has a built-in Internet search service. This parameter controls whether the model refers to Internet search results when generating text. When Internet search is enabled, the model will use the search results as reference information in the text generation process, but the model will "judge" whether to use Internet search results based on its internal logic. +pricing: + input: '0.00' + output: '0.00' + unit: '0.000001' + currency: RMB diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/llm.py b/api/core/model_runtime/model_providers/zhipuai/llm/llm.py index e0601d681c..59861507e4 100644 --- a/api/core/model_runtime/model_providers/zhipuai/llm/llm.py +++ b/api/core/model_runtime/model_providers/zhipuai/llm/llm.py @@ -144,7 +144,7 @@ class ZhipuAILargeLanguageModel(_CommonZhipuaiAI, LargeLanguageModel): if copy_prompt_message.role in {PromptMessageRole.USER, PromptMessageRole.SYSTEM, PromptMessageRole.TOOL}: if isinstance(copy_prompt_message.content, list): # check if model is 'glm-4v' - if model not in {"glm-4v", "glm-4v-plus"}: + if not model.startswith("glm-4v"): # not support list message continue # get image and @@ -188,7 +188,7 @@ class ZhipuAILargeLanguageModel(_CommonZhipuaiAI, LargeLanguageModel): else: model_parameters["tools"] = [web_search_params] - if model in {"glm-4v", "glm-4v-plus"}: + if model.startswith("glm-4v"): params = self._construct_glm_4v_parameter(model, new_prompt_messages, model_parameters) else: params = {"model": model, "messages": [], **model_parameters} @@ -412,6 +412,8 @@ class ZhipuAILargeLanguageModel(_CommonZhipuaiAI, LargeLanguageModel): human_prompt = "\n\nHuman:" ai_prompt = "\n\nAssistant:" content = message.content + if isinstance(content, list): + content = "".join(c.data for c in content if c.type == PromptMessageContentType.TEXT) if isinstance(message, UserPromptMessage): message_text = f"{human_prompt} {content}" diff --git a/api/core/ops/langfuse_trace/langfuse_trace.py b/api/core/ops/langfuse_trace/langfuse_trace.py index 0cba40c51a..29fdebd8fe 100644 --- a/api/core/ops/langfuse_trace/langfuse_trace.py +++ b/api/core/ops/langfuse_trace/langfuse_trace.py @@ -4,7 +4,7 @@ import os from datetime import datetime, timedelta from typing import Optional -from langfuse import Langfuse +from langfuse import Langfuse # type: ignore from core.ops.base_trace_instance import BaseTraceInstance from core.ops.entities.config_entity import LangfuseConfig @@ -65,8 +65,11 @@ class LangFuseDataTrace(BaseTraceInstance): self.generate_name_trace(trace_info) def workflow_trace(self, trace_info: WorkflowTraceInfo): - trace_id = trace_info.workflow_app_log_id or trace_info.workflow_run_id + trace_id = trace_info.workflow_run_id user_id = trace_info.metadata.get("user_id") + metadata = trace_info.metadata + metadata["workflow_app_log_id"] = trace_info.workflow_app_log_id + if trace_info.message_id: trace_id = trace_info.message_id name = TraceTaskName.MESSAGE_TRACE.value @@ -76,22 +79,20 @@ class LangFuseDataTrace(BaseTraceInstance): name=name, input=trace_info.workflow_run_inputs, output=trace_info.workflow_run_outputs, - metadata=trace_info.metadata, + metadata=metadata, session_id=trace_info.conversation_id, 
tags=["message", "workflow"], - created_at=trace_info.start_time, - updated_at=trace_info.end_time, ) self.add_trace(langfuse_trace_data=trace_data) workflow_span_data = LangfuseSpan( - id=(trace_info.workflow_app_log_id or trace_info.workflow_run_id), + id=trace_info.workflow_run_id, name=TraceTaskName.WORKFLOW_TRACE.value, input=trace_info.workflow_run_inputs, output=trace_info.workflow_run_outputs, trace_id=trace_id, start_time=trace_info.start_time, end_time=trace_info.end_time, - metadata=trace_info.metadata, + metadata=metadata, level=LevelEnum.DEFAULT if trace_info.error == "" else LevelEnum.ERROR, status_message=trace_info.error or "", ) @@ -103,7 +104,7 @@ class LangFuseDataTrace(BaseTraceInstance): name=TraceTaskName.WORKFLOW_TRACE.value, input=trace_info.workflow_run_inputs, output=trace_info.workflow_run_outputs, - metadata=trace_info.metadata, + metadata=metadata, session_id=trace_info.conversation_id, tags=["workflow"], ) @@ -192,7 +193,7 @@ class LangFuseDataTrace(BaseTraceInstance): metadata=metadata, level=(LevelEnum.DEFAULT if status == "succeeded" else LevelEnum.ERROR), status_message=trace_info.error or "", - parent_observation_id=(trace_info.workflow_app_log_id or trace_info.workflow_run_id), + parent_observation_id=trace_info.workflow_run_id, ) else: span_data = LangfuseSpan( @@ -239,11 +240,13 @@ class LangFuseDataTrace(BaseTraceInstance): file_list = trace_info.file_list metadata = trace_info.metadata message_data = trace_info.message_data + if message_data is None: + return message_id = message_data.id user_id = message_data.from_account_id if message_data.from_end_user_id: - end_user_data: EndUser = ( + end_user_data: Optional[EndUser] = ( db.session.query(EndUser).filter(EndUser.id == message_data.from_end_user_id).first() ) if end_user_data is not None: @@ -300,6 +303,8 @@ class LangFuseDataTrace(BaseTraceInstance): self.add_generation(langfuse_generation_data) def moderation_trace(self, trace_info: ModerationTraceInfo): + if trace_info.message_data is None: + return span_data = LangfuseSpan( name=TraceTaskName.MODERATION_TRACE.value, input=trace_info.inputs, @@ -319,9 +324,11 @@ class LangFuseDataTrace(BaseTraceInstance): def suggested_question_trace(self, trace_info: SuggestedQuestionTraceInfo): message_data = trace_info.message_data + if message_data is None: + return generation_usage = GenerationUsage( total=len(str(trace_info.suggested_question)), - input=len(trace_info.inputs), + input=len(trace_info.inputs) if trace_info.inputs else 0, output=len(trace_info.suggested_question), unit=UnitEnum.CHARACTERS, ) @@ -342,6 +349,8 @@ class LangFuseDataTrace(BaseTraceInstance): self.add_generation(langfuse_generation_data=generation_data) def dataset_retrieval_trace(self, trace_info: DatasetRetrievalTraceInfo): + if trace_info.message_data is None: + return dataset_retrieval_span_data = LangfuseSpan( name=TraceTaskName.DATASET_RETRIEVAL_TRACE.value, input=trace_info.inputs, diff --git a/api/core/ops/langsmith_trace/langsmith_trace.py b/api/core/ops/langsmith_trace/langsmith_trace.py index c15b132abd..672843e5a8 100644 --- a/api/core/ops/langsmith_trace/langsmith_trace.py +++ b/api/core/ops/langsmith_trace/langsmith_trace.py @@ -62,15 +62,17 @@ class LangSmithDataTrace(BaseTraceInstance): self.generate_name_trace(trace_info) def workflow_trace(self, trace_info: WorkflowTraceInfo): - trace_id = trace_info.message_id or trace_info.workflow_app_log_id or trace_info.workflow_run_id + trace_id = trace_info.message_id or trace_info.workflow_run_id message_dotted_order = ( 
generate_dotted_order(trace_info.message_id, trace_info.start_time) if trace_info.message_id else None ) workflow_dotted_order = generate_dotted_order( - trace_info.workflow_app_log_id or trace_info.workflow_run_id, + trace_info.workflow_run_id, trace_info.workflow_data.created_at, message_dotted_order, ) + metadata = trace_info.metadata + metadata["workflow_app_log_id"] = trace_info.workflow_app_log_id if trace_info.message_id: message_run = LangSmithRunModel( @@ -82,7 +84,7 @@ class LangSmithDataTrace(BaseTraceInstance): start_time=trace_info.start_time, end_time=trace_info.end_time, extra={ - "metadata": trace_info.metadata, + "metadata": metadata, }, tags=["message", "workflow"], error=trace_info.error, @@ -94,7 +96,7 @@ class LangSmithDataTrace(BaseTraceInstance): langsmith_run = LangSmithRunModel( file_list=trace_info.file_list, total_tokens=trace_info.total_tokens, - id=trace_info.workflow_app_log_id or trace_info.workflow_run_id, + id=trace_info.workflow_run_id, name=TraceTaskName.WORKFLOW_TRACE.value, inputs=trace_info.workflow_run_inputs, run_type=LangSmithRunType.tool, @@ -102,7 +104,7 @@ class LangSmithDataTrace(BaseTraceInstance): end_time=trace_info.workflow_data.finished_at, outputs=trace_info.workflow_run_outputs, extra={ - "metadata": trace_info.metadata, + "metadata": metadata, }, error=trace_info.error, tags=["workflow"], @@ -204,7 +206,7 @@ class LangSmithDataTrace(BaseTraceInstance): extra={ "metadata": metadata, }, - parent_run_id=trace_info.workflow_app_log_id or trace_info.workflow_run_id, + parent_run_id=trace_info.workflow_run_id, tags=["node_execution"], id=node_execution_id, trace_id=trace_id, diff --git a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py index 08234c0c91..0854b316e5 100644 --- a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py +++ b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py @@ -1,13 +1,10 @@ import copy import json import logging -from collections.abc import Iterable from typing import Any, Optional from opensearchpy import OpenSearch -from opensearchpy.helpers import bulk from pydantic import BaseModel, model_validator -from tenacity import retry, stop_after_attempt, wait_fixed from configs import dify_config from core.rag.datasource.vdb.field import Field @@ -24,11 +21,15 @@ logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s") logging.getLogger("lindorm").setLevel(logging.WARN) +ROUTING_FIELD = "routing_field" +UGC_INDEX_PREFIX = "ugc_index" + class LindormVectorStoreConfig(BaseModel): hosts: str username: Optional[str] = None password: Optional[str] = None + using_ugc: Optional[bool] = False @model_validator(mode="before") @classmethod @@ -42,9 +43,7 @@ class LindormVectorStoreConfig(BaseModel): return values def to_opensearch_params(self) -> dict[str, Any]: - params = { - "hosts": self.hosts, - } + params = {"hosts": self.hosts} if self.username and self.password: params["http_auth"] = (self.username, self.password) return params @@ -52,9 +51,21 @@ class LindormVectorStoreConfig(BaseModel): class LindormVectorStore(BaseVector): def __init__(self, collection_name: str, config: LindormVectorStoreConfig, **kwargs): - super().__init__(collection_name.lower()) + self._routing = None + self._routing_field = None + if config.using_ugc: + routing_value: str = kwargs.get("routing_value") + if routing_value is None: + raise ValueError("UGC index should init vector with valid 'routing_value' parameter value") + self._routing = 
routing_value.lower() + self._routing_field = ROUTING_FIELD + ugc_index_name = collection_name + super().__init__(ugc_index_name.lower()) + else: + super().__init__(collection_name.lower()) self._client_config = config self._client = OpenSearch(**config.to_opensearch_params()) + self._using_ugc = config.using_ugc self.kwargs = kwargs def get_type(self) -> str: @@ -67,94 +78,37 @@ class LindormVectorStore(BaseVector): def refresh(self): self._client.indices.refresh(index=self._collection_name) - def __filter_existed_ids( - self, - texts: list[str], - metadatas: list[dict], - ids: list[str], - bulk_size: int = 1024, - ) -> tuple[Iterable[str], Optional[list[dict]], Optional[list[str]]]: - @retry(stop=stop_after_attempt(3), wait=wait_fixed(60)) - def __fetch_existing_ids(batch_ids: list[str]) -> set[str]: - try: - existing_docs = self._client.mget(index=self._collection_name, body={ - "ids": batch_ids}, _source=False) - return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]} - except Exception as e: - logger.exception(f"Error fetching batch {batch_ids}") - return set() - - @retry(stop=stop_after_attempt(3), wait=wait_fixed(60)) - def __fetch_existing_routing_ids(batch_ids: list[str], route_ids: list[str]) -> set[str]: - try: - existing_docs = self._client.mget( - body={ - "docs": [ - {"_index": self._collection_name, - "_id": id, "routing": routing} - for id, routing in zip(batch_ids, route_ids) - ] - }, - _source=False, - ) - return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]} - except Exception as e: - logger.exception(f"Error fetching batch ids: {batch_ids}") - return set() - - if ids is None: - return texts, metadatas, ids - - if len(texts) != len(ids): - raise RuntimeError(f"texts {len(texts)} != {ids}") - - filtered_texts = [] - filtered_metadatas = [] - filtered_ids = [] - - def batch(iterable, n): - length = len(iterable) - for idx in range(0, length, n): - yield iterable[idx: min(idx + n, length)] - - for ids_batch, texts_batch, metadatas_batch in zip( - batch(ids, bulk_size), - batch(texts, bulk_size), - batch(metadatas, bulk_size) if metadatas is not None else batch( - [None] * len(ids), bulk_size), - ): - existing_ids_set = __fetch_existing_ids(ids_batch) - for text, metadata, doc_id in zip(texts_batch, metadatas_batch, ids_batch): - if doc_id not in existing_ids_set: - filtered_texts.append(text) - filtered_ids.append(doc_id) - if metadatas is not None: - filtered_metadatas.append(metadata) - - return filtered_texts, metadatas if metadatas is None else filtered_metadatas, filtered_ids - def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): actions = [] uuids = self._get_uuids(documents) for i in range(len(documents)): - action = { - "_op_type": "index", - "_index": self._collection_name.lower(), - "_id": uuids[i], - "_source": { - Field.CONTENT_KEY.value: documents[i].page_content, - # Make sure you pass an array here - Field.VECTOR.value: embeddings[i], - Field.METADATA_KEY.value: documents[i].metadata, - }, + action_header = { + "index": { + "_index": self.collection_name.lower(), + "_id": uuids[i], + } } - actions.append(action) - bulk(self._client, actions) - self.refresh() + action_values = { + Field.CONTENT_KEY.value: documents[i].page_content, + Field.VECTOR.value: embeddings[i], # Make sure you pass an array here + Field.METADATA_KEY.value: documents[i].metadata, + } + if self._using_ugc: + action_header["index"]["routing"] = self._routing + action_values[self._routing_field] = self._routing + 
actions.append(action_header) + actions.append(action_values) + response = self._client.bulk(actions) + if response["errors"]: + for item in response["items"]: + print(f"{item['index']['status']}: {item['index']['error']['type']}") + else: + self.refresh() def get_ids_by_metadata_field(self, key: str, value: str): - query = { - "query": {"term": {f"{Field.METADATA_KEY.value}.{key}.keyword": value}}} + query = {"query": {"bool": {"must": [{"term": {f"{Field.METADATA_KEY.value}.{key}.keyword": value}}]}}} + if self._using_ugc: + query["query"]["bool"]["must"].append({"term": {f"{self._routing_field}.keyword": self._routing}}) response = self._client.search(index=self._collection_name, body=query) if response["hits"]["hits"]: return [hit["_id"] for hit in response["hits"]["hits"]] @@ -162,47 +116,54 @@ class LindormVectorStore(BaseVector): return None def delete_by_metadata_field(self, key: str, value: str): - query_str = {"query": {"match": {f"metadata.{key}": f"{value}"}}} - results = self._client.search( - index=self._collection_name, body=query_str) - ids = [hit["_id"] for hit in results["hits"]["hits"]] + ids = self.get_ids_by_metadata_field(key, value) if ids: self.delete_by_ids(ids) def delete_by_ids(self, ids: list[str]) -> None: + params = {} + if self._using_ugc: + params["routing"] = self._routing for id in ids: - if self._client.exists(index=self._collection_name, id=id): - self._client.delete(index=self._collection_name, id=id) + if self._client.exists(index=self._collection_name, id=id, params=params): + params = {} + if self._using_ugc: + params["routing"] = self._routing + self._client.delete(index=self._collection_name, id=id, params=params) + self.refresh() else: logger.warning( f"DELETE BY ID: ID {id} does not exist in the index.") def delete(self) -> None: - try: + if self._using_ugc: + routing_filter_query = { + "query": {"bool": {"must": [{"term": {f"{self._routing_field}.keyword": self._routing}}]}} + } + self._client.delete_by_query(self._collection_name, body=routing_filter_query) + self.refresh() + else: if self._client.indices.exists(index=self._collection_name): self._client.indices.delete( index=self._collection_name, params={"timeout": 60}) logger.info("Delete index success") else: - logger.warning( - f"Index '{self._collection_name}' does not exist. No deletion performed.") - except Exception as e: - logger.exception(f"Error occurred while deleting the index: {self._collection_name}") - raise e + logger.warning(f"Index '{self._collection_name}' does not exist. 
No deletion performed.") def text_exists(self, id: str) -> bool: try: - self._client.get(index=self._collection_name, id=id) + params = {} + if self._using_ugc: + params["routing"] = self._routing + self._client.get(index=self._collection_name, id=id, params=params) return True except: return False def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: - # Make sure query_vector is a list if not isinstance(query_vector, list): raise ValueError("query_vector should be a list of floats") - # Check whether query_vector is a floating-point number list if not all(isinstance(x, float) for x in query_vector): raise ValueError("All elements in query_vector should be floats") @@ -210,8 +171,10 @@ class LindormVectorStore(BaseVector): query = default_vector_search_query( query_vector=query_vector, k=top_k, **kwargs) try: - response = self._client.search( - index=self._collection_name, body=query) + params = {} + if self._using_ugc: + params["routing"] = self._routing + response = self._client.search(index=self._collection_name, body=query, params=params) except Exception as e: logger.exception(f"Error executing vector search, query: {query}") raise @@ -244,7 +207,7 @@ class LindormVectorStore(BaseVector): minimum_should_match = kwargs.get("minimum_should_match", 0) top_k = kwargs.get("top_k", 10) filters = kwargs.get("filter") - routing = kwargs.get("routing") + routing = self._routing full_text_query = default_text_search_query( query_text=query, k=top_k, @@ -255,6 +218,7 @@ class LindormVectorStore(BaseVector): minimum_should_match=minimum_should_match, filters=filters, routing=routing, + routing_field=self._routing_field, ) response = self._client.search( index=self._collection_name, body=full_text_query) @@ -279,17 +243,18 @@ class LindormVectorStore(BaseVector): f"Collection {self._collection_name} already exists.") return if self._client.indices.exists(index=self._collection_name): - logger.info("{self._collection_name.lower()} already exists.") + logger.info(f"{self._collection_name.lower()} already exists.") + redis_client.set(collection_exist_cache_key, 1, ex=3600) return if len(self.kwargs) == 0 and len(kwargs) != 0: self.kwargs = copy.deepcopy(kwargs) vector_field = kwargs.pop("vector_field", Field.VECTOR.value) - shards = kwargs.pop("shards", 2) + shards = kwargs.pop("shards", 4) engine = kwargs.pop("engine", "lvector") - method_name = kwargs.pop("method_name", "hnsw") + method_name = kwargs.pop("method_name", dify_config.DEFAULT_INDEX_TYPE) + space_type = kwargs.pop("space_type", dify_config.DEFAULT_DISTANCE_TYPE) data_type = kwargs.pop("data_type", "float") - space_type = kwargs.pop("space_type", "cosinesimil") hnsw_m = kwargs.pop("hnsw_m", 24) hnsw_ef_construction = kwargs.pop("hnsw_ef_construction", 500) @@ -305,10 +270,10 @@ class LindormVectorStore(BaseVector): mapping = default_text_mapping( dimension, method_name, + space_type=space_type, shards=shards, engine=engine, data_type=data_type, - space_type=space_type, vector_field=vector_field, hnsw_m=hnsw_m, hnsw_ef_construction=hnsw_ef_construction, @@ -318,6 +283,7 @@ class LindormVectorStore(BaseVector): centroids_hnsw_m=centroids_hnsw_m, centroids_hnsw_ef_construct=centroids_hnsw_ef_construct, centroids_hnsw_ef_search=centroids_hnsw_ef_search, + using_ugc=self._using_ugc, **kwargs, ) self._client.indices.create( @@ -327,15 +293,20 @@ class LindormVectorStore(BaseVector): def default_text_mapping(dimension: int, method_name: str, **kwargs: Any) -> dict: - routing_field = kwargs.get("routing_field") 
excludes_from_source = kwargs.get("excludes_from_source") analyzer = kwargs.get("analyzer", "ik_max_word") text_field = kwargs.get("text_field", Field.CONTENT_KEY.value) engine = kwargs["engine"] shard = kwargs["shards"] - space_type = kwargs["space_type"] + space_type = kwargs.get("space_type") + if space_type is None: + if method_name == "hnsw": + space_type = "l2" + else: + space_type = "cosine" data_type = kwargs["data_type"] vector_field = kwargs.get("vector_field", Field.VECTOR.value) + using_ugc = kwargs.get("using_ugc", False) if method_name == "ivfpq": ivfpq_m = kwargs["ivfpq_m"] @@ -385,13 +356,11 @@ def default_text_mapping(dimension: int, method_name: str, **kwargs: Any) -> dic # e.g. {"excludes": ["vector_field"]} mapping["mappings"]["_source"] = {"excludes": excludes_from_source} - if method_name == "ivfpq" and routing_field is not None: + if using_ugc and method_name == "ivfpq": mapping["settings"]["index"]["knn_routing"] = True mapping["settings"]["index"]["knn.offline.construction"] = True - - if method_name == "flat" and routing_field is not None: + elif using_ugc and method_name == "hnsw" or using_ugc and method_name == "flat": mapping["settings"]["index"]["knn_routing"] = True - return mapping @@ -405,14 +374,12 @@ def default_text_search_query( minimum_should_match: int = 0, filters: Optional[list[dict]] = None, routing: Optional[str] = None, + routing_field: Optional[str] = None, **kwargs, ) -> dict: if routing is not None: - routing_field = kwargs.get("routing_field", "routing_field") query_clause = { - "bool": { - "must": [{"match": {text_field: query_text}}, {"term": {f"metadata.{routing_field}.keyword": routing}}] - } + "bool": {"must": [{"match": {text_field: query_text}}, {"term": {f"{routing_field}.keyword": routing}}]} } else: query_clause = {"match": {text_field: query_text}} @@ -472,7 +439,7 @@ def default_vector_search_query( ) -> dict: if filters is not None: filter_type = "post_filter" if filter_type is None else filter_type - if not isinstance(filter, list): + if not isinstance(filters, list): raise RuntimeError(f"unexpected filter with {type(filters)}") final_ext = {"lvector": {}} if min_score != "0.0": @@ -508,17 +475,40 @@ def default_vector_search_query( class LindormVectorStoreFactory(AbstractVectorFactory): def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> LindormVectorStore: - if dataset.index_struct_dict: - class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] - collection_name = class_prefix - else: - dataset_id = dataset.id - collection_name = Dataset.gen_collection_name_by_id(dataset_id) - dataset.index_struct = json.dumps( - self.gen_index_struct_dict(VectorType.LINDORM, collection_name)) lindorm_config = LindormVectorStoreConfig( hosts=dify_config.LINDORM_URL, username=dify_config.LINDORM_USERNAME, password=dify_config.LINDORM_PASSWORD, + using_ugc=dify_config.USING_UGC_INDEX, ) - return LindormVectorStore(collection_name, lindorm_config) + using_ugc = dify_config.USING_UGC_INDEX + routing_value = None + if dataset.index_struct: + if using_ugc: + dimension = dataset.index_struct_dict["dimension"] + index_type = dataset.index_struct_dict["index_type"] + distance_type = dataset.index_struct_dict["distance_type"] + index_name = f"{UGC_INDEX_PREFIX}_{dimension}_{index_type}_{distance_type}" + routing_value = dataset.index_struct_dict["vector_store"]["class_prefix"] + else: + index_name = dataset.index_struct_dict["vector_store"]["class_prefix"] + else: + embedding_vector = 
embeddings.embed_query("hello word") + dimension = len(embedding_vector) + index_type = dify_config.DEFAULT_INDEX_TYPE + distance_type = dify_config.DEFAULT_DISTANCE_TYPE + class_prefix = Dataset.gen_collection_name_by_id(dataset.id) + index_struct_dict = { + "type": VectorType.LINDORM, + "vector_store": {"class_prefix": class_prefix}, + "index_type": index_type, + "dimension": dimension, + "distance_type": distance_type, + } + dataset.index_struct = json.dumps(index_struct_dict) + if using_ugc: + index_name = f"{UGC_INDEX_PREFIX}_{dimension}_{index_type}_{distance_type}" + routing_value = class_prefix + else: + index_name = class_prefix + return LindormVectorStore(index_name, lindorm_config, routing_value=routing_value) diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py index a6f3ad7fef..8dd5922ad0 100644 --- a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py +++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py @@ -162,7 +162,7 @@ class TidbService: clusters = [] tidb_serverless_list_map = {item.cluster_id: item for item in tidb_serverless_list} cluster_ids = [item.cluster_id for item in tidb_serverless_list] - params = {"clusterIds": cluster_ids, "view": "FULL"} + params = {"clusterIds": cluster_ids, "view": "BASIC"} response = requests.get( f"{api_url}/clusters:batchGet", params=params, auth=HTTPDigestAuth(public_key, private_key) ) diff --git a/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py b/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py index 1147e35ce8..39ab6ea71e 100644 --- a/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py +++ b/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py @@ -37,8 +37,6 @@ class TiDBVectorConfig(BaseModel): raise ValueError("config TIDB_VECTOR_PORT is required") if not values["user"]: raise ValueError("config TIDB_VECTOR_USER is required") - if not values["password"]: - raise ValueError("config TIDB_VECTOR_PASSWORD is required") if not values["database"]: raise ValueError("config TIDB_VECTOR_DATABASE is required") if not values["program_name"]: diff --git a/api/core/rag/extractor/extract_processor.py b/api/core/rag/extractor/extract_processor.py index a0b1aa4cef..69659e3108 100644 --- a/api/core/rag/extractor/extract_processor.py +++ b/api/core/rag/extractor/extract_processor.py @@ -103,7 +103,7 @@ class ExtractProcessor: extractor = ExcelExtractor(file_path) elif file_extension == ".pdf": extractor = PdfExtractor(file_path) - elif file_extension in {".md", ".markdown"}: + elif file_extension in {".md", ".markdown", ".mdx"}: extractor = ( UnstructuredMarkdownExtractor(file_path, unstructured_api_url, unstructured_api_key) if is_automatic @@ -141,7 +141,7 @@ class ExtractProcessor: extractor = ExcelExtractor(file_path) elif file_extension == ".pdf": extractor = PdfExtractor(file_path) - elif file_extension in {".md", ".markdown"}: + elif file_extension in {".md", ".markdown", ".mdx"}: extractor = MarkdownExtractor(file_path, autodetect_encoding=True) elif file_extension in {".htm", ".html"}: extractor = HtmlExtractor(file_path) diff --git a/api/core/tools/README_JP.md b/api/core/tools/README_JA.md similarity index 100% rename from api/core/tools/README_JP.md rename to api/core/tools/README_JA.md diff --git a/api/core/tools/tool/api_tool.py b/api/core/tools/tool/api_tool.py index 0b4c5bd2c6..636debffd4 100644 --- a/api/core/tools/tool/api_tool.py +++ b/api/core/tools/tool/api_tool.py @@ -270,9 +270,6 @@ class ApiTool(Tool): elif 
property["type"] == "object" or property["type"] == "array": if isinstance(value, str): try: - # an array str like '[1,2]' also can convert to list [1,2] through json.loads - # json not support single quote, but we can support it - value = value.replace("'", '"') return json.loads(value) except ValueError: return value diff --git a/api/core/workflow/callbacks/workflow_logging_callback.py b/api/core/workflow/callbacks/workflow_logging_callback.py index 17913de7b0..ed737e7316 100644 --- a/api/core/workflow/callbacks/workflow_logging_callback.py +++ b/api/core/workflow/callbacks/workflow_logging_callback.py @@ -4,6 +4,7 @@ from core.model_runtime.utils.encoders import jsonable_encoder from core.workflow.graph_engine.entities.event import ( GraphEngineEvent, GraphRunFailedEvent, + GraphRunPartialSucceededEvent, GraphRunStartedEvent, GraphRunSucceededEvent, IterationRunFailedEvent, @@ -39,6 +40,8 @@ class WorkflowLoggingCallback(WorkflowCallback): self.print_text("\n[GraphRunStartedEvent]", color="pink") elif isinstance(event, GraphRunSucceededEvent): self.print_text("\n[GraphRunSucceededEvent]", color="green") + elif isinstance(event, GraphRunPartialSucceededEvent): + self.print_text("\n[GraphRunPartialSucceededEvent]", color="pink") elif isinstance(event, GraphRunFailedEvent): self.print_text(f"\n[GraphRunFailedEvent] reason: {event.error}", color="red") elif isinstance(event, NodeRunStartedEvent): diff --git a/api/core/workflow/entities/node_entities.py b/api/core/workflow/entities/node_entities.py index e174d3baa0..976a5ef74e 100644 --- a/api/core/workflow/entities/node_entities.py +++ b/api/core/workflow/entities/node_entities.py @@ -25,6 +25,7 @@ class NodeRunMetadataKey(StrEnum): PARENT_PARALLEL_START_NODE_ID = "parent_parallel_start_node_id" PARALLEL_MODE_RUN_ID = "parallel_mode_run_id" ITERATION_DURATION_MAP = "iteration_duration_map" # single iteration duration if iteration node runs + ERROR_STRATEGY = "error_strategy" # node in continue on error mode return the field class NodeRunResult(BaseModel): @@ -43,3 +44,4 @@ class NodeRunResult(BaseModel): edge_source_handle: Optional[str] = None # source handle id of node with multiple branches error: Optional[str] = None # error message if status is failed + error_type: Optional[str] = None # error type if status is failed diff --git a/api/core/workflow/graph_engine/entities/event.py b/api/core/workflow/graph_engine/entities/event.py index 3736e632c3..73450349de 100644 --- a/api/core/workflow/graph_engine/entities/event.py +++ b/api/core/workflow/graph_engine/entities/event.py @@ -1,3 +1,4 @@ +from collections.abc import Mapping from datetime import datetime from typing import Any, Optional @@ -32,6 +33,12 @@ class GraphRunSucceededEvent(BaseGraphEvent): class GraphRunFailedEvent(BaseGraphEvent): error: str = Field(..., description="failed reason") + exceptions_count: Optional[int] = Field(description="exception count", default=0) + + +class GraphRunPartialSucceededEvent(BaseGraphEvent): + exceptions_count: int = Field(..., description="exception count") + outputs: Optional[dict[str, Any]] = None ########################################### @@ -82,6 +89,10 @@ class NodeRunFailedEvent(BaseNodeEvent): error: str = Field(..., description="error") +class NodeRunExceptionEvent(BaseNodeEvent): + error: str = Field(..., description="error") + + class NodeInIterationFailedEvent(BaseNodeEvent): error: str = Field(..., description="error") @@ -140,8 +151,8 @@ class BaseIterationEvent(GraphEngineEvent): class 
IterationRunStartedEvent(BaseIterationEvent): start_at: datetime = Field(..., description="start at") - inputs: Optional[dict[str, Any]] = None - metadata: Optional[dict[str, Any]] = None + inputs: Optional[Mapping[str, Any]] = None + metadata: Optional[Mapping[str, Any]] = None predecessor_node_id: Optional[str] = None @@ -153,18 +164,18 @@ class IterationRunNextEvent(BaseIterationEvent): class IterationRunSucceededEvent(BaseIterationEvent): start_at: datetime = Field(..., description="start at") - inputs: Optional[dict[str, Any]] = None - outputs: Optional[dict[str, Any]] = None - metadata: Optional[dict[str, Any]] = None + inputs: Optional[Mapping[str, Any]] = None + outputs: Optional[Mapping[str, Any]] = None + metadata: Optional[Mapping[str, Any]] = None steps: int = 0 iteration_duration_map: Optional[dict[str, float]] = None class IterationRunFailedEvent(BaseIterationEvent): start_at: datetime = Field(..., description="start at") - inputs: Optional[dict[str, Any]] = None - outputs: Optional[dict[str, Any]] = None - metadata: Optional[dict[str, Any]] = None + inputs: Optional[Mapping[str, Any]] = None + outputs: Optional[Mapping[str, Any]] = None + metadata: Optional[Mapping[str, Any]] = None steps: int = 0 error: str = Field(..., description="failed reason") diff --git a/api/core/workflow/graph_engine/entities/graph.py b/api/core/workflow/graph_engine/entities/graph.py index d87c039409..4f7bc60e26 100644 --- a/api/core/workflow/graph_engine/entities/graph.py +++ b/api/core/workflow/graph_engine/entities/graph.py @@ -64,13 +64,21 @@ class Graph(BaseModel): edge_configs = graph_config.get("edges") if edge_configs is None: edge_configs = [] + # node configs + node_configs = graph_config.get("nodes") + if not node_configs: + raise ValueError("Graph must have at least one node") edge_configs = cast(list, edge_configs) + node_configs = cast(list, node_configs) # reorganize edges mapping edge_mapping: dict[str, list[GraphEdge]] = {} reverse_edge_mapping: dict[str, list[GraphEdge]] = {} target_edge_ids = set() + fail_branch_source_node_id = [ + node["id"] for node in node_configs if node["data"].get("error_strategy") == "fail-branch" + ] for edge_config in edge_configs: source_node_id = edge_config.get("source") if not source_node_id: @@ -90,8 +98,16 @@ class Graph(BaseModel): # parse run condition run_condition = None - if edge_config.get("sourceHandle") and edge_config.get("sourceHandle") != "source": - run_condition = RunCondition(type="branch_identify", branch_identify=edge_config.get("sourceHandle")) + if edge_config.get("sourceHandle"): + if ( + edge_config.get("source") in fail_branch_source_node_id + and edge_config.get("sourceHandle") != "fail-branch" + ): + run_condition = RunCondition(type="branch_identify", branch_identify="success-branch") + elif edge_config.get("sourceHandle") != "source": + run_condition = RunCondition( + type="branch_identify", branch_identify=edge_config.get("sourceHandle") + ) graph_edge = GraphEdge( source_node_id=source_node_id, target_node_id=target_node_id, run_condition=run_condition @@ -100,13 +116,6 @@ class Graph(BaseModel): edge_mapping[source_node_id].append(graph_edge) reverse_edge_mapping[target_node_id].append(graph_edge) - # node configs - node_configs = graph_config.get("nodes") - if not node_configs: - raise ValueError("Graph must have at least one node") - - node_configs = cast(list, node_configs) - # fetch nodes that have no predecessor node root_node_configs = [] all_node_id_config_mapping: dict[str, dict] = {} diff --git 
a/api/core/workflow/graph_engine/entities/runtime_route_state.py b/api/core/workflow/graph_engine/entities/runtime_route_state.py index baeec9bf01..7683dcc9dc 100644 --- a/api/core/workflow/graph_engine/entities/runtime_route_state.py +++ b/api/core/workflow/graph_engine/entities/runtime_route_state.py @@ -15,6 +15,7 @@ class RouteNodeState(BaseModel): SUCCESS = "success" FAILED = "failed" PAUSED = "paused" + EXCEPTION = "exception" id: str = Field(default_factory=lambda: str(uuid.uuid4())) """node state id""" @@ -51,7 +52,11 @@ class RouteNodeState(BaseModel): :param run_result: run result """ - if self.status in {RouteNodeState.Status.SUCCESS, RouteNodeState.Status.FAILED}: + if self.status in { + RouteNodeState.Status.SUCCESS, + RouteNodeState.Status.FAILED, + RouteNodeState.Status.EXCEPTION, + }: raise Exception(f"Route state {self.id} already finished") if run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED: @@ -59,6 +64,9 @@ class RouteNodeState(BaseModel): elif run_result.status == WorkflowNodeExecutionStatus.FAILED: self.status = RouteNodeState.Status.FAILED self.failed_reason = run_result.error + elif run_result.status == WorkflowNodeExecutionStatus.EXCEPTION: + self.status = RouteNodeState.Status.EXCEPTION + self.failed_reason = run_result.error else: raise Exception(f"Invalid route status {run_result.status}") diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py index 7cffd7bc8e..e03d4a7194 100644 --- a/api/core/workflow/graph_engine/graph_engine.py +++ b/api/core/workflow/graph_engine/graph_engine.py @@ -5,21 +5,23 @@ import uuid from collections.abc import Generator, Mapping from concurrent.futures import ThreadPoolExecutor, wait from copy import copy, deepcopy -from typing import Any, Optional +from typing import Any, Optional, cast from flask import Flask, current_app from core.app.apps.base_app_queue_manager import GenerateTaskStoppedError from core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities.node_entities import NodeRunMetadataKey +from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeRunResult from core.workflow.entities.variable_pool import VariablePool, VariableValue from core.workflow.graph_engine.condition_handlers.condition_manager import ConditionManager from core.workflow.graph_engine.entities.event import ( BaseIterationEvent, GraphEngineEvent, GraphRunFailedEvent, + GraphRunPartialSucceededEvent, GraphRunStartedEvent, GraphRunSucceededEvent, + NodeRunExceptionEvent, NodeRunFailedEvent, NodeRunRetrieverResourceEvent, NodeRunStartedEvent, @@ -36,7 +38,9 @@ from core.workflow.graph_engine.entities.runtime_route_state import RouteNodeSta from core.workflow.nodes import NodeType from core.workflow.nodes.answer.answer_stream_processor import AnswerStreamProcessor from core.workflow.nodes.base import BaseNode +from core.workflow.nodes.base.entities import BaseNodeData from core.workflow.nodes.end.end_stream_processor import EndStreamProcessor +from core.workflow.nodes.enums import ErrorStrategy, FailBranchSourceHandle from core.workflow.nodes.event import RunCompletedEvent, RunRetrieverResourceEvent, RunStreamChunkEvent from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING from extensions.ext_database import db @@ -128,6 +132,7 @@ class GraphEngine: def run(self) -> Generator[GraphEngineEvent, None, None]: # trigger graph run start event yield GraphRunStartedEvent() + handle_exceptions = [] try: if self.init_params.workflow_type == 
WorkflowType.CHAT: @@ -140,13 +145,17 @@ class GraphEngine: ) # run graph - generator = stream_processor.process(self._run(start_node_id=self.graph.root_node_id)) - + generator = stream_processor.process( + self._run(start_node_id=self.graph.root_node_id, handle_exceptions=handle_exceptions) + ) for item in generator: try: yield item if isinstance(item, NodeRunFailedEvent): - yield GraphRunFailedEvent(error=item.route_node_state.failed_reason or "Unknown error.") + yield GraphRunFailedEvent( + error=item.route_node_state.failed_reason or "Unknown error.", + exceptions_count=len(handle_exceptions), + ) return elif isinstance(item, NodeRunSucceededEvent): if item.node_type == NodeType.END: @@ -172,19 +181,24 @@ class GraphEngine: ].strip() except Exception as e: logger.exception("Graph run failed") - yield GraphRunFailedEvent(error=str(e)) + yield GraphRunFailedEvent(error=str(e), exceptions_count=len(handle_exceptions)) return - - # trigger graph run success event - yield GraphRunSucceededEvent(outputs=self.graph_runtime_state.outputs) + # count exceptions to determine partial success + if len(handle_exceptions) > 0: + yield GraphRunPartialSucceededEvent( + exceptions_count=len(handle_exceptions), outputs=self.graph_runtime_state.outputs + ) + else: + # trigger graph run success event + yield GraphRunSucceededEvent(outputs=self.graph_runtime_state.outputs) self._release_thread() except GraphRunFailedError as e: - yield GraphRunFailedEvent(error=e.error) + yield GraphRunFailedEvent(error=e.error, exceptions_count=len(handle_exceptions)) self._release_thread() return except Exception as e: logger.exception("Unknown Error when graph running") - yield GraphRunFailedEvent(error=str(e)) + yield GraphRunFailedEvent(error=str(e), exceptions_count=len(handle_exceptions)) self._release_thread() raise e @@ -198,6 +212,7 @@ class GraphEngine: in_parallel_id: Optional[str] = None, parent_parallel_id: Optional[str] = None, parent_parallel_start_node_id: Optional[str] = None, + handle_exceptions: list[str] = [], ) -> Generator[GraphEngineEvent, None, None]: parallel_start_node_id = None if in_parallel_id: @@ -242,7 +257,7 @@ class GraphEngine: previous_node_id=previous_node_id, thread_pool_id=self.thread_pool_id, ) - + node_instance = cast(BaseNode[BaseNodeData], node_instance) try: # run node generator = self._run_node( @@ -252,6 +267,7 @@ class GraphEngine: parallel_start_node_id=parallel_start_node_id, parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, + handle_exceptions=handle_exceptions, ) for item in generator: @@ -301,7 +317,12 @@ class GraphEngine: if len(edge_mappings) == 1: edge = edge_mappings[0] - + if ( + previous_route_node_state.status == RouteNodeState.Status.EXCEPTION + and node_instance.node_data.error_strategy == ErrorStrategy.FAIL_BRANCH + and edge.run_condition is None + ): + break if edge.run_condition: result = ConditionManager.get_condition_handler( init_params=self.init_params, @@ -334,7 +355,7 @@ class GraphEngine: if len(sub_edge_mappings) == 0: continue - edge = sub_edge_mappings[0] + edge = cast(GraphEdge, sub_edge_mappings[0]) result = ConditionManager.get_condition_handler( init_params=self.init_params, @@ -355,6 +376,7 @@ class GraphEngine: edge_mappings=sub_edge_mappings, in_parallel_id=in_parallel_id, parallel_start_node_id=parallel_start_node_id, + handle_exceptions=handle_exceptions, ) for item in parallel_generator: @@ -369,11 +391,18 @@ class GraphEngine: break next_node_id = final_node_id + elif ( + 
node_instance.node_data.error_strategy == ErrorStrategy.FAIL_BRANCH + and node_instance.should_continue_on_error + and previous_route_node_state.status == RouteNodeState.Status.EXCEPTION + ): + break else: parallel_generator = self._run_parallel_branches( edge_mappings=edge_mappings, in_parallel_id=in_parallel_id, parallel_start_node_id=parallel_start_node_id, + handle_exceptions=handle_exceptions, ) for item in parallel_generator: @@ -395,6 +424,7 @@ class GraphEngine: edge_mappings: list[GraphEdge], in_parallel_id: Optional[str] = None, parallel_start_node_id: Optional[str] = None, + handle_exceptions: list[str] = [], ) -> Generator[GraphEngineEvent | str, None, None]: # if nodes has no run conditions, parallel run all nodes parallel_id = self.graph.node_parallel_mapping.get(edge_mappings[0].target_node_id) @@ -438,6 +468,7 @@ class GraphEngine: "parallel_start_node_id": edge.target_node_id, "parent_parallel_id": in_parallel_id, "parent_parallel_start_node_id": parallel_start_node_id, + "handle_exceptions": handle_exceptions, }, ) @@ -481,6 +512,7 @@ class GraphEngine: parallel_start_node_id: str, parent_parallel_id: Optional[str] = None, parent_parallel_start_node_id: Optional[str] = None, + handle_exceptions: list[str] = [], ) -> None: """ Run parallel nodes @@ -502,6 +534,7 @@ class GraphEngine: in_parallel_id=parallel_id, parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, + handle_exceptions=handle_exceptions, ) for item in generator: @@ -548,6 +581,7 @@ class GraphEngine: parallel_start_node_id: Optional[str] = None, parent_parallel_id: Optional[str] = None, parent_parallel_start_node_id: Optional[str] = None, + handle_exceptions: list[str] = [], ) -> Generator[GraphEngineEvent, None, None]: """ Run node @@ -587,19 +621,55 @@ class GraphEngine: route_node_state.set_finished(run_result=run_result) if run_result.status == WorkflowNodeExecutionStatus.FAILED: - yield NodeRunFailedEvent( - error=route_node_state.failed_reason or "Unknown error.", - id=node_instance.id, - node_id=node_instance.node_id, - node_type=node_instance.node_type, - node_data=node_instance.node_data, - route_node_state=route_node_state, - parallel_id=parallel_id, - parallel_start_node_id=parallel_start_node_id, - parent_parallel_id=parent_parallel_id, - parent_parallel_start_node_id=parent_parallel_start_node_id, - ) + if node_instance.should_continue_on_error: + # if run failed, handle error + run_result = self._handle_continue_on_error( + node_instance, + item.run_result, + self.graph_runtime_state.variable_pool, + handle_exceptions=handle_exceptions, + ) + route_node_state.node_run_result = run_result + route_node_state.status = RouteNodeState.Status.EXCEPTION + if run_result.outputs: + for variable_key, variable_value in run_result.outputs.items(): + # append variables to variable pool recursively + self._append_variables_recursively( + node_id=node_instance.node_id, + variable_key_list=[variable_key], + variable_value=variable_value, + ) + yield NodeRunExceptionEvent( + error=run_result.error or "System Error", + id=node_instance.id, + node_id=node_instance.node_id, + node_type=node_instance.node_type, + node_data=node_instance.node_data, + route_node_state=route_node_state, + parallel_id=parallel_id, + parallel_start_node_id=parallel_start_node_id, + parent_parallel_id=parent_parallel_id, + parent_parallel_start_node_id=parent_parallel_start_node_id, + ) + else: + yield NodeRunFailedEvent( + error=route_node_state.failed_reason or "Unknown error.", + 
id=node_instance.id, + node_id=node_instance.node_id, + node_type=node_instance.node_type, + node_data=node_instance.node_data, + route_node_state=route_node_state, + parallel_id=parallel_id, + parallel_start_node_id=parallel_start_node_id, + parent_parallel_id=parent_parallel_id, + parent_parallel_start_node_id=parent_parallel_start_node_id, + ) + elif run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED: + if node_instance.should_continue_on_error and self.graph.edge_mapping.get( + node_instance.node_id + ): + run_result.edge_source_handle = FailBranchSourceHandle.SUCCESS if run_result.metadata and run_result.metadata.get(NodeRunMetadataKey.TOTAL_TOKENS): # plus state total_tokens self.graph_runtime_state.total_tokens += int( @@ -735,6 +805,56 @@ class GraphEngine: new_instance.graph_runtime_state.variable_pool = deepcopy(self.graph_runtime_state.variable_pool) return new_instance + def _handle_continue_on_error( + self, + node_instance: BaseNode[BaseNodeData], + error_result: NodeRunResult, + variable_pool: VariablePool, + handle_exceptions: list[str] = [], + ) -> NodeRunResult: + """ + handle continue on error when node_instance.should_continue_on_error is True + + :param node_instance (BaseNode): node instance that raised the error + :param error_result (NodeRunResult): error run result + :param variable_pool (VariablePool): variable pool + :return: exception run result + """ + # add error message and error type to variable pool + variable_pool.add([node_instance.node_id, "error_message"], error_result.error) + variable_pool.add([node_instance.node_id, "error_type"], error_result.error_type) + # add error message to handle_exceptions + handle_exceptions.append(error_result.error) + node_error_args = { + "status": WorkflowNodeExecutionStatus.EXCEPTION, + "error": error_result.error, + "inputs": error_result.inputs, + "metadata": { + NodeRunMetadataKey.ERROR_STRATEGY: node_instance.node_data.error_strategy, + }, + } + + if node_instance.node_data.error_strategy is ErrorStrategy.DEFAULT_VALUE: + return NodeRunResult( + **node_error_args, + outputs={ + **node_instance.node_data.default_value_dict, + "error_message": error_result.error, + "error_type": error_result.error_type, + }, + ) + elif node_instance.node_data.error_strategy is ErrorStrategy.FAIL_BRANCH: + if self.graph.edge_mapping.get(node_instance.node_id): + node_error_args["edge_source_handle"] = FailBranchSourceHandle.FAILED + return NodeRunResult( + **node_error_args, + outputs={ + "error_message": error_result.error, + "error_type": error_result.error_type, + }, + ) + return error_result + class GraphRunFailedError(Exception): def __init__(self, error: str): diff --git a/api/core/workflow/nodes/answer/answer_stream_generate_router.py b/api/core/workflow/nodes/answer/answer_stream_generate_router.py index 8c78016f09..1b948bf592 100644 --- a/api/core/workflow/nodes/answer/answer_stream_generate_router.py +++ b/api/core/workflow/nodes/answer/answer_stream_generate_router.py @@ -6,7 +6,7 @@ from core.workflow.nodes.answer.entities import ( TextGenerateRouteChunk, VarGenerateRouteChunk, ) -from core.workflow.nodes.enums import NodeType +from core.workflow.nodes.enums import ErrorStrategy, NodeType from core.workflow.utils.variable_template_parser import VariableTemplateParser @@ -148,13 +148,18 @@ class AnswerStreamGeneratorRouter: for edge in reverse_edges: source_node_id = edge.source_node_id source_node_type = node_id_config_mapping[source_node_id].get("data", {}).get("type") - if source_node_type in { - NodeType.ANSWER, - NodeType.IF_ELSE, - NodeType.QUESTION_CLASSIFIER, - NodeType.ITERATION, - 
NodeType.VARIABLE_ASSIGNER, - }: + source_node_data = node_id_config_mapping[source_node_id].get("data", {}) + if ( + source_node_type + in { + NodeType.ANSWER, + NodeType.IF_ELSE, + NodeType.QUESTION_CLASSIFIER, + NodeType.ITERATION, + NodeType.VARIABLE_ASSIGNER, + } + or source_node_data.get("error_strategy") == ErrorStrategy.FAIL_BRANCH + ): answer_dependencies[answer_node_id].append(source_node_id) else: cls._recursive_fetch_answer_dependencies( diff --git a/api/core/workflow/nodes/answer/answer_stream_processor.py b/api/core/workflow/nodes/answer/answer_stream_processor.py index 8a768088da..d94f059058 100644 --- a/api/core/workflow/nodes/answer/answer_stream_processor.py +++ b/api/core/workflow/nodes/answer/answer_stream_processor.py @@ -6,6 +6,7 @@ from core.file import FILE_MODEL_IDENTITY, File from core.workflow.entities.variable_pool import VariablePool from core.workflow.graph_engine.entities.event import ( GraphEngineEvent, + NodeRunExceptionEvent, NodeRunStartedEvent, NodeRunStreamChunkEvent, NodeRunSucceededEvent, @@ -50,7 +51,7 @@ class AnswerStreamProcessor(StreamProcessor): for _ in stream_out_answer_node_ids: yield event - elif isinstance(event, NodeRunSucceededEvent): + elif isinstance(event, NodeRunSucceededEvent | NodeRunExceptionEvent): yield event if event.route_node_state.node_id in self.current_stream_chunk_generating_node_ids: # update self.route_position after all stream event finished diff --git a/api/core/workflow/nodes/base/entities.py b/api/core/workflow/nodes/base/entities.py index fb50fbd6e8..9271867aff 100644 --- a/api/core/workflow/nodes/base/entities.py +++ b/api/core/workflow/nodes/base/entities.py @@ -1,14 +1,124 @@ +import json from abc import ABC -from typing import Optional +from enum import StrEnum +from typing import Any, Optional, Union -from pydantic import BaseModel +from pydantic import BaseModel, model_validator + +from core.workflow.nodes.base.exc import DefaultValueTypeError +from core.workflow.nodes.enums import ErrorStrategy + + +class DefaultValueType(StrEnum): + STRING = "string" + NUMBER = "number" + OBJECT = "object" + ARRAY_NUMBER = "array[number]" + ARRAY_STRING = "array[string]" + ARRAY_OBJECT = "array[object]" + ARRAY_FILES = "array[file]" + + +NumberType = Union[int, float] + + +class DefaultValue(BaseModel): + value: Any + type: DefaultValueType + key: str + + @staticmethod + def _parse_json(value: str) -> Any: + """Unified JSON parsing handler""" + try: + return json.loads(value) + except json.JSONDecodeError: + raise DefaultValueTypeError(f"Invalid JSON format for value: {value}") + + @staticmethod + def _validate_array(value: Any, element_type: DefaultValueType) -> bool: + """Unified array type validation""" + return isinstance(value, list) and all(isinstance(x, element_type) for x in value) + + @staticmethod + def _convert_number(value: str) -> float: + """Unified number conversion handler""" + try: + return float(value) + except ValueError: + raise DefaultValueTypeError(f"Cannot convert to number: {value}") + + @model_validator(mode="after") + def validate_value_type(self) -> "DefaultValue": + if self.type is None: + raise DefaultValueTypeError("type field is required") + + # Type validation configuration + type_validators = { + DefaultValueType.STRING: { + "type": str, + "converter": lambda x: x, + }, + DefaultValueType.NUMBER: { + "type": NumberType, + "converter": self._convert_number, + }, + DefaultValueType.OBJECT: { + "type": dict, + "converter": self._parse_json, + }, + DefaultValueType.ARRAY_NUMBER: { + "type": list, 
+ "element_type": NumberType, + "converter": self._parse_json, + }, + DefaultValueType.ARRAY_STRING: { + "type": list, + "element_type": str, + "converter": self._parse_json, + }, + DefaultValueType.ARRAY_OBJECT: { + "type": list, + "element_type": dict, + "converter": self._parse_json, + }, + } + + validator = type_validators.get(self.type) + if not validator: + if self.type == DefaultValueType.ARRAY_FILES: + # Handle files type + return self + raise DefaultValueTypeError(f"Unsupported type: {self.type}") + + # Handle string input cases + if isinstance(self.value, str) and self.type != DefaultValueType.STRING: + self.value = validator["converter"](self.value) + + # Validate base type + if not isinstance(self.value, validator["type"]): + raise DefaultValueTypeError(f"Value must be {validator['type'].__name__} type for {self.value}") + + # Validate array element types + if validator["type"] == list and not self._validate_array(self.value, validator["element_type"]): + raise DefaultValueTypeError(f"All elements must be {validator['element_type'].__name__} for {self.value}") + + return self class BaseNodeData(ABC, BaseModel): title: str desc: Optional[str] = None + error_strategy: Optional[ErrorStrategy] = None + default_value: Optional[list[DefaultValue]] = None version: str = "1" + @property + def default_value_dict(self): + if self.default_value: + return {item.key: item.value for item in self.default_value} + return {} + class BaseIterationNodeData(BaseNodeData): start_node_id: Optional[str] = None diff --git a/api/core/workflow/nodes/base/exc.py b/api/core/workflow/nodes/base/exc.py new file mode 100644 index 0000000000..ec134e031c --- /dev/null +++ b/api/core/workflow/nodes/base/exc.py @@ -0,0 +1,10 @@ +class BaseNodeError(Exception): + """Base class for node errors.""" + + pass + + +class DefaultValueTypeError(BaseNodeError): + """Raised when the default value type is invalid.""" + + pass diff --git a/api/core/workflow/nodes/base/node.py b/api/core/workflow/nodes/base/node.py index d0fbed31cd..e1e28af60b 100644 --- a/api/core/workflow/nodes/base/node.py +++ b/api/core/workflow/nodes/base/node.py @@ -4,7 +4,7 @@ from collections.abc import Generator, Mapping, Sequence from typing import TYPE_CHECKING, Any, Generic, Optional, TypeVar, Union, cast from core.workflow.entities.node_entities import NodeRunResult -from core.workflow.nodes.enums import NodeType +from core.workflow.nodes.enums import CONTINUE_ON_ERROR_NODE_TYPE, NodeType from core.workflow.nodes.event import NodeEvent, RunCompletedEvent from models.workflow import WorkflowNodeExecutionStatus @@ -72,10 +72,7 @@ class BaseNode(Generic[GenericNodeData]): result = self._run() except Exception as e: logger.exception(f"Node {self.node_id} failed to run") - result = NodeRunResult( - status=WorkflowNodeExecutionStatus.FAILED, - error=str(e), - ) + result = NodeRunResult(status=WorkflowNodeExecutionStatus.FAILED, error=str(e), error_type="SystemError") if isinstance(result, NodeRunResult): yield RunCompletedEvent(run_result=result) @@ -137,3 +134,12 @@ class BaseNode(Generic[GenericNodeData]): :return: """ return self._node_type + + @property + def should_continue_on_error(self) -> bool: + """judge if should continue on error + + Returns: + bool: if should continue on error + """ + return self.node_data.error_strategy is not None and self.node_type in CONTINUE_ON_ERROR_NODE_TYPE diff --git a/api/core/workflow/nodes/code/code_node.py b/api/core/workflow/nodes/code/code_node.py index ce283e38ec..19b9078a5c 100644 --- 
a/api/core/workflow/nodes/code/code_node.py +++ b/api/core/workflow/nodes/code/code_node.py @@ -61,7 +61,9 @@ class CodeNode(BaseNode[CodeNodeData]): # Transform result result = self._transform_result(result, self.node_data.outputs) except (CodeExecutionError, CodeNodeError) as e: - return NodeRunResult(status=WorkflowNodeExecutionStatus.FAILED, inputs=variables, error=str(e)) + return NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, inputs=variables, error=str(e), error_type=type(e).__name__ + ) return NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=variables, outputs=result) diff --git a/api/core/workflow/nodes/document_extractor/node.py b/api/core/workflow/nodes/document_extractor/node.py index d490a2eb03..59afe7ac87 100644 --- a/api/core/workflow/nodes/document_extractor/node.py +++ b/api/core/workflow/nodes/document_extractor/node.py @@ -1,6 +1,8 @@ import csv import io import json +import os +import tempfile import docx import pandas as pd @@ -264,14 +266,20 @@ def _extract_text_from_ppt(file_content: bytes) -> str: def _extract_text_from_pptx(file_content: bytes) -> str: try: - with io.BytesIO(file_content) as file: - if dify_config.UNSTRUCTURED_API_URL and dify_config.UNSTRUCTURED_API_KEY: - elements = partition_via_api( - file=file, - api_url=dify_config.UNSTRUCTURED_API_URL, - api_key=dify_config.UNSTRUCTURED_API_KEY, - ) - else: + if dify_config.UNSTRUCTURED_API_URL and dify_config.UNSTRUCTURED_API_KEY: + with tempfile.NamedTemporaryFile(suffix=".pptx", delete=False) as temp_file: + temp_file.write(file_content) + temp_file.flush() + with open(temp_file.name, "rb") as file: + elements = partition_via_api( + file=file, + metadata_filename=temp_file.name, + api_url=dify_config.UNSTRUCTURED_API_URL, + api_key=dify_config.UNSTRUCTURED_API_KEY, + ) + os.unlink(temp_file.name) + else: + with io.BytesIO(file_content) as file: elements = partition_pptx(file=file) return "\n".join([getattr(element, "text", "") for element in elements]) except Exception as e: diff --git a/api/core/workflow/nodes/enums.py b/api/core/workflow/nodes/enums.py index 44be403ee6..6d8ca6f701 100644 --- a/api/core/workflow/nodes/enums.py +++ b/api/core/workflow/nodes/enums.py @@ -22,3 +22,16 @@ class NodeType(StrEnum): VARIABLE_ASSIGNER = "assigner" DOCUMENT_EXTRACTOR = "document-extractor" LIST_OPERATOR = "list-operator" + + +class ErrorStrategy(StrEnum): + FAIL_BRANCH = "fail-branch" + DEFAULT_VALUE = "default-value" + + +class FailBranchSourceHandle(StrEnum): + FAILED = "fail-branch" + SUCCESS = "success-branch" + + +CONTINUE_ON_ERROR_NODE_TYPE = [NodeType.LLM, NodeType.CODE, NodeType.TOOL, NodeType.HTTP_REQUEST] diff --git a/api/core/workflow/nodes/http_request/executor.py b/api/core/workflow/nodes/http_request/executor.py index 22ad2a39f6..90251c27a8 100644 --- a/api/core/workflow/nodes/http_request/executor.py +++ b/api/core/workflow/nodes/http_request/executor.py @@ -21,6 +21,7 @@ from .entities import ( from .exc import ( AuthorizationConfigError, FileFetchError, + HttpRequestNodeError, InvalidHttpMethodError, ResponseSizeError, ) @@ -36,7 +37,7 @@ BODY_TYPE_TO_CONTENT_TYPE = { class Executor: method: Literal["get", "head", "post", "put", "delete", "patch"] url: str - params: Mapping[str, str] | None + params: list[tuple[str, str]] | None content: str | bytes | None data: Mapping[str, Any] | None files: Mapping[str, tuple[str | None, bytes, str]] | None @@ -66,7 +67,7 @@ class Executor: self.method = node_data.method self.auth = node_data.authorization self.timeout = timeout - 
self.params = {} + self.params = [] self.headers = {} self.content = None self.files = None @@ -88,14 +89,48 @@ def _init_params(self): - params = _plain_text_to_dict(self.node_data.params) - for key in params: - params[key] = self.variable_pool.convert_template(params[key]).text - self.params = params + """ + Almost the same as _init_headers(), with two differences: + 1. returns a list of tuples so repeated keys are preserved, like 'aa=1&aa=2' + 2. a param value may contain '\n', so we split lines first and then resolve the variable values. + """ + result = [] + for line in self.node_data.params.splitlines(): + if not (line := line.strip()): + continue + + key, *value = line.split(":", 1) + if not (key := key.strip()): + continue + + value = value[0].strip() if value else "" + result.append( + (self.variable_pool.convert_template(key).text, self.variable_pool.convert_template(value).text) + ) + + self.params = result def _init_headers(self): + """ + Convert the header string from the frontend to a dictionary. + + Each line in the header string represents a key-value pair. + Keys and values are separated by ':'. + Empty values are allowed. + + Examples: + 'aa:bb\n cc:dd' -> {'aa': 'bb', 'cc': 'dd'} + 'aa:\n cc:dd\n' -> {'aa': '', 'cc': 'dd'} + 'aa\n cc : dd' -> {'aa': '', 'cc': 'dd'} + + """ headers = self.variable_pool.convert_template(self.node_data.headers).text - self.headers = _plain_text_to_dict(headers) + self.headers = { + key.strip(): (value[0].strip() if value else "") + for line in headers.splitlines() + if line.strip() + for key, *value in [line.split(":", 1)] + } def _init_body(self): body = self.node_data.body @@ -208,8 +243,10 @@ class Executor: "follow_redirects": True, } # request_args = {k: v for k, v in request_args.items() if v is not None} - - response = getattr(ssrf_proxy, self.method)(**request_args) + try: + response = getattr(ssrf_proxy, self.method)(**request_args) + except ssrf_proxy.MaxRetriesExceededError as e: + raise HttpRequestNodeError(str(e)) return response def invoke(self) -> Response: @@ -285,33 +322,6 @@ class Executor: return raw -def _plain_text_to_dict(text: str, /) -> dict[str, str]: - """ - Convert a string of key-value pairs to a dictionary. - - Each line in the input string represents a key-value pair. - Keys and values are separated by ':'. - Empty values are allowed. - - Examples: - 'aa:bb\n cc:dd' -> {'aa': 'bb', 'cc': 'dd'} - 'aa:\n cc:dd\n' -> {'aa': '', 'cc': 'dd'} - 'aa\n cc : dd' -> {'aa': '', 'cc': 'dd'} - - Args: - convert_text (str): The input string to convert. - - Returns: - dict[str, str]: A dictionary of key-value pairs. - """ - return { - key.strip(): (value[0].strip() if value else "") - for line in text.splitlines() - if line.strip() - for key, *value in [line.split(":", 1)] - } - - def _generate_random_string(n: int) -> str: """ Generate a random string of lowercase ASCII letters. 
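The `params` change above is the behavioral core of this executor hunk: a dict silently collapses repeated query keys, while a list of tuples preserves them, and httpx accepts a list of key-value pairs for `params`. A standalone sketch of the new parsing behavior, with Dify's variable-template resolution left out:

```python
def parse_params(text: str) -> list[tuple[str, str]]:
    """Parse 'key:value' lines into an ordered list of pairs, keeping duplicate keys."""
    result = []
    for line in text.splitlines():
        if not (line := line.strip()):
            continue
        key, *value = line.split(":", 1)
        if not (key := key.strip()):
            continue
        result.append((key, value[0].strip() if value else ""))
    return result


assert parse_params("aa:1\naa:2\nbb:") == [("aa", "1"), ("aa", "2"), ("bb", "")]
# httpx then encodes the pairs as repeated keys: ?aa=1&aa=2&bb=
```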
diff --git a/api/core/workflow/nodes/http_request/node.py b/api/core/workflow/nodes/http_request/node.py index 2a92a16ede..d040cc9f55 100644 --- a/api/core/workflow/nodes/http_request/node.py +++ b/api/core/workflow/nodes/http_request/node.py @@ -65,6 +65,21 @@ class HttpRequestNode(BaseNode[HttpRequestNodeData]): response = http_executor.invoke() files = self.extract_files(url=http_executor.url, response=response) + if not response.response.is_success and self.should_continue_on_error: + return NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + outputs={ + "status_code": response.status_code, + "body": response.text if not files else "", + "headers": response.headers, + "files": files, + }, + process_data={ + "request": http_executor.to_log(), + }, + error=f"Request failed with status code {response.status_code}", + error_type="HTTPResponseCodeError", + ) return NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, outputs={ @@ -83,6 +98,7 @@ class HttpRequestNode(BaseNode[HttpRequestNodeData]): status=WorkflowNodeExecutionStatus.FAILED, error=str(e), process_data=process_data, + error_type=type(e).__name__, ) @staticmethod diff --git a/api/core/workflow/nodes/iteration/iteration_node.py b/api/core/workflow/nodes/iteration/iteration_node.py index bba6ac20d3..cbabe7a3c5 100644 --- a/api/core/workflow/nodes/iteration/iteration_node.py +++ b/api/core/workflow/nodes/iteration/iteration_node.py @@ -9,7 +9,7 @@ from typing import TYPE_CHECKING, Any, Optional, cast from flask import Flask, current_app from configs import dify_config -from core.model_runtime.utils.encoders import jsonable_encoder +from core.variables import IntegerVariable from core.workflow.entities.node_entities import ( NodeRunMetadataKey, NodeRunResult, @@ -155,28 +155,29 @@ class IterationNode(BaseNode[IterationNodeData]): iteration_node_data=self.node_data, index=0, pre_iteration_output=None, + duration=None, ) iter_run_map: dict[str, float] = {} outputs: list[Any] = [None] * len(iterator_list_value) try: if self.node_data.is_parallel: futures: list[Future] = [] - q = Queue() + q: Queue = Queue() thread_pool = GraphEngineThreadPool(max_workers=self.node_data.parallel_nums, max_submit_count=100) for index, item in enumerate(iterator_list_value): future: Future = thread_pool.submit( self._run_single_iter_parallel, - current_app._get_current_object(), - q, - iterator_list_value, - inputs, - outputs, - start_at, - graph_engine, - iteration_graph, - index, - item, - iter_run_map, + flask_app=current_app._get_current_object(), # type: ignore + q=q, + iterator_list_value=iterator_list_value, + inputs=inputs, + outputs=outputs, + start_at=start_at, + graph_engine=graph_engine, + iteration_graph=iteration_graph, + index=index, + item=item, + iter_run_map=iter_run_map, ) future.add_done_callback(thread_pool.task_done_callback) futures.append(future) @@ -208,17 +209,22 @@ class IterationNode(BaseNode[IterationNodeData]): else: for _ in range(len(iterator_list_value)): yield from self._run_single_iter( - iterator_list_value, - variable_pool, - inputs, - outputs, - start_at, - graph_engine, - iteration_graph, - iter_run_map, + iterator_list_value=iterator_list_value, + variable_pool=variable_pool, + inputs=inputs, + outputs=outputs, + start_at=start_at, + graph_engine=graph_engine, + iteration_graph=iteration_graph, + iter_run_map=iter_run_map, ) if self.node_data.error_handle_mode == ErrorHandleMode.REMOVE_ABNORMAL_OUTPUT: outputs = [output for output in outputs if output is not None] + + # Flatten the list of lists + 
if isinstance(outputs, list) and all(isinstance(output, list) for output in outputs): + outputs = [item for sublist in outputs for item in sublist] + yield IterationRunSucceededEvent( iteration_id=self.id, iteration_node_id=self.node_id, @@ -226,7 +232,7 @@ class IterationNode(BaseNode[IterationNodeData]): iteration_node_data=self.node_data, start_at=start_at, inputs=inputs, - outputs={"output": jsonable_encoder(outputs)}, + outputs={"output": outputs}, steps=len(iterator_list_value), metadata={"total_tokens": graph_engine.graph_runtime_state.total_tokens}, ) @@ -234,8 +240,11 @@ class IterationNode(BaseNode[IterationNodeData]): yield RunCompletedEvent( run_result=NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, - outputs={"output": jsonable_encoder(outputs)}, - metadata={NodeRunMetadataKey.ITERATION_DURATION_MAP: iter_run_map}, + outputs={"output": outputs}, + metadata={ + NodeRunMetadataKey.ITERATION_DURATION_MAP: iter_run_map, + NodeRunMetadataKey.TOTAL_TOKENS: graph_engine.graph_runtime_state.total_tokens, + }, ) ) except IterationNodeError as e: @@ -248,7 +257,7 @@ class IterationNode(BaseNode[IterationNodeData]): iteration_node_data=self.node_data, start_at=start_at, inputs=inputs, - outputs={"output": jsonable_encoder(outputs)}, + outputs={"output": outputs}, steps=len(iterator_list_value), metadata={"total_tokens": graph_engine.graph_runtime_state.total_tokens}, error=str(e), @@ -280,7 +289,7 @@ class IterationNode(BaseNode[IterationNodeData]): :param node_data: node data :return: """ - variable_mapping = { + variable_mapping: dict[str, Sequence[str]] = { f"{node_id}.input_selector": node_data.iterator_selector, } @@ -308,7 +317,7 @@ class IterationNode(BaseNode[IterationNodeData]): sub_node_variable_mapping = node_cls.extract_variable_selector_to_variable_mapping( graph_config=graph_config, config=sub_node_config ) - sub_node_variable_mapping = cast(dict[str, list[str]], sub_node_variable_mapping) + sub_node_variable_mapping = cast(dict[str, Sequence[str]], sub_node_variable_mapping) except NotImplementedError: sub_node_variable_mapping = {} @@ -329,8 +338,12 @@ class IterationNode(BaseNode[IterationNodeData]): return variable_mapping def _handle_event_metadata( - self, event: BaseNodeEvent, iter_run_index: str, parallel_mode_run_id: str - ) -> NodeRunStartedEvent | BaseNodeEvent: + self, + *, + event: BaseNodeEvent | InNodeEvent, + iter_run_index: int, + parallel_mode_run_id: str | None, + ) -> NodeRunStartedEvent | BaseNodeEvent | InNodeEvent: """ add iteration metadata to event. 
""" @@ -355,9 +368,10 @@ class IterationNode(BaseNode[IterationNodeData]): def _run_single_iter( self, - iterator_list_value: list[str], + *, + iterator_list_value: Sequence[str], variable_pool: VariablePool, - inputs: dict[str, list], + inputs: Mapping[str, list], outputs: list, start_at: datetime, graph_engine: "GraphEngine", @@ -373,12 +387,12 @@ class IterationNode(BaseNode[IterationNodeData]): try: rst = graph_engine.run() # get current iteration index - current_index = variable_pool.get([self.node_id, "index"]).value + index_variable = variable_pool.get([self.node_id, "index"]) + if not isinstance(index_variable, IntegerVariable): + raise IterationIndexNotFoundError(f"iteration {self.node_id} current index not found") + current_index = index_variable.value iteration_run_id = parallel_mode_run_id if parallel_mode_run_id is not None else f"{current_index}" next_index = int(current_index) + 1 - - if current_index is None: - raise IterationIndexNotFoundError(f"iteration {self.node_id} current index not found") for event in rst: if isinstance(event, (BaseNodeEvent | BaseParallelBranchEvent)) and not event.in_iteration_id: event.in_iteration_id = self.node_id @@ -391,7 +405,9 @@ class IterationNode(BaseNode[IterationNodeData]): continue if isinstance(event, NodeRunSucceededEvent): - yield self._handle_event_metadata(event, current_index, parallel_mode_run_id) + yield self._handle_event_metadata( + event=event, iter_run_index=current_index, parallel_mode_run_id=parallel_mode_run_id + ) elif isinstance(event, BaseGraphEvent): if isinstance(event, GraphRunFailedEvent): # iteration run failed @@ -404,7 +420,7 @@ class IterationNode(BaseNode[IterationNodeData]): parallel_mode_run_id=parallel_mode_run_id, start_at=start_at, inputs=inputs, - outputs={"output": jsonable_encoder(outputs)}, + outputs={"output": outputs}, steps=len(iterator_list_value), metadata={"total_tokens": graph_engine.graph_runtime_state.total_tokens}, error=event.error, @@ -417,7 +433,7 @@ class IterationNode(BaseNode[IterationNodeData]): iteration_node_data=self.node_data, start_at=start_at, inputs=inputs, - outputs={"output": jsonable_encoder(outputs)}, + outputs={"output": outputs}, steps=len(iterator_list_value), metadata={"total_tokens": graph_engine.graph_runtime_state.total_tokens}, error=event.error, @@ -429,9 +445,11 @@ class IterationNode(BaseNode[IterationNodeData]): ) ) return - else: - event = cast(InNodeEvent, event) - metadata_event = self._handle_event_metadata(event, current_index, parallel_mode_run_id) + elif isinstance(event, InNodeEvent): + # event = cast(InNodeEvent, event) + metadata_event = self._handle_event_metadata( + event=event, iter_run_index=current_index, parallel_mode_run_id=parallel_mode_run_id + ) if isinstance(event, NodeRunFailedEvent): if self.node_data.error_handle_mode == ErrorHandleMode.CONTINUE_ON_ERROR: yield NodeInIterationFailedEvent( @@ -513,7 +531,7 @@ class IterationNode(BaseNode[IterationNodeData]): iteration_node_data=self.node_data, index=next_index, parallel_mode_run_id=parallel_mode_run_id, - pre_iteration_output=jsonable_encoder(current_iteration_output) if current_iteration_output else None, + pre_iteration_output=current_iteration_output or None, duration=duration, ) @@ -540,10 +558,11 @@ class IterationNode(BaseNode[IterationNodeData]): def _run_single_iter_parallel( self, + *, flask_app: Flask, q: Queue, - iterator_list_value: list[str], - inputs: dict[str, list], + iterator_list_value: Sequence[str], + inputs: Mapping[str, list], outputs: list, start_at: datetime, 
graph_engine: "GraphEngine", @@ -551,7 +570,7 @@ class IterationNode(BaseNode[IterationNodeData]): index: int, item: Any, iter_run_map: dict[str, float], - ) -> Generator[NodeEvent | InNodeEvent, None, None]: + ): """ run single iteration in parallel mode """ @@ -573,3 +592,4 @@ class IterationNode(BaseNode[IterationNodeData]): parallel_mode_run_id=parallel_mode_run_id, ): q.put(event) + graph_engine.graph_runtime_state.total_tokens += graph_engine_copy.graph_runtime_state.total_tokens diff --git a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py index 8ab0d8b2eb..67e62cb875 100644 --- a/api/core/workflow/nodes/llm/node.py +++ b/api/core/workflow/nodes/llm/node.py @@ -193,6 +193,7 @@ class LLMNode(BaseNode[LLMNodeData]): error=str(e), inputs=node_inputs, process_data=process_data, + error_type=type(e).__name__, ) ) return @@ -615,16 +616,35 @@ class LLMNode(BaseNode[LLMNodeData]): ) # Insert histories into the prompt prompt_content = prompt_messages[0].content - if "#histories#" in prompt_content: - prompt_content = prompt_content.replace("#histories#", memory_text) + # For issue #11247 - Check if prompt content is a string or a list + prompt_content_type = type(prompt_content) + if prompt_content_type == str: + if "#histories#" in prompt_content: + prompt_content = prompt_content.replace("#histories#", memory_text) + else: + prompt_content = memory_text + "\n" + prompt_content + prompt_messages[0].content = prompt_content + elif prompt_content_type == list: + for content_item in prompt_content: + if content_item.type == PromptMessageContentType.TEXT: + if "#histories#" in content_item.data: + content_item.data = content_item.data.replace("#histories#", memory_text) + else: + content_item.data = memory_text + "\n" + content_item.data else: - prompt_content = memory_text + "\n" + prompt_content - prompt_messages[0].content = prompt_content + raise ValueError("Invalid prompt content type") # Add current query to the prompt message if user_query: - prompt_content = prompt_messages[0].content.replace("#sys.query#", user_query) - prompt_messages[0].content = prompt_content + if prompt_content_type == str: + prompt_content = prompt_messages[0].content.replace("#sys.query#", user_query) + prompt_messages[0].content = prompt_content + elif prompt_content_type == list: + for content_item in prompt_content: + if content_item.type == PromptMessageContentType.TEXT: + content_item.data = user_query + "\n" + content_item.data + else: + raise ValueError("Invalid prompt content type") else: raise TemplateTypeNotSupportError(type_name=str(type(prompt_template))) diff --git a/api/core/workflow/nodes/question_classifier/question_classifier_node.py b/api/core/workflow/nodes/question_classifier/question_classifier_node.py index 7c08e598d9..d555964baf 100644 --- a/api/core/workflow/nodes/question_classifier/question_classifier_node.py +++ b/api/core/workflow/nodes/question_classifier/question_classifier_node.py @@ -142,7 +142,7 @@ class QuestionClassifierNode(LLMNode): "usage": jsonable_encoder(usage), "finish_reason": finish_reason, } - outputs = {"class_name": category_name} + outputs = {"class_name": category_name, "class_id": category_id} return NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index 951e5330a3..9b901c026e 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -56,6 +56,7 @@ class 
ToolNode(BaseNode[ToolNodeData]): NodeRunMetadataKey.TOOL_INFO: tool_info, }, error=f"Failed to get tool runtime: {str(e)}", + error_type=type(e).__name__, ) # get parameters @@ -89,6 +90,7 @@ class ToolNode(BaseNode[ToolNodeData]): NodeRunMetadataKey.TOOL_INFO: tool_info, }, error=f"Failed to invoke tool: {str(e)}", + error_type=type(e).__name__, ) # convert tool messages diff --git a/api/docker/entrypoint.sh b/api/docker/entrypoint.sh index 2b6a8dd3d0..881263171f 100755 --- a/api/docker/entrypoint.sh +++ b/api/docker/entrypoint.sh @@ -34,7 +34,6 @@ else --workers ${SERVER_WORKER_AMOUNT:-1} \ --worker-class ${SERVER_WORKER_CLASS:-gevent} \ --timeout ${GUNICORN_TIMEOUT:-200} \ - --preload \ app:app fi fi diff --git a/api/extensions/ext_logging.py b/api/extensions/ext_logging.py index 738d5c7bd2..9fc29b4eb1 100644 --- a/api/extensions/ext_logging.py +++ b/api/extensions/ext_logging.py @@ -1,8 +1,11 @@ import logging import os import sys +import uuid from logging.handlers import RotatingFileHandler +import flask + from configs import dify_config from dify_app import DifyApp @@ -22,11 +25,14 @@ def init_app(app: DifyApp): ) # Always add StreamHandler to log to console - log_handlers.append(logging.StreamHandler(sys.stdout)) + sh = logging.StreamHandler(sys.stdout) + sh.addFilter(RequestIdFilter()) + log_formatter = logging.Formatter(fmt=dify_config.LOG_FORMAT) + sh.setFormatter(log_formatter) + log_handlers.append(sh) logging.basicConfig( level=dify_config.LOG_LEVEL, - format=dify_config.LOG_FORMAT, datefmt=dify_config.LOG_DATEFORMAT, handlers=log_handlers, force=True, @@ -44,3 +50,22 @@ def init_app(app: DifyApp): for handler in logging.root.handlers: handler.formatter.converter = time_converter + + +def get_request_id(): + if getattr(flask.g, "request_id", None): + return flask.g.request_id + + new_uuid = uuid.uuid4().hex[:10] + flask.g.request_id = new_uuid + + return new_uuid + + +class RequestIdFilter(logging.Filter): + # This is a logging filter that makes the request ID available for use in + # the logging format. Note that we're checking if we're in a request + # context, as we may want to log things before Flask is fully loaded. 
+ def filter(self, record): + record.req_id = get_request_id() if flask.has_request_context() else "" + return True diff --git a/api/extensions/ext_storage.py b/api/extensions/ext_storage.py index 6c30b7a257..4b66f3801e 100644 --- a/api/extensions/ext_storage.py +++ b/api/extensions/ext_storage.py @@ -1,31 +1,43 @@ import logging -from collections.abc import Generator +from collections.abc import Callable, Generator, Mapping from typing import Union from flask import Flask from configs import dify_config +from configs.middleware.storage.opendal_storage_config import OpenDALScheme from dify_app import DifyApp from extensions.storage.base_storage import BaseStorage from extensions.storage.storage_type import StorageType +logger = logging.getLogger(__name__) + class Storage: - def __init__(self): - self.storage_runner = None - def init_app(self, app: Flask): storage_factory = self.get_storage_factory(dify_config.STORAGE_TYPE) with app.app_context(): self.storage_runner = storage_factory() @staticmethod - def get_storage_factory(storage_type: str) -> type[BaseStorage]: + def get_storage_factory(storage_type: str) -> Callable[[], BaseStorage]: match storage_type: case StorageType.S3: - from extensions.storage.aws_s3_storage import AwsS3Storage + from extensions.storage.opendal_storage import OpenDALStorage - return AwsS3Storage + kwargs = _load_s3_storage_kwargs() + return lambda: OpenDALStorage(scheme=OpenDALScheme.S3, **kwargs) + case StorageType.OPENDAL: + from extensions.storage.opendal_storage import OpenDALStorage + + scheme = OpenDALScheme(dify_config.STORAGE_OPENDAL_SCHEME) + kwargs = _load_opendal_storage_kwargs(scheme) + return lambda: OpenDALStorage(scheme=scheme, **kwargs) + case StorageType.LOCAL: + from extensions.storage.opendal_storage import OpenDALStorage + + kwargs = _load_local_storage_kwargs() + return lambda: OpenDALStorage(scheme=OpenDALScheme.FS, **kwargs) case StorageType.AZURE_BLOB: from extensions.storage.azure_blob_storage import AzureBlobStorage @@ -62,16 +74,14 @@ class Storage: from extensions.storage.supabase_storage import SupabaseStorage return SupabaseStorage - case StorageType.LOCAL | _: - from extensions.storage.local_fs_storage import LocalFsStorage - - return LocalFsStorage + case _: + raise ValueError(f"Unsupported storage type {storage_type}") def save(self, filename, data): try: self.storage_runner.save(filename, data) except Exception as e: - logging.exception(f"Failed to save file {filename}") + logger.exception(f"Failed to save file {filename}") raise e def load(self, filename: str, /, *, stream: bool = False) -> Union[bytes, Generator]: @@ -81,45 +91,120 @@ class Storage: else: return self.load_once(filename) except Exception as e: - logging.exception(f"Failed to load file {filename}") + logger.exception(f"Failed to load file {filename}") raise e def load_once(self, filename: str) -> bytes: try: return self.storage_runner.load_once(filename) except Exception as e: - logging.exception(f"Failed to load_once file {filename}") + logger.exception(f"Failed to load_once file {filename}") raise e def load_stream(self, filename: str) -> Generator: try: return self.storage_runner.load_stream(filename) except Exception as e: - logging.exception(f"Failed to load_stream file {filename}") + logger.exception(f"Failed to load_stream file {filename}") raise e def download(self, filename, target_filepath): try: self.storage_runner.download(filename, target_filepath) except Exception as e: - logging.exception(f"Failed to download file {filename}") + 
logger.exception(f"Failed to download file {filename}") raise e def exists(self, filename): try: return self.storage_runner.exists(filename) except Exception as e: - logging.exception(f"Failed to check file exists {filename}") + logger.exception(f"Failed to check file exists {filename}") raise e def delete(self, filename): try: return self.storage_runner.delete(filename) except Exception as e: - logging.exception(f"Failed to delete file {filename}") + logger.exception(f"Failed to delete file {filename}") raise e +def _load_s3_storage_kwargs() -> Mapping[str, str]: + """ + Load the kwargs for S3 storage based on dify_config. + Handles special cases like AWS managed IAM and R2. + """ + kwargs = { + "root": "/", + "bucket": dify_config.S3_BUCKET_NAME, + "endpoint": dify_config.S3_ENDPOINT, + "access_key_id": dify_config.S3_ACCESS_KEY, + "secret_access_key": dify_config.S3_SECRET_KEY, + "region": dify_config.S3_REGION, + } + kwargs = {k: v for k, v in kwargs.items() if isinstance(v, str)} + + # For AWS managed IAM + if dify_config.S3_USE_AWS_MANAGED_IAM: + from extensions.storage.opendal_storage import S3_SSE_WITH_AWS_MANAGED_IAM_KWARGS + + logger.debug("Using AWS managed IAM role for S3") + kwargs = {**kwargs, **{k: v for k, v in S3_SSE_WITH_AWS_MANAGED_IAM_KWARGS.items() if k not in kwargs}} + + # For Cloudflare R2 + if kwargs.get("endpoint"): + from extensions.storage.opendal_storage import S3_R2_COMPATIBLE_KWARGS, is_r2_endpoint + + if is_r2_endpoint(kwargs["endpoint"]): + logger.debug("Using R2 for OpenDAL S3") + kwargs = {**kwargs, **{k: v for k, v in S3_R2_COMPATIBLE_KWARGS.items() if k not in kwargs}} + + return kwargs + + +def _load_local_storage_kwargs() -> Mapping[str, str]: + """ + Load the kwargs for local storage based on dify_config. + """ + return { + "root": dify_config.STORAGE_LOCAL_PATH, + } + + +def _load_opendal_storage_kwargs(scheme: OpenDALScheme) -> Mapping[str, str]: + """ + Load the kwargs for OpenDAL storage based on the given scheme. 
+ """ + match scheme: + case OpenDALScheme.FS: + kwargs = { + "root": dify_config.OPENDAL_FS_ROOT, + } + case OpenDALScheme.S3: + # Load OpenDAL S3-related configs + kwargs = { + "root": dify_config.OPENDAL_S3_ROOT, + "bucket": dify_config.OPENDAL_S3_BUCKET, + "endpoint": dify_config.OPENDAL_S3_ENDPOINT, + "access_key_id": dify_config.OPENDAL_S3_ACCESS_KEY_ID, + "secret_access_key": dify_config.OPENDAL_S3_SECRET_ACCESS_KEY, + "region": dify_config.OPENDAL_S3_REGION, + } + + # For Cloudflare R2 + if kwargs.get("endpoint"): + from extensions.storage.opendal_storage import S3_R2_COMPATIBLE_KWARGS, is_r2_endpoint + + if is_r2_endpoint(kwargs["endpoint"]): + logger.debug("Using R2 for OpenDAL S3") + kwargs = {**kwargs, **{k: v for k, v in S3_R2_COMPATIBLE_KWARGS.items() if k not in kwargs}} + case _: + logger.warning(f"Unrecognized OpenDAL scheme: {scheme}, will fall back to default.") + kwargs = {} + return kwargs + + storage = Storage() diff --git a/api/extensions/storage/base_storage.py b/api/extensions/storage/base_storage.py index 50abab8537..0dedd7ff8c 100644 --- a/api/extensions/storage/base_storage.py +++ b/api/extensions/storage/base_storage.py @@ -7,9 +7,6 @@ from collections.abc import Generator class BaseStorage(ABC): """Interface for file storage.""" - def __init__(self): # noqa: B027 - pass - @abstractmethod def save(self, filename, data): raise NotImplementedError diff --git a/api/extensions/storage/local_fs_storage.py b/api/extensions/storage/local_fs_storage.py deleted file mode 100644 index 5a495ca4d4..0000000000 --- a/api/extensions/storage/local_fs_storage.py +++ /dev/null @@ -1,62 +0,0 @@ -import os -import shutil -from collections.abc import Generator -from pathlib import Path - -from flask import current_app - -from configs import dify_config -from extensions.storage.base_storage import BaseStorage - - -class LocalFsStorage(BaseStorage): - """Implementation for local filesystem storage.""" - - def __init__(self): - super().__init__() - folder = dify_config.STORAGE_LOCAL_PATH - if not os.path.isabs(folder): - folder = os.path.join(current_app.root_path, folder) - self.folder = folder - - def _build_filepath(self, filename: str) -> str: - """Build the full file path based on the folder and filename.""" - if not self.folder or self.folder.endswith("/"): - return self.folder + filename - else: - return self.folder + "/" + filename - - def save(self, filename, data): - filepath = self._build_filepath(filename) - folder = os.path.dirname(filepath) - os.makedirs(folder, exist_ok=True) - Path(os.path.join(os.getcwd(), filepath)).write_bytes(data) - - def load_once(self, filename: str) -> bytes: - filepath = self._build_filepath(filename) - if not os.path.exists(filepath): - raise FileNotFoundError("File not found") - return Path(filepath).read_bytes() - - def load_stream(self, filename: str) -> Generator: - filepath = self._build_filepath(filename) - if not os.path.exists(filepath): - raise FileNotFoundError("File not found") - with open(filepath, "rb") as f: - while chunk := f.read(4096): # Read in chunks of 4KB - yield chunk - - def download(self, filename, target_filepath): - filepath = self._build_filepath(filename) - if not os.path.exists(filepath): - raise FileNotFoundError("File not found") - shutil.copyfile(filepath, target_filepath) - - def exists(self, filename): - filepath = self._build_filepath(filename) - return os.path.exists(filepath) - - def delete(self, filename): - filepath = self._build_filepath(filename) - if os.path.exists(filepath): - os.remove(filepath) 
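With the refactor above, Storage.get_storage_factory returns a zero-argument callable rather than a class, so the S3, LOCAL and OPENDAL storage types can all bind scheme-specific kwargs into an OpenDALStorage constructor (the class is added in the new file below). The kwargs-layering idiom in _load_s3_storage_kwargs and _load_opendal_storage_kwargs folds defaults in without overriding anything already configured; a tiny self-contained sketch with hypothetical values:

# Hypothetical values for illustration: keys already present always win.
base = {"region": "us-east-1", "bucket": "my-bucket"}
extra = {"region": "auto", "delete_max_size": "700"}  # e.g. R2-compatible defaults

merged = {**base, **{k: v for k, v in extra.items() if k not in base}}
print(merged)  # {'region': 'us-east-1', 'bucket': 'my-bucket', 'delete_max_size': '700'}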
diff --git a/api/extensions/storage/opendal_storage.py b/api/extensions/storage/opendal_storage.py new file mode 100644 index 0000000000..dc71839c70 --- /dev/null +++ b/api/extensions/storage/opendal_storage.py @@ -0,0 +1,72 @@ +from collections.abc import Generator +from pathlib import Path +from urllib.parse import urlparse + +import opendal + +from configs.middleware.storage.opendal_storage_config import OpenDALScheme +from extensions.storage.base_storage import BaseStorage + +S3_R2_HOSTNAME = "r2.cloudflarestorage.com" +S3_R2_COMPATIBLE_KWARGS = { + "delete_max_size": "700", + "disable_stat_with_override": "true", + "region": "auto", +} +S3_SSE_WITH_AWS_MANAGED_IAM_KWARGS = { + "server_side_encryption": "aws:kms", +} + + +def is_r2_endpoint(endpoint: str) -> bool: + if not endpoint: + return False + + parsed_url = urlparse(endpoint) + return bool(parsed_url.hostname and parsed_url.hostname.endswith(S3_R2_HOSTNAME)) + + +class OpenDALStorage(BaseStorage): + def __init__(self, scheme: OpenDALScheme, **kwargs): + if scheme == OpenDALScheme.FS: + Path(kwargs["root"]).mkdir(parents=True, exist_ok=True) + + self.op = opendal.Operator(scheme=scheme, **kwargs) + + def save(self, filename: str, data: bytes) -> None: + self.op.write(path=filename, bs=data) + + def load_once(self, filename: str) -> bytes: + if not self.exists(filename): + raise FileNotFoundError("File not found") + + return self.op.read(path=filename) + + def load_stream(self, filename: str) -> Generator: + if not self.exists(filename): + raise FileNotFoundError("File not found") + + batch_size = 4096 + file = self.op.open(path=filename, mode="rb") + while chunk := file.read(batch_size): + yield chunk + + def download(self, filename: str, target_filepath: str): + if not self.exists(filename): + raise FileNotFoundError("File not found") + + with Path(target_filepath).open("wb") as f: + f.write(self.op.read(path=filename)) + + def exists(self, filename: str) -> bool: + # FIXME: this is a workaround because the opendal python binding does not yet expose an exists method, + # and no better error handling is possible here; once the binding provides one, we should use it instead. + # See https://github.com/apache/opendal/blob/main/bindings/python/src/operator.rs + try: + return self.op.stat(path=filename).mode.is_file() + except Exception: + return False + + def delete(self, filename: str): + if self.exists(filename): + self.op.delete(path=filename) diff --git a/api/extensions/storage/storage_type.py b/api/extensions/storage/storage_type.py index e7fa405afa..0a891e36cf 100644 --- a/api/extensions/storage/storage_type.py +++ b/api/extensions/storage/storage_type.py @@ -9,6 +9,7 @@ class StorageType(StrEnum): HUAWEI_OBS = "huawei-obs" LOCAL = "local" OCI_STORAGE = "oci-storage" + OPENDAL = "opendal" S3 = "s3" TENCENT_COS = "tencent-cos" VOLCENGINE_TOS = "volcengine-tos" diff --git a/api/fields/workflow_run_fields.py b/api/fields/workflow_run_fields.py index 1413adf719..8390c66556 100644 --- a/api/fields/workflow_run_fields.py +++ b/api/fields/workflow_run_fields.py @@ -14,6 +14,7 @@ workflow_run_for_log_fields = { "total_steps": fields.Integer, "created_at": TimestampField, "finished_at": TimestampField, + "exceptions_count": fields.Integer, } workflow_run_for_list_fields = { @@ -27,6 +28,7 @@ "created_by_account": fields.Nested(simple_account_fields, attribute="created_by_account", allow_null=True), "created_at": TimestampField, "finished_at": TimestampField, + "exceptions_count": fields.Integer, }
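Returning to the OpenDALStorage class above, a minimal usage sketch under the fs scheme (assuming a writable ./storage directory); this mirrors what the factory produces for the local and opendal storage types:

from configs.middleware.storage.opendal_storage_config import OpenDALScheme
from extensions.storage.opendal_storage import OpenDALStorage

storage = OpenDALStorage(scheme=OpenDALScheme.FS, root="storage")
storage.save("greeting.txt", b"hello")                 # op.write under the hood
assert storage.exists("greeting.txt")
assert storage.load_once("greeting.txt") == b"hello"
data = b"".join(storage.load_stream("greeting.txt"))   # streamed in 4 KB chunks
storage.delete("greeting.txt")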
advanced_chat_workflow_run_for_list_fields = { @@ -42,6 +44,7 @@ "created_by_account": fields.Nested(simple_account_fields, attribute="created_by_account", allow_null=True), "created_at": TimestampField, "finished_at": TimestampField, + "exceptions_count": fields.Integer, } advanced_chat_workflow_run_pagination_fields = { @@ -73,6 +76,7 @@ "created_by_end_user": fields.Nested(simple_end_user_fields, attribute="created_by_end_user", allow_null=True), "created_at": TimestampField, "finished_at": TimestampField, + "exceptions_count": fields.Integer, } workflow_run_node_execution_fields = { diff --git a/api/libs/helper.py b/api/libs/helper.py index 3c1b640a20..026ded3506 100644 --- a/api/libs/helper.py +++ b/api/libs/helper.py @@ -9,11 +9,11 @@ import uuid from collections.abc import Generator, Mapping from datetime import datetime from hashlib import sha256 -from typing import Any, Optional, Union +from typing import Any, Optional, Union, cast +from zoneinfo import available_timezones from flask import Response, stream_with_context -from flask_restful import fields -from zoneinfo import available_timezones +from flask_restful import fields # type: ignore from configs import dify_config from core.app.features.rate_limiting.rate_limit import RateLimitGenerator @@ -168,11 +168,11 @@ def generate_string(n): def extract_remote_ip(request) -> str: if request.headers.get("CF-Connecting-IP"): - return request.headers.get("Cf-Connecting-Ip") + return cast(str, request.headers.get("Cf-Connecting-Ip")) elif request.headers.getlist("X-Forwarded-For"): - return request.headers.getlist("X-Forwarded-For")[0] + return cast(str, request.headers.getlist("X-Forwarded-For")[0]) else: - return request.remote_addr + return cast(str, request.remote_addr) def generate_text_hash(text: str) -> str: @@ -221,12 +221,14 @@ class TokenManager: token_data.update(additional_data) expiry_minutes = dify_config.model_dump().get(f"{token_type.upper()}_TOKEN_EXPIRY_MINUTES") + if expiry_minutes is None: + raise ValueError(f"Expiry minutes for {token_type} token is not set") token_key = cls._get_token_key(token, token_type) expiry_time = int(expiry_minutes * 60) redis_client.setex(token_key, expiry_time, json.dumps(token_data)) if account_id: - cls._set_current_token_for_account(account.id, token, token_type, expiry_minutes) + cls._set_current_token_for_account(account_id, token, token_type, expiry_minutes) return token diff --git a/api/libs/oauth_data_source.py b/api/libs/oauth_data_source.py index 48249e4a35..1d39abd8fa 100644 --- a/api/libs/oauth_data_source.py +++ b/api/libs/oauth_data_source.py @@ -253,6 +253,8 @@ class NotionOAuth(OAuthDataSource): } response = requests.get(url=f"{self._NOTION_BLOCK_SEARCH}/{block_id}", headers=headers) response_json = response.json() + if response.status_code != 200: + raise ValueError(f"Error fetching block parent page ID: {response_json.get('message')}") parent = response_json["parent"] parent_type = parent["type"] if parent_type == "block_id": diff --git a/api/migrations/versions/2024_11_28_0553-cf8f4fc45278_add_exceptions_count_field_to_.py b/api/migrations/versions/2024_11_28_0553-cf8f4fc45278_add_exceptions_count_field_to_.py new file mode 100644 index 0000000000..8c576339ba --- /dev/null +++ b/api/migrations/versions/2024_11_28_0553-cf8f4fc45278_add_exceptions_count_field_to_.py @@ -0,0 +1,33 @@ +"""add exceptions_count field to WorkflowRun model + +Revision ID: cf8f4fc45278 +Revises: 01d6889832f7 +Create Date:
2024-11-28 05:53:21.576178 + +""" +from alembic import op +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'cf8f4fc45278' +down_revision = '01d6889832f7' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('workflow_runs', schema=None) as batch_op: + batch_op.add_column(sa.Column('exceptions_count', sa.Integer(), server_default=sa.text('0'), nullable=True)) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('workflow_runs', schema=None) as batch_op: + batch_op.drop_column('exceptions_count') + + # ### end Alembic commands ### diff --git a/api/models/workflow.py b/api/models/workflow.py index c0e70889a8..1b0af85f08 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -325,6 +325,7 @@ class WorkflowRunStatus(StrEnum): SUCCEEDED = "succeeded" FAILED = "failed" STOPPED = "stopped" + PARTIAL_SUCCESSED = "partial-succeeded" @classmethod def value_of(cls, value: str) -> "WorkflowRunStatus": @@ -395,7 +396,7 @@ class WorkflowRun(db.Model): version = db.Column(db.String(255), nullable=False) graph = db.Column(db.Text) inputs = db.Column(db.Text) - status = db.Column(db.String(255), nullable=False) # running, succeeded, failed, stopped + status = db.Column(db.String(255), nullable=False) # running, succeeded, failed, stopped, partial-succeeded outputs: Mapped[str] = mapped_column(sa.Text, default="{}") error = db.Column(db.Text) elapsed_time = db.Column(db.Float, nullable=False, server_default=db.text("0")) @@ -405,6 +406,7 @@ class WorkflowRun(db.Model): created_by = db.Column(StringUUID, nullable=False) created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) finished_at = db.Column(db.DateTime) + exceptions_count = db.Column(db.Integer, server_default=db.text("0")) @property def created_by_account(self): @@ -464,6 +466,7 @@ class WorkflowRun(db.Model): "created_by": self.created_by, "created_at": self.created_at, "finished_at": self.finished_at, + "exceptions_count": self.exceptions_count, } @classmethod @@ -489,6 +492,7 @@ class WorkflowRun(db.Model): created_by=data.get("created_by"), created_at=data.get("created_at"), finished_at=data.get("finished_at"), + exceptions_count=data.get("exceptions_count"), ) @@ -522,6 +526,7 @@ class WorkflowNodeExecutionStatus(Enum): RUNNING = "running" SUCCEEDED = "succeeded" FAILED = "failed" + EXCEPTION = "exception" @classmethod def value_of(cls, value: str) -> "WorkflowNodeExecutionStatus": diff --git a/api/poetry.lock b/api/poetry.lock index 4c784e53cd..2cdd07202c 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. 
[[package]] name = "aiofiles" @@ -483,22 +483,23 @@ vertex = ["google-auth (>=2,<3)"] [[package]] name = "anyio" -version = "4.6.2.post1" +version = "4.7.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" files = [ - {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, - {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, + {file = "anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352"}, + {file = "anyio-4.7.0.tar.gz", hash = "sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48"}, ] [package.dependencies] idna = ">=2.8" sniffio = ">=1.1" +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] trio = ["trio (>=0.26.1)"] [[package]] @@ -869,13 +870,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.74" +version = "1.35.76" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.74-py3-none-any.whl", hash = "sha256:9ac9d33d84dd9f05b35085de081552342a2c9ae22e3c4ee105723c9e92c07bd9"}, - {file = "botocore-1.35.74.tar.gz", hash = "sha256:de5c4fa9a24cef3a758974857b5c5820a12fad345ebf33c052a5988e88f33634"}, + {file = "botocore-1.35.76-py3-none-any.whl", hash = "sha256:b4729d12d00267b3185628f83543917b6caae292385230ab464067621aa086af"}, + {file = "botocore-1.35.76.tar.gz", hash = "sha256:a75a42ae53395796b8300c5fefb2d65a8696dc40dc85e49cf3a769e0c0202b13"}, ] [package.dependencies] @@ -954,10 +955,6 @@ files = [ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"}, {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"}, {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"}, {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"}, {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"}, {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"}, @@ -970,14 +967,8 @@ files = [ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"}, {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"}, {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"}, {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"}, {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"}, - {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"}, - {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", 
hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"}, {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"}, {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"}, {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"}, @@ -988,24 +979,8 @@ files = [ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"}, {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"}, {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"}, {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"}, {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"}, - {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"}, - {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"}, - {file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = 
"sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"}, - {file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"}, {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"}, {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"}, {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"}, @@ -1015,10 +990,6 @@ files = [ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"}, {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"}, {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"}, {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"}, {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"}, {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"}, @@ -1030,10 +1001,6 @@ files = [ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"}, {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"}, {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"}, {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"}, {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"}, {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"}, @@ -1046,10 +1013,6 @@ files = [ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"}, {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"}, {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"}, {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"}, {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"}, {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"}, @@ -1062,10 +1025,6 @@ files = [ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"}, {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"}, {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"}, {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"}, {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"}, {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"}, @@ -2076,17 +2035,17 @@ tokenizer = ["tiktoken"] [[package]] name = "dataclass-wizard" -version = "0.32.0" +version = "0.32.1" description = "Effortlessly marshal dataclasses to/from JSON. Leverage field properties with default values. Generate dataclass schemas from JSON input." 
optional = false python-versions = "*" files = [ - {file = "dataclass-wizard-0.32.0.tar.gz", hash = "sha256:b9411bc91a9a0e2224ca6a599923b8e472b170acc14580b2fa6fcf343f720fe5"}, - {file = "dataclass_wizard-0.32.0-py2.py3-none-any.whl", hash = "sha256:36091a8d5b49b43178bf076c948ff5b848d36e42ad20adf78ae2d0312e1c09e4"}, + {file = "dataclass-wizard-0.32.1.tar.gz", hash = "sha256:31d44224ff8acb28abb1bbf11afa5fa73d7eeec8cb2e8f9aed374135c154b617"}, + {file = "dataclass_wizard-0.32.1-py2.py3-none-any.whl", hash = "sha256:ce2c3bbfe48b197162ffeffd74c2d4ae4ca834acf4017b001a906a07109c943b"}, ] [package.extras] -dev = ["Sphinx (==7.4.7)", "Sphinx (==8.1.3)", "bump2version (==1.0.1)", "coverage (>=6.2)", "dacite (==1.8.1)", "dataclass-factory (==2.16)", "dataclass-wizard[toml]", "dataclasses-json (==0.6.7)", "flake8 (>=3)", "jsons (==1.6.3)", "mashumaro (==3.15)", "pip (>=21.3.1)", "pydantic (==2.10.2)", "pytest (==8.3.3)", "pytest-cov (==6.0.0)", "pytest-mock (>=3.6.1)", "python-dotenv (>=1,<2)", "pytimeparse (==1.1.8)", "sphinx-autodoc-typehints (==2.5.0)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "tomli (>=2,<3)", "tomli (>=2,<3)", "tomli-w (>=1,<2)", "tox (==4.23.2)", "twine (==5.1.1)", "watchdog[watchmedo] (==6.0.0)", "wheel (==0.45.1)"] +dev = ["Sphinx (==7.4.7)", "Sphinx (==8.1.3)", "bump2version (==1.0.1)", "coverage (>=6.2)", "dacite (==1.8.1)", "dataclass-factory (==2.16)", "dataclass-wizard[toml]", "dataclasses-json (==0.6.7)", "flake8 (>=3)", "jsons (==1.6.3)", "mashumaro (==3.15)", "pip (>=21.3.1)", "pydantic (==2.10.2)", "pytest (==8.3.4)", "pytest-cov (==6.0.0)", "pytest-mock (>=3.6.1)", "python-dotenv (>=1,<2)", "pytimeparse (==1.1.8)", "sphinx-autodoc-typehints (==2.5.0)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "tomli (>=2,<3)", "tomli (>=2,<3)", "tomli-w (>=1,<2)", "tox (==4.23.2)", "twine (==6.0.1)", "watchdog[watchmedo] (==6.0.0)", "wheel (==0.45.1)"] dotenv = ["python-dotenv (>=1,<2)"] timedelta = ["pytimeparse (>=1.1.7)"] toml = ["tomli (>=2,<3)", "tomli (>=2,<3)", "tomli-w (>=1,<2)"] @@ -2810,61 +2769,61 @@ fonttools = "*" [[package]] name = "fonttools" -version = "4.55.1" +version = "4.55.2" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.55.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c17a6f9814f83772cd6d9c9009928e1afa4ab66210a31ced721556651075a9a0"}, - {file = "fonttools-4.55.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c4d14eecc814826a01db87a40af3407c892ba49996bc6e49961e386cd78b537c"}, - {file = "fonttools-4.55.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8589f9a15dc005592b94ecdc45b4dfae9bbe9e73542e89af5a5e776e745db83b"}, - {file = "fonttools-4.55.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfee95bd9395bcd9e6c78955387554335109b6a613db71ef006020b42f761c58"}, - {file = "fonttools-4.55.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:34fa2ecc0bf1923d1a51bf2216a006de2c3c0db02c6aa1470ea50b62b8619bd5"}, - {file = "fonttools-4.55.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9c1c48483148bfb1b9ad951133ceea957faa004f6cb475b67e7bc75d482b48f8"}, - {file = "fonttools-4.55.1-cp310-cp310-win32.whl", hash = "sha256:3e2fc388ca7d023b3c45badd71016fd4185f93e51a22cfe4bd65378af7fba759"}, - {file = "fonttools-4.55.1-cp310-cp310-win_amd64.whl", hash = "sha256:c4c36c71f69d2b3ee30394b0986e5f8b2c461e7eff48dde49b08a90ded9fcdbd"}, - {file = 
"fonttools-4.55.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5daab3a55d460577f45bb8f5a8eca01fa6cde43ef2ab943b527991f54b735c41"}, - {file = "fonttools-4.55.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:acf1e80cf96c2fbc79e46f669d8713a9a79faaebcc68e31a9fbe600cf8027992"}, - {file = "fonttools-4.55.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e88a0329f7f88a210f09f79c088fb64f8032fc3ab65e2390a40b7d3a11773026"}, - {file = "fonttools-4.55.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03105b42259a8a94b2f0cbf1bee45f7a8a34e7b26c946a8fb89b4967e44091a8"}, - {file = "fonttools-4.55.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9af3577e821649879ab5774ad0e060af34816af556c77c6d3820345d12bf415e"}, - {file = "fonttools-4.55.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:34bd5de3d0ad085359b79a96575cd6bd1bc2976320ef24a2aa152ead36dbf656"}, - {file = "fonttools-4.55.1-cp311-cp311-win32.whl", hash = "sha256:5da92c4b637f0155a41f345fa81143c8e17425260fcb21521cb2ad4d2cea2a95"}, - {file = "fonttools-4.55.1-cp311-cp311-win_amd64.whl", hash = "sha256:f70234253d15f844e6da1178f019a931f03181463ce0c7b19648b8c370527b07"}, - {file = "fonttools-4.55.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9c372e527d58ba64b695f15f8014e97bc8826cf64d3380fc89b4196edd3c0fa8"}, - {file = "fonttools-4.55.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:845a967d3bef3245ba81fb5582dc731f6c2c8417fa211f1068c56893504bc000"}, - {file = "fonttools-4.55.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03be82bcd4ba4418adf10e6165743f824bb09d6594c2743d7f93ea50968805b"}, - {file = "fonttools-4.55.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c42e935cf146f826f556d977660dac88f2fa3fb2efa27d5636c0b89a60c16edf"}, - {file = "fonttools-4.55.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:96328bf91e05621d8e40d9f854af7a262cb0e8313e9b38e7f3a7f3c4c0caaa8b"}, - {file = "fonttools-4.55.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:291acec4d774e8cd2d8472d88c04643a77a3324a15247951bd6cfc969799b69e"}, - {file = "fonttools-4.55.1-cp312-cp312-win32.whl", hash = "sha256:6d768d6632809aec1c3fa8f195b173386d85602334701a6894a601a4d3c80368"}, - {file = "fonttools-4.55.1-cp312-cp312-win_amd64.whl", hash = "sha256:2a3850afdb0be1f79a1e95340a2059226511675c5b68098d4e49bfbeb48a8aab"}, - {file = "fonttools-4.55.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0c88d427eaf8bd8497b9051f56e0f5f9fb96a311aa7c72cda35e03e18d59cd16"}, - {file = "fonttools-4.55.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f062c95a725a79fd908fe8407b6ad63e230e1c7d6dece2d5d6ecaf843d6927f6"}, - {file = "fonttools-4.55.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f298c5324c45cad073475146bf560f4110ce2dc2488ff12231a343ec489f77bc"}, - {file = "fonttools-4.55.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f06dbb71344ffd85a6cb7e27970a178952f0bdd8d319ed938e64ba4bcc41700"}, - {file = "fonttools-4.55.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4c46b3525166976f5855b1f039b02433dc51eb635fb54d6a111e0c5d6e6cdc4c"}, - {file = "fonttools-4.55.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:af46f52a21e086a2f89b87bd941c9f0f91e5f769e1a5eb3b37c912228814d3e5"}, - {file = "fonttools-4.55.1-cp313-cp313-win32.whl", hash = 
"sha256:cd7f36335c5725a3fd724cc667c10c3f5254e779bdc5bffefebb33cf5a75ecb1"}, - {file = "fonttools-4.55.1-cp313-cp313-win_amd64.whl", hash = "sha256:5d6394897710ccac7f74df48492d7f02b9586ff0588c66a2c218844e90534b22"}, - {file = "fonttools-4.55.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:52c4f4b383c56e1a4fe8dab1b63c2269ba9eab0695d2d8e033fa037e61e6f1ef"}, - {file = "fonttools-4.55.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d83892dafdbd62b56545c77b6bd4fa49eef6ec1d6b95e042ee2c930503d1831e"}, - {file = "fonttools-4.55.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604d5bf16f811fcaaaec2dde139f7ce958462487565edcd54b6fadacb2942083"}, - {file = "fonttools-4.55.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3324b92feb5fd084923a8e89a8248afd5b9f9d81ab9517d7b07cc84403bd448"}, - {file = "fonttools-4.55.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:30f8b1ca9b919c04850678d026fc330c19acaa9e3b282fcacc09a5eb3c8d20c3"}, - {file = "fonttools-4.55.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:1835c98df2cf28c86a66d234895c87df7b9325fd079a8019c5053a389ff55d23"}, - {file = "fonttools-4.55.1-cp38-cp38-win32.whl", hash = "sha256:9f202703720a7cc0049f2ed1a2047925e264384eb5cc4d34f80200d7b17f1b6a"}, - {file = "fonttools-4.55.1-cp38-cp38-win_amd64.whl", hash = "sha256:2efff20aed0338d37c2ff58766bd67f4b9607ded61cf3d6baf1b3e25ea74e119"}, - {file = "fonttools-4.55.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3032d9bf010c395e6eca2851666cafb1f4ecde85d420188555e928ad0144326e"}, - {file = "fonttools-4.55.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0794055588c30ffe25426048e8a7c0a5271942727cd61fc939391e37f4d580d5"}, - {file = "fonttools-4.55.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13ba980e3ffd3206b8c63a365f90dc10eeec27da946d5ee5373c3a325a46d77c"}, - {file = "fonttools-4.55.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d7063babd7434a17a5e355e87de9b2306c85a5c19c7da0794be15c58aab0c39"}, - {file = "fonttools-4.55.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ed84c15144015a58ef550dd6312884c9fb31a2dbc31a6467bcdafd63be7db476"}, - {file = "fonttools-4.55.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e89419d88b0bbfdb55209e03a17afa2d20db3c2fa0d785543c9d0875668195d5"}, - {file = "fonttools-4.55.1-cp39-cp39-win32.whl", hash = "sha256:6eb781e401b93cda99356bc043ababead2a5096550984d8a4ecf3d5c9f859dc2"}, - {file = "fonttools-4.55.1-cp39-cp39-win_amd64.whl", hash = "sha256:db1031acf04523c5a51c3e1ae19c21a1c32bc5f820a477dd4659a02f9cb82002"}, - {file = "fonttools-4.55.1-py3-none-any.whl", hash = "sha256:4bcfb11f90f48b48c366dd638d773a52fca0d1b9e056dc01df766bf5835baa08"}, - {file = "fonttools-4.55.1.tar.gz", hash = "sha256:85bb2e985718b0df96afc659abfe194c171726054314b019dbbfed31581673c7"}, + {file = "fonttools-4.55.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bef0f8603834643b1a6419d57902f18e7d950ec1a998fb70410635c598dc1a1e"}, + {file = "fonttools-4.55.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:944228b86d472612d3b48bcc83b31c25c2271e63fdc74539adfcfa7a96d487fb"}, + {file = "fonttools-4.55.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f0e55f5da594b85f269cfbecd2f6bd3e07d0abba68870bc3f34854de4fa4678"}, + {file = "fonttools-4.55.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b1a6e576db0c83c1b91925bf1363478c4bb968dbe8433147332fb5782ce6190"}, + {file = 
"fonttools-4.55.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:616368b15716781bc84df5c2191dc0540137aaef56c2771eb4b89b90933f347a"}, + {file = "fonttools-4.55.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7bbae4f3915225c2c37670da68e2bf18a21206060ad31dfb95fec91ef641caa7"}, + {file = "fonttools-4.55.2-cp310-cp310-win32.whl", hash = "sha256:8b02b10648d69d67a7eb055f4d3eedf4a85deb22fb7a19fbd9acbae7c7538199"}, + {file = "fonttools-4.55.2-cp310-cp310-win_amd64.whl", hash = "sha256:bbea0ab841113ac8e8edde067e099b7288ffc6ac2dded538b131c2c0595d5f77"}, + {file = "fonttools-4.55.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d34525e8141286fa976e14806639d32294bfb38d28bbdb5f6be9f46a1cd695a6"}, + {file = "fonttools-4.55.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ecd1c2b1c2ec46bb73685bc5473c72e16ed0930ef79bc2919ccadc43a99fb16"}, + {file = "fonttools-4.55.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9008438ad59e5a8e403a62fbefef2b2ff377eb3857d90a3f2a5f4d674ff441b2"}, + {file = "fonttools-4.55.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:131591ac8d7a47043aaf29581aba755ae151d46e49d2bf49608601efd71e8b4d"}, + {file = "fonttools-4.55.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4c83381c3e3e3d9caa25527c4300543578341f21aae89e4fbbb4debdda8d82a2"}, + {file = "fonttools-4.55.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:42aca564b575252fd9954ed0d91d97a24de24289a16ce8ff74ed0bdf5ecebf11"}, + {file = "fonttools-4.55.2-cp311-cp311-win32.whl", hash = "sha256:c6457f650ebe15baa17fc06e256227f0a47f46f80f27ec5a0b00160de8dc2c13"}, + {file = "fonttools-4.55.2-cp311-cp311-win_amd64.whl", hash = "sha256:5cfa67414d7414442a5635ff634384101c54f53bb7b0e04aa6a61b013fcce194"}, + {file = "fonttools-4.55.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:18f082445b8fe5e91c53e6184f4c1c73f3f965c8bcc614c6cd6effd573ce6c1a"}, + {file = "fonttools-4.55.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c0f91adbbd706e8acd1db73e3e510118e62d0ffb651864567dccc5b2339f90"}, + {file = "fonttools-4.55.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d8ccce035320d63dba0c35f52499322f5531dbe85bba1514c7cea26297e4c54"}, + {file = "fonttools-4.55.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96e126df9615df214ec7f04bebcf60076297fbc10b75c777ce58b702d7708ffb"}, + {file = "fonttools-4.55.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:508ebb42956a7a931c4092dfa2d9b4ffd4f94cea09b8211199090d2bd082506b"}, + {file = "fonttools-4.55.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c1b9de46ef7b683d50400abf9f1578eaceee271ff51c36bf4b7366f2be29f498"}, + {file = "fonttools-4.55.2-cp312-cp312-win32.whl", hash = "sha256:2df61d9fc15199cc86dad29f64dd686874a3a52dda0c2d8597d21f509f95c332"}, + {file = "fonttools-4.55.2-cp312-cp312-win_amd64.whl", hash = "sha256:d337ec087da8216a828574aa0525d869df0a2ac217a2efc1890974ddd1fbc5b9"}, + {file = "fonttools-4.55.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:10aff204e2edee1d312fa595c06f201adf8d528a3b659cfb34cd47eceaaa6a26"}, + {file = "fonttools-4.55.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:09fe922a3eff181fd07dd724cdb441fb6b9fc355fd1c0f1aa79aca60faf1fbdd"}, + {file = "fonttools-4.55.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:487e1e8b524143a799bda0169c48b44a23a6027c1bb1957d5a172a7d3a1dd704"}, + {file = 
"fonttools-4.55.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b1726872e09268bbedb14dc02e58b7ea31ecdd1204c6073eda4911746b44797"}, + {file = "fonttools-4.55.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6fc88cfb58b0cd7b48718c3e61dd0d0a3ee8e2c86b973342967ce09fbf1db6d4"}, + {file = "fonttools-4.55.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e857fe1859901ad8c5cab32e0eebc920adb09f413d2d73b74b677cf47b28590c"}, + {file = "fonttools-4.55.2-cp313-cp313-win32.whl", hash = "sha256:81ccd2b3a420b8050c7d9db3be0555d71662973b3ef2a1d921a2880b58957db8"}, + {file = "fonttools-4.55.2-cp313-cp313-win_amd64.whl", hash = "sha256:d559eb1744c7dcfa90ae60cb1a4b3595e898e48f4198738c321468c01180cd83"}, + {file = "fonttools-4.55.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6b5917ef79cac8300b88fd6113003fd01bbbbea2ea060a27b95d8f77cb4c65c2"}, + {file = "fonttools-4.55.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:663eba5615d6abaaf616432354eb7ce951d518e43404371bcc2b0694ef21e8d6"}, + {file = "fonttools-4.55.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:803d5cef5fc47f44f5084d154aa3d6f069bb1b60e32390c225f897fa19b0f939"}, + {file = "fonttools-4.55.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bc5f100de0173cc39102c0399bd6c3bd544bbdf224957933f10ee442d43cddd"}, + {file = "fonttools-4.55.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3d9bbc1e380fdaf04ad9eabd8e3e6a4301eaf3487940893e9fd98537ea2e283b"}, + {file = "fonttools-4.55.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:42a9afedff07b6f75aa0f39b5e49922ac764580ef3efce035ca30284b2ee65c8"}, + {file = "fonttools-4.55.2-cp38-cp38-win32.whl", hash = "sha256:f1c76f423f1a241df08f87614364dff6e0b7ce23c962c1b74bd995ec7c0dad13"}, + {file = "fonttools-4.55.2-cp38-cp38-win_amd64.whl", hash = "sha256:25062b6ca03464dd5179fc2040fb19e03391b7cc49b9cc4f879312e638605c5c"}, + {file = "fonttools-4.55.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d1100d8e665fe386a79cab59446992de881ea74d0d6c191bb988642692aa2421"}, + {file = "fonttools-4.55.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dbdc251c5e472e5ae6bc816f9b82718b8e93ff7992e7331d6cf3562b96aa268e"}, + {file = "fonttools-4.55.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0bf24d2b02dbc9376d795a63062632ff73e3e9e60c0229373f500aed7e86dd7"}, + {file = "fonttools-4.55.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4ff250ed4ff05015dfd9cf2adf7570c7a383ca80f4d9732ac484a5ed0d8453c"}, + {file = "fonttools-4.55.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:44cf2a98aa661dbdeb8c03f5e405b074e2935196780bb729888639f5276067d9"}, + {file = "fonttools-4.55.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22ef222740eb89d189bf0612eb98fbae592c61d7efeac51bfbc2a1592d469557"}, + {file = "fonttools-4.55.2-cp39-cp39-win32.whl", hash = "sha256:93f439ca27e55f585e7aaa04a74990acd983b5f2245e41d6b79f0a8b44e684d8"}, + {file = "fonttools-4.55.2-cp39-cp39-win_amd64.whl", hash = "sha256:627cf10d6f5af5bec6324c18a2670f134c29e1b7dce3fb62e8ef88baa6cba7a9"}, + {file = "fonttools-4.55.2-py3-none-any.whl", hash = "sha256:8e2d89fbe9b08d96e22c7a81ec04a4e8d8439c31223e2dc6f2f9fc8ff14bdf9f"}, + {file = "fonttools-4.55.2.tar.gz", hash = "sha256:45947e7b3f9673f91df125d375eb57b9a23f2a603f438a1aebf3171bffa7a205"}, ] [package.extras] @@ -3827,13 +3786,13 @@ setuptools = "*" [[package]] name = "gunicorn" -version = "22.0.0" +version 
= "23.0.0" description = "WSGI HTTP Server for UNIX" optional = false python-versions = ">=3.7" files = [ - {file = "gunicorn-22.0.0-py3-none-any.whl", hash = "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9"}, - {file = "gunicorn-22.0.0.tar.gz", hash = "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63"}, + {file = "gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d"}, + {file = "gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec"}, ] [package.dependencies] @@ -3874,105 +3833,120 @@ hyperframe = ">=6.0,<7" [[package]] name = "hiredis" -version = "3.0.0" +version = "3.1.0" description = "Python wrapper for hiredis" optional = false python-versions = ">=3.8" files = [ - {file = "hiredis-3.0.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:4b182791c41c5eb1d9ed736f0ff81694b06937ca14b0d4dadde5dadba7ff6dae"}, - {file = "hiredis-3.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:13c275b483a052dd645eb2cb60d6380f1f5215e4c22d6207e17b86be6dd87ffa"}, - {file = "hiredis-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1018cc7f12824506f165027eabb302735b49e63af73eb4d5450c66c88f47026"}, - {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83a29cc7b21b746cb6a480189e49f49b2072812c445e66a9e38d2004d496b81c"}, - {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e241fab6332e8fb5f14af00a4a9c6aefa22f19a336c069b7ddbf28ef8341e8d6"}, - {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1fb8de899f0145d6c4d5d4bd0ee88a78eb980a7ffabd51e9889251b8f58f1785"}, - {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b23291951959141173eec10f8573538e9349fa27f47a0c34323d1970bf891ee5"}, - {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e421ac9e4b5efc11705a0d5149e641d4defdc07077f748667f359e60dc904420"}, - {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:77c8006c12154c37691b24ff293c077300c22944018c3ff70094a33e10c1d795"}, - {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:41afc0d3c18b59eb50970479a9c0e5544fb4b95e3a79cf2fbaece6ddefb926fe"}, - {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:04ccae6dcd9647eae6025425ab64edb4d79fde8b9e6e115ebfabc6830170e3b2"}, - {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fe91d62b0594db5ea7d23fc2192182b1a7b6973f628a9b8b2e0a42a2be721ac6"}, - {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:99516d99316062824a24d145d694f5b0d030c80da693ea6f8c4ecf71a251d8bb"}, - {file = "hiredis-3.0.0-cp310-cp310-win32.whl", hash = "sha256:562eaf820de045eb487afaa37e6293fe7eceb5b25e158b5a1974b7e40bf04543"}, - {file = "hiredis-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:a1c81c89ed765198da27412aa21478f30d54ef69bf5e4480089d9c3f77b8f882"}, - {file = "hiredis-3.0.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:4664dedcd5933364756d7251a7ea86d60246ccf73a2e00912872dacbfcef8978"}, - {file = "hiredis-3.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:47de0bbccf4c8a9f99d82d225f7672b9dd690d8fd872007b933ef51a302c9fa6"}, - {file = "hiredis-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:e43679eca508ba8240d016d8cca9d27342d70184773c15bea78a23c87a1922f1"}, - {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13c345e7278c210317e77e1934b27b61394fee0dec2e8bd47e71570900f75823"}, - {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00018f22f38530768b73ea86c11f47e8d4df65facd4e562bd78773bd1baef35e"}, - {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ea3a86405baa8eb0d3639ced6926ad03e07113de54cb00fd7510cb0db76a89d"}, - {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c073848d2b1d5561f3903879ccf4e1a70c9b1e7566c7bdcc98d082fa3e7f0a1d"}, - {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a8dffb5f5b3415a4669d25de48b617fd9d44b0bccfc4c2ab24b06406ecc9ecb"}, - {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:22c17c96143c2a62dfd61b13803bc5de2ac526b8768d2141c018b965d0333b66"}, - {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c3ece960008dab66c6b8bb3a1350764677ee7c74ccd6270aaf1b1caf9ccebb46"}, - {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f75999ae00a920f7dce6ecae76fa5e8674a3110e5a75f12c7a2c75ae1af53396"}, - {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e069967cbd5e1900aafc4b5943888f6d34937fc59bf8918a1a546cb729b4b1e4"}, - {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0aacc0a78e1d94d843a6d191f224a35893e6bdfeb77a4a89264155015c65f126"}, - {file = "hiredis-3.0.0-cp311-cp311-win32.whl", hash = "sha256:719c32147ba29528cb451f037bf837dcdda4ff3ddb6cdb12c4216b0973174718"}, - {file = "hiredis-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:bdc144d56333c52c853c31b4e2e52cfbdb22d3da4374c00f5f3d67c42158970f"}, - {file = "hiredis-3.0.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:484025d2eb8f6348f7876fc5a2ee742f568915039fcb31b478fd5c242bb0fe3a"}, - {file = "hiredis-3.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:fcdb552ffd97151dab8e7bc3ab556dfa1512556b48a367db94b5c20253a35ee1"}, - {file = "hiredis-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bb6f9fd92f147ba11d338ef5c68af4fd2908739c09e51f186e1d90958c68cc1"}, - {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa86bf9a0ed339ec9e8a9a9d0ae4dccd8671625c83f9f9f2640729b15e07fbfd"}, - {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e194a0d5df9456995d8f510eab9f529213e7326af6b94770abf8f8b7952ddcaa"}, - {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a1df39d74ec507d79c7a82c8063eee60bf80537cdeee652f576059b9cdd15c"}, - {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f91456507427ba36fd81b2ca11053a8e112c775325acc74e993201ea912d63e9"}, - {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9862db92ef67a8a02e0d5370f07d380e14577ecb281b79720e0d7a89aedb9ee5"}, - {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d10fcd9e0eeab835f492832b2a6edb5940e2f1230155f33006a8dfd3bd2c94e4"}, - {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:48727d7d405d03977d01885f317328dc21d639096308de126c2c4e9950cbd3c9"}, - {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8e0bb6102ebe2efecf8a3292c6660a0e6fac98176af6de67f020bea1c2343717"}, - {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:df274e3abb4df40f4c7274dd3e587dfbb25691826c948bc98d5fead019dfb001"}, - {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:034925b5fb514f7b11aac38cd55b3fd7e9d3af23bd6497f3f20aa5b8ba58e232"}, - {file = "hiredis-3.0.0-cp312-cp312-win32.whl", hash = "sha256:120f2dda469b28d12ccff7c2230225162e174657b49cf4cd119db525414ae281"}, - {file = "hiredis-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:e584fe5f4e6681d8762982be055f1534e0170f6308a7a90f58d737bab12ff6a8"}, - {file = "hiredis-3.0.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:122171ff47d96ed8dd4bba6c0e41d8afaba3e8194949f7720431a62aa29d8895"}, - {file = "hiredis-3.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:ba9fc605ac558f0de67463fb588722878641e6fa1dabcda979e8e69ff581d0bd"}, - {file = "hiredis-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a631e2990b8be23178f655cae8ac6c7422af478c420dd54e25f2e26c29e766f1"}, - {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63482db3fadebadc1d01ad33afa6045ebe2ea528eb77ccaabd33ee7d9c2bad48"}, - {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f669212c390eebfbe03c4e20181f5970b82c5d0a0ad1df1785f7ffbe7d61150"}, - {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a49ef161739f8018c69b371528bdb47d7342edfdee9ddc75a4d8caddf45a6e"}, - {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98a152052b8878e5e43a2e3a14075218adafc759547c98668a21e9485882696c"}, - {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50a196af0ce657fcde9bf8a0bbe1032e22c64d8fcec2bc926a35e7ff68b3a166"}, - {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f2f312eef8aafc2255e3585dcf94d5da116c43ef837db91db9ecdc1bc930072d"}, - {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:6ca41fa40fa019cde42c21add74aadd775e71458051a15a352eabeb12eb4d084"}, - {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:6eecb343c70629f5af55a8b3e53264e44fa04e155ef7989de13668a0cb102a90"}, - {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:c3fdad75e7837a475900a1d3a5cc09aa024293c3b0605155da2d42f41bc0e482"}, - {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8854969e7480e8d61ed7549eb232d95082a743e94138d98d7222ba4e9f7ecacd"}, - {file = "hiredis-3.0.0-cp38-cp38-win32.whl", hash = "sha256:f114a6c86edbf17554672b050cce72abf489fe58d583c7921904d5f1c9691605"}, - {file = "hiredis-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:7d99b91e42217d7b4b63354b15b41ce960e27d216783e04c4a350224d55842a4"}, - {file = "hiredis-3.0.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:4c6efcbb5687cf8d2aedcc2c3ed4ac6feae90b8547427d417111194873b66b06"}, - {file = "hiredis-3.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5b5cff42a522a0d81c2ae7eae5e56d0ee7365e0c4ad50c4de467d8957aff4414"}, - {file = "hiredis-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:82f794d564f4bc76b80c50b03267fe5d6589e93f08e66b7a2f674faa2fa76ebc"}, - {file = 
"hiredis-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7a4c1791d7aa7e192f60fe028ae409f18ccdd540f8b1e6aeb0df7816c77e4a4"}, - {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2537b2cd98192323fce4244c8edbf11f3cac548a9d633dbbb12b48702f379f4"}, - {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fed69bbaa307040c62195a269f82fc3edf46b510a17abb6b30a15d7dab548df"}, - {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:869f6d5537d243080f44253491bb30aa1ec3c21754003b3bddeadedeb65842b0"}, - {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d435ae89073d7cd51e6b6bf78369c412216261c9c01662e7008ff00978153729"}, - {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:204b79b30a0e6be0dc2301a4d385bb61472809f09c49f400497f1cdd5a165c66"}, - {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3ea635101b739c12effd189cc19b2671c268abb03013fd1f6321ca29df3ca625"}, - {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:f359175197fd833c8dd7a8c288f1516be45415bb5c939862ab60c2918e1e1943"}, - {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ac6d929cb33dd12ad3424b75725975f0a54b5b12dbff95f2a2d660c510aa106d"}, - {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:100431e04d25a522ef2c3b94f294c4219c4de3bfc7d557b6253296145a144c11"}, - {file = "hiredis-3.0.0-cp39-cp39-win32.whl", hash = "sha256:e1a9c14ae9573d172dc050a6f63a644457df5d01ec4d35a6a0f097f812930f83"}, - {file = "hiredis-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:54a6dd7b478e6eb01ce15b3bb5bf771e108c6c148315bf194eb2ab776a3cac4d"}, - {file = "hiredis-3.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:50da7a9edf371441dfcc56288d790985ee9840d982750580710a9789b8f4a290"}, - {file = "hiredis-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9b285ef6bf1581310b0d5e8f6ce64f790a1c40e89c660e1320b35f7515433672"}, - {file = "hiredis-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dcfa684966f25b335072115de2f920228a3c2caf79d4bfa2b30f6e4f674a948"}, - {file = "hiredis-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a41be8af1fd78ca97bc948d789a09b730d1e7587d07ca53af05758f31f4b985d"}, - {file = "hiredis-3.0.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:038756db735e417ab36ee6fd7725ce412385ed2bd0767e8179a4755ea11b804f"}, - {file = "hiredis-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fcecbd39bd42cef905c0b51c9689c39d0cc8b88b1671e7f40d4fb213423aef3a"}, - {file = "hiredis-3.0.0-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a131377493a59fb0f5eaeb2afd49c6540cafcfba5b0b3752bed707be9e7c4eaf"}, - {file = "hiredis-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d22c53f0ec5c18ecb3d92aa9420563b1c5d657d53f01356114978107b00b860"}, - {file = "hiredis-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8a91e9520fbc65a799943e5c970ffbcd67905744d8becf2e75f9f0a5e8414f0"}, - {file = "hiredis-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3dc8043959b50141df58ab4f398e8ae84c6f9e673a2c9407be65fc789138f4a6"}, - {file = 
"hiredis-3.0.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51b99cfac514173d7b8abdfe10338193e8a0eccdfe1870b646009d2fb7cbe4b5"}, - {file = "hiredis-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:fa1fcad89d8a41d8dc10b1e54951ec1e161deabd84ed5a2c95c3c7213bdb3514"}, - {file = "hiredis-3.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:898636a06d9bf575d2c594129085ad6b713414038276a4bfc5db7646b8a5be78"}, - {file = "hiredis-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:466f836dbcf86de3f9692097a7a01533dc9926986022c6617dc364a402b265c5"}, - {file = "hiredis-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23142a8af92a13fc1e3f2ca1d940df3dcf2af1d176be41fe8d89e30a837a0b60"}, - {file = "hiredis-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:793c80a3d6b0b0e8196a2d5de37a08330125668c8012922685e17aa9108c33ac"}, - {file = "hiredis-3.0.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:467d28112c7faa29b7db743f40803d927c8591e9da02b6ce3d5fadc170a542a2"}, - {file = "hiredis-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:dc384874a719c767b50a30750f937af18842ee5e288afba95a5a3ed703b1515a"}, - {file = "hiredis-3.0.0.tar.gz", hash = "sha256:fed8581ae26345dea1f1e0d1a96e05041a727a45e7d8d459164583e23c6ac441"}, + {file = "hiredis-3.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:2892db9db21f0cf7cc298d09f85d3e1f6dc4c4c24463ab67f79bc7a006d51867"}, + {file = "hiredis-3.1.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:93cfa6cc25ee2ceb0be81dc61eca9995160b9e16bdb7cca4a00607d57e998918"}, + {file = "hiredis-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2af62070aa9433802cae7be7364d5e82f76462c6a2ae34e53008b637aaa9a156"}, + {file = "hiredis-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:072c162260ebb1d892683107da22d0d5da7a1414739eae4e185cac22fe89627f"}, + {file = "hiredis-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6b232c43e89755ba332c2745ddab059c0bc1a0f01448a3a14d506f8448b1ce6"}, + {file = "hiredis-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb5316c9a65c4dde80796aa245b76011bab64eb84461a77b0a61c1bf2970bcc9"}, + {file = "hiredis-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e812a4e656bbd1c1c15c844b28259c49e26bb384837e44e8d2aa55412c91d2f7"}, + {file = "hiredis-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93a6c9230e5a5565847130c0e1005c8d3aa5ca681feb0ed542c4651323d32feb"}, + {file = "hiredis-3.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a5f65e89ce50a94d9490d5442a649c6116f53f216c8c14eb37cf9637956482b2"}, + {file = "hiredis-3.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9b2d6e33601c67c074c367fdccdd6033e642284e7a56adc130f18f724c378ca8"}, + {file = "hiredis-3.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:bad3b1e0c83849910f28c95953417106f539277035a4b515d1425f93947bc28f"}, + {file = "hiredis-3.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:9646de31f5994e6218311dcf216e971703dbf804c510fd3f84ddb9813c495824"}, + {file = "hiredis-3.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59a9230f3aa38a33d09d8171400de202f575d7a38869e5ce2947829bca6fe359"}, + {file = "hiredis-3.1.0-cp310-cp310-win32.whl", hash 
= "sha256:0322d70f3328b97da14b6e98b18f0090a12ed8a8bf7ae20932e2eb9d1bb0aa2c"}, + {file = "hiredis-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:802474c18e878b3f9905e160a8b7df87d57885758083eda76c5978265acb41aa"}, + {file = "hiredis-3.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:c339ff4b4739b2a40da463763dd566129762f72926bca611ad9a457a9fe64abd"}, + {file = "hiredis-3.1.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:0ffa2552f704a45954627697a378fc2f559004e53055b82f00daf30bd4305330"}, + {file = "hiredis-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9acf7f0e7106f631cd618eb60ec9bbd6e43045addd5310f66ba1177209567e59"}, + {file = "hiredis-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea4f5ecf9dbea93c827486f59c606684c3496ea71c7ba9a8131932780696e61a"}, + {file = "hiredis-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39efab176fca3d5111075f6ba56cd864f18db46d858289d39360c5672e0e5c3e"}, + {file = "hiredis-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1110eae007f30e70a058d743e369c24430327cd01fd97d99519d6794a58dd587"}, + {file = "hiredis-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b390f63191bcccbb6044d4c118acdf4fa55f38e5658ac4cfd5a33a6f0c07659"}, + {file = "hiredis-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72a98ccc7b8ec9ce0100ecf59f45f05d2023606e8e3676b07a316d1c1c364072"}, + {file = "hiredis-3.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7c76e751fd1e2f221dec09cdc24040ee486886e943d5d7ffc256e8cf15c75e51"}, + {file = "hiredis-3.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7d3880f213b6f14e9c69ce52beffd1748eecc8669698c4782761887273b6e1bd"}, + {file = "hiredis-3.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:87c2b3fe7e7c96eba376506a76e11514e07e848f737b254e0973e4b5c3a491e9"}, + {file = "hiredis-3.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:d3cfb4089e96f8f8ee9554da93148a9261aa6612ad2cc202c1a494c7b712e31f"}, + {file = "hiredis-3.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f12018e5c5f866a1c3f7017cb2d88e5c6f9440df2281e48865a2b6c40f247f4"}, + {file = "hiredis-3.1.0-cp311-cp311-win32.whl", hash = "sha256:107b66ce977bb2dff8f2239e68344360a75d05fed3d9fa0570ac4d3020ce2396"}, + {file = "hiredis-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8f1240bde53d3d1676f0aba61b3661560dc9a681cae24d9de33e650864029aa4"}, + {file = "hiredis-3.1.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:f7c7f89e0bc4246115754e2eda078a111282f6d6ecc6fb458557b724fe6f2aac"}, + {file = "hiredis-3.1.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:3dbf9163296fa45fbddcfc4c5900f10e9ddadda37117dbfb641e327e536b53e0"}, + {file = "hiredis-3.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:af46a4be0e82df470f68f35316fa16cd1e134d1c5092fc1082e1aad64cce716d"}, + {file = "hiredis-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc63d698c43aea500a84d8b083f830c03808b6cf3933ae4d35a27f0a3d881652"}, + {file = "hiredis-3.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:676b3d88674134bfaaf70dac181d1790b0f33b3187bfb9da9221e17e0e624f83"}, + {file = "hiredis-3.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aed10d9df1e2fb0011db2713ac64497462e9c2c0208b648c97569da772b959ca"}, + {file = 
"hiredis-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b5bd8adfe8742e331a94cccd782bffea251fa70d9a709e71f4510f50794d700"}, + {file = "hiredis-3.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9fc4e35b4afb0af6da55495dd0742ad32ab88150428a6ecdbb3085cbd60714e8"}, + {file = "hiredis-3.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:89b83e76eb00ab0464e7b0752a3ffcb02626e742e9509bc141424a9c3202e8dc"}, + {file = "hiredis-3.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:98ebf08c907836b70a8f40e030df8ab6f174dc7f6fa765251d813e89f14069d8"}, + {file = "hiredis-3.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6c840b9cec086328f2ee2cfee0038b5d6bbb514bac7b5e579da6e346eaac056c"}, + {file = "hiredis-3.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:c5c44e9fa6f4462d0330cb5f5d46fa652512fc86b41d4d1974d0356f263e9105"}, + {file = "hiredis-3.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e665b14ab50aa175cfa306fcb00fffd4e3ff02ceb36ca6a4df00b1246d6a73c4"}, + {file = "hiredis-3.1.0-cp312-cp312-win32.whl", hash = "sha256:bd33db977ac7af97e8d035ffadb163b00546be22e5f1297b2123f5f9bf0f8a21"}, + {file = "hiredis-3.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:37aed4aa9348600145e2d019c7be27855e503ecc4906c6976ff2f3b52e3d5d97"}, + {file = "hiredis-3.1.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:b87cddd8107487863fed6994de51e5594a0be267b0b19e213694e99cdd614623"}, + {file = "hiredis-3.1.0-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:d302deff8cb63a7feffc1844e4dafc8076e566bbf10c5aaaf0f4fe791b8a6bd0"}, + {file = "hiredis-3.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a018340c073cf88cb635b2bedff96619df2f666018c655e7911f46fa2c1c178"}, + {file = "hiredis-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1e8ba6414ac1ae536129e18c069f3eb497df5a74e136e3566471620a4fa5f95"}, + {file = "hiredis-3.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a86b9fef256c2beb162244791fdc025aa55f936d6358e86e2020e512fe2e4972"}, + {file = "hiredis-3.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7acdc68e29a446ad17aadaff19c981a36b3bd8c894c3520412c8a7ab1c3e0de7"}, + {file = "hiredis-3.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7e06baea05de57e1e7548064f505a6964e992674fe61b8f274afe2ac93b6371"}, + {file = "hiredis-3.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35b5fc061c8a0dbfdb440053280504d6aaa8d9726bd4d1d0e1cfcbbdf0d60b73"}, + {file = "hiredis-3.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c89d2dcb271d24c44f02264233b75d5db8c58831190fa92456a90b87fa17b748"}, + {file = "hiredis-3.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:aa36688c10a08f626fddcf68c2b1b91b0e90b070c26e550a4151a877f5c2d431"}, + {file = "hiredis-3.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f3982a9c16c1c4bc05a00b65d01ffb8d80ea1a7b6b533be2f1a769d3e989d2c0"}, + {file = "hiredis-3.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d1a6f889514ee2452300c9a06862fceedef22a2891f1c421a27b1ba52ef130b2"}, + {file = "hiredis-3.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8a45ff7915392a55d9386bb235ea1d1eb9960615f301979f02143fc20036b699"}, + {file = "hiredis-3.1.0-cp313-cp313-win32.whl", hash = "sha256:539e5bb725b62b76a5319a4e68fc7085f01349abc2316ef3df608ea0883c51d2"}, + {file = 
"hiredis-3.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9020fd7e58f489fda6a928c31355add0e665fd6b87b21954e675cf9943eafa32"}, + {file = "hiredis-3.1.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:b621a89fc29b3f4b01be6640ec81a6a94b5382bc78fecb876408d57a071e45aa"}, + {file = "hiredis-3.1.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:363e21fba55e1a26349dc9ca7da6b14332123879b6359bcee4a9acecb40ca33b"}, + {file = "hiredis-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c156156798729eadc9ab76ffee96c88b93cc1c3b493f4dd0a4341f53939194ee"}, + {file = "hiredis-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e38d8a325f9a6afac1b1c72d996d1add9e1b99696ce9410538ba5e9aa8fdba02"}, + {file = "hiredis-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3004ef7436feb7bfa61c0b36d422b8fb8c29aaa1a514c9405f0fdee5e9694dd3"}, + {file = "hiredis-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13f5b16f97d0bbd1c04ce367c49097d1214d60e11f9fee7ef2a9b54e0a6645c8"}, + {file = "hiredis-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:230dd0e77cb0f525f58a1306a7b4aaf078037fc5229110922332ca46f90821bb"}, + {file = "hiredis-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d968116caddd19d63120d1298e62b1bbc694db3360ed0d5df8c3a97edbc12552"}, + {file = "hiredis-3.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:511e36a6fa41d3efab3cd5cd70ac388ed825993b9e66fa3b0e47cf27a2f5ffee"}, + {file = "hiredis-3.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c5cd20804e3cb0d31e7d899d8dd091f569c33fe40d4bade670a067ab7d31c2ac"}, + {file = "hiredis-3.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:09e89e7d34cfe5ca8f7a869fca827d1af0afe8aaddb26b38c01058730edb79ad"}, + {file = "hiredis-3.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:570cbf31413c77fe5e7c157f2943ca4400493ddd9cf2184731cfcafc753becd7"}, + {file = "hiredis-3.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b9b4da8162cf289781732d6a5ba01d820c42c05943fcdb7de307d03639961db3"}, + {file = "hiredis-3.1.0-cp38-cp38-win32.whl", hash = "sha256:bc117a04bcb461d3bb1b2c5b417aee3442e1e8aa33ebc800481431f4c09fe0c5"}, + {file = "hiredis-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:34f3f5f0354db2d6797a6fb08d2c036a50af62a1d919d122c1c784304ef49347"}, + {file = "hiredis-3.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:a26fa888025badb5563f283cc19594c215a413e905729e59a5f7cf3f46d66c32"}, + {file = "hiredis-3.1.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:f50763cd819d4a52a47b5966d4bb47dee34b637c5fa6402509800eee6ecb61e6"}, + {file = "hiredis-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b6d1c9e1fce5e0a94072667ae2bf0142b89ebbb1917d3531184e060a43f3ee11"}, + {file = "hiredis-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e38d7a56b1a79ed0bbb9e6fe376d82e3f4dcc646ae47472f2c858e19a597c112"}, + {file = "hiredis-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ef5ad8b91530e4d10a68562b0a380ea22705a60e88cecee086d7c63a38564ce"}, + {file = "hiredis-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf3d2299b054e57a9f97ca08704c2843e44f29b57dc69b76a2592ecd212efe1a"}, + {file = "hiredis-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93811d60b0f73d0f049c86f4373a3833b4a38fce374ab151074d929553eb4304"}, + {file = 
"hiredis-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e703ff860c1d83abbcf57012b309ead02b56b60e85150c6c3bfb37cbb16ebf"}, + {file = "hiredis-3.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f9ea0678806c53d96758e74c6a898f9d506a2e3367a344757f768bef9e069366"}, + {file = "hiredis-3.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cf6844035abf47d52a1c3f4257255af3bf3b0f14d559b08eaa45885418c6c55d"}, + {file = "hiredis-3.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:7acf35cfa7ec9e1e7559c04e7095628f7d06049b5f24dcb58c1a55ef6dc689f8"}, + {file = "hiredis-3.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:b885695dce7a39b1fd9a609ed9c4cf312e53df2ec028d5a78af7a891b5fbea4d"}, + {file = "hiredis-3.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1c22fa74ddd063396b19fe8445a1ae8b4190eff755d5750dda48e860a45b2ee7"}, + {file = "hiredis-3.1.0-cp39-cp39-win32.whl", hash = "sha256:0614e16339f1784df3bbd2800322e20b4127d3f3a3509f00a5562efddb2521aa"}, + {file = "hiredis-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:c2bc713ee73ab9de4a0d68b0ab0f29612342b63173714742437b977584adb2d8"}, + {file = "hiredis-3.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:07ab990d0835f36bf358dbb84db4541ac0a8f533128ec09af8f80a576eef2e88"}, + {file = "hiredis-3.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c54a88eb9d8ebc4e5eefaadbe2102a4f7499f9e413654172f40aefd25350959"}, + {file = "hiredis-3.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8095ef159896e5999a795b0f80e4d64281301a109e442a8d29cd750ca6bd8303"}, + {file = "hiredis-3.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f8ca13e2476ffd6d5be4763f5868133506ddcfa5ce54b4dac231ebdc19be6c6"}, + {file = "hiredis-3.1.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34d25aa25c10f966d5415795ed271da84605044dbf436c054966cea5442451b3"}, + {file = "hiredis-3.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:4180dc5f646b426e5fa1212e1348c167ee2a864b3a70d56579163d64a847dd1e"}, + {file = "hiredis-3.1.0-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d92144e0cd6e6e841a6ad343e9d58631626eeb4ac96b0322649379b5d4527447"}, + {file = "hiredis-3.1.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:fcb91ba42903de637b94a1b64477f381f94ad82c0742c264f9245be76a7a3cbc"}, + {file = "hiredis-3.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ce71a797b5bc02c51da082428c00251ed6a7a67a03acbda5fbf9e8d028725f6"}, + {file = "hiredis-3.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e04c7feb9467e3170cd4d5bee381775783d81bbc45d6147c1c0ce3b50dc04f9"}, + {file = "hiredis-3.1.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a31806306a60f3565c04c964d6bee0e9d4a5120e1da589e41976b53972edf635"}, + {file = "hiredis-3.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:bc51f594c2c0863ded6501642dc96701ca8bbea9ced4fa3af0a1aeda8aa634cb"}, + {file = "hiredis-3.1.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4663a319ab7d22c597b9421e5ea384fd583e044f2f1ca9a1b98d4fef8a0fea2f"}, + {file = "hiredis-3.1.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8060fa256862b0c3de64a73ab45bc1ccf381caca464f2647af9075b200828948"}, + {file = 
"hiredis-3.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e9445b7f117a9c8c8ccad97cb44daa55ddccff3cbc9079984eac56d982ba01f"}, + {file = "hiredis-3.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:732cf1c5cf1324f7bf3b6086976fe62a2ca98f0bf6316f31063c2c67be8797bc"}, + {file = "hiredis-3.1.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2102a94063d878c40df92f55199637a74f535e3a0b79ceba4a00538853a21be3"}, + {file = "hiredis-3.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d968dde69e3fe903bf9ef00667669dcf04a3e096e33aaf138775106ead138bc8"}, + {file = "hiredis-3.1.0.tar.gz", hash = "sha256:51d40ac3611091020d7dea6b05ed62cb152bff595fa4f931e7b6479d777acf7c"}, ] [[package]] @@ -4316,86 +4290,87 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jiter" -version = "0.8.0" +version = "0.8.2" description = "Fast iterable JSON parser." optional = false python-versions = ">=3.8" files = [ - {file = "jiter-0.8.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:dee4eeb293ffcd2c3b31ebab684dbf7f7b71fe198f8eddcdf3a042cc6e10205a"}, - {file = "jiter-0.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aad1e6e9b01cf0304dcee14db03e92e0073287a6297caf5caf2e9dbfea16a924"}, - {file = "jiter-0.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:504099fb7acdbe763e10690d560a25d4aee03d918d6a063f3a761d8a09fb833f"}, - {file = "jiter-0.8.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2373487caad7fe39581f588ab5c9262fc1ade078d448626fec93f4ffba528858"}, - {file = "jiter-0.8.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c341ecc3f9bccde952898b0c97c24f75b84b56a7e2f8bbc7c8e38cab0875a027"}, - {file = "jiter-0.8.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e48e7a336529b9419d299b70c358d4ebf99b8f4b847ed3f1000ec9f320e8c0c"}, - {file = "jiter-0.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5ee157a8afd2943be690db679f82fafb8d347a8342e8b9c34863de30c538d55"}, - {file = "jiter-0.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d7dceae3549b80087f913aad4acc2a7c1e0ab7cb983effd78bdc9c41cabdcf18"}, - {file = "jiter-0.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e29e9ecce53d396772590438214cac4ab89776f5e60bd30601f1050b34464019"}, - {file = "jiter-0.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fa1782f22d5f92c620153133f35a9a395d3f3823374bceddd3e7032e2fdfa0b1"}, - {file = "jiter-0.8.0-cp310-none-win32.whl", hash = "sha256:f754ef13b4e4f67a3bf59fe974ef4342523801c48bf422f720bd37a02a360584"}, - {file = "jiter-0.8.0-cp310-none-win_amd64.whl", hash = "sha256:796f750b65f5d605f5e7acaccc6b051675e60c41d7ac3eab40dbd7b5b81a290f"}, - {file = "jiter-0.8.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f6f4e645efd96b4690b9b6091dbd4e0fa2885ba5c57a0305c1916b75b4f30ff6"}, - {file = "jiter-0.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f61cf6d93c1ade9b8245c9f14b7900feadb0b7899dbe4aa8de268b705647df81"}, - {file = "jiter-0.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0396bc5cb1309c6dab085e70bb3913cdd92218315e47b44afe9eace68ee8adaa"}, - {file = "jiter-0.8.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:62d0e42ec5dc772bd8554a304358220be5d97d721c4648b23f3a9c01ccc2cb26"}, - {file = 
"jiter-0.8.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec4b711989860705733fc59fb8c41b2def97041cea656b37cf6c8ea8dee1c3f4"}, - {file = "jiter-0.8.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:859cc35bf304ab066d88f10a44a3251a9cd057fb11ec23e00be22206db878f4f"}, - {file = "jiter-0.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5000195921aa293b39b9b5bc959d7fa658e7f18f938c0e52732da8e3cc70a278"}, - {file = "jiter-0.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36050284c0abde57aba34964d3920f3d6228211b65df7187059bb7c7f143759a"}, - {file = "jiter-0.8.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a88f608e050cfe45c48d771e86ecdbf5258314c883c986d4217cc79e1fb5f689"}, - {file = "jiter-0.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:646cf4237665b2e13b4159d8f26d53f59bc9f2e6e135e3a508a2e5dd26d978c6"}, - {file = "jiter-0.8.0-cp311-none-win32.whl", hash = "sha256:21fe5b8345db1b3023052b2ade9bb4d369417827242892051244af8fae8ba231"}, - {file = "jiter-0.8.0-cp311-none-win_amd64.whl", hash = "sha256:30c2161c5493acf6b6c3c909973fb64ae863747def01cc7574f3954e0a15042c"}, - {file = "jiter-0.8.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d91a52d8f49ada2672a4b808a0c5c25d28f320a2c9ca690e30ebd561eb5a1002"}, - {file = "jiter-0.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c38cf25cf7862f61410b7a49684d34eb3b5bcbd7ddaf4773eea40e0bd43de706"}, - {file = "jiter-0.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6189beb5c4b3117624be6b2e84545cff7611f5855d02de2d06ff68e316182be"}, - {file = "jiter-0.8.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e13fa849c0e30643554add089983caa82f027d69fad8f50acadcb21c462244ab"}, - {file = "jiter-0.8.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d7765ca159d0a58e8e0f8ca972cd6d26a33bc97b4480d0d2309856763807cd28"}, - {file = "jiter-0.8.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1b0befe7c6e9fc867d5bed21bab0131dfe27d1fa5cd52ba2bced67da33730b7d"}, - {file = "jiter-0.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7d6363d4c6f1052b1d8b494eb9a72667c3ef5f80ebacfe18712728e85327000"}, - {file = "jiter-0.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a873e57009863eeac3e3969e4653f07031d6270d037d6224415074ac17e5505c"}, - {file = "jiter-0.8.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2582912473c0d9940791479fe1bf2976a34f212eb8e0a82ee9e645ac275c5d16"}, - {file = "jiter-0.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:646163201af42f55393ee6e8f6136b8df488253a6533f4230a64242ecbfe6048"}, - {file = "jiter-0.8.0-cp312-none-win32.whl", hash = "sha256:96e75c9abfbf7387cba89a324d2356d86d8897ac58c956017d062ad510832dae"}, - {file = "jiter-0.8.0-cp312-none-win_amd64.whl", hash = "sha256:ed6074552b4a32e047b52dad5ab497223721efbd0e9efe68c67749f094a092f7"}, - {file = "jiter-0.8.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:dd5e351cb9b3e676ec3360a85ea96def515ad2b83c8ae3a251ce84985a2c9a6f"}, - {file = "jiter-0.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ba9f12b0f801ecd5ed0cec29041dc425d1050922b434314c592fc30d51022467"}, - {file = "jiter-0.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7ba461c3681728d556392e8ae56fb44a550155a24905f01982317b367c21dd4"}, - {file = 
"jiter-0.8.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3a15ed47ab09576db560dbc5c2c5a64477535beb056cd7d997d5dd0f2798770e"}, - {file = "jiter-0.8.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cef55042816d0737142b0ec056c0356a5f681fb8d6aa8499b158e87098f4c6f8"}, - {file = "jiter-0.8.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:549f170215adeb5e866f10617c3d019d8eb4e6d4e3c6b724b3b8c056514a3487"}, - {file = "jiter-0.8.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f867edeb279d22020877640d2ea728de5817378c60a51be8af731a8a8f525306"}, - {file = "jiter-0.8.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aef8845f463093799db4464cee2aa59d61aa8edcb3762aaa4aacbec3f478c929"}, - {file = "jiter-0.8.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:d0d6e22e4062c3d3c1bf3594baa2f67fc9dcdda8275abad99e468e0c6540bc54"}, - {file = "jiter-0.8.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:079e62e64696241ac3f408e337aaac09137ed760ccf2b72b1094b48745c13641"}, - {file = "jiter-0.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74d2b56ed3da5760544df53b5f5c39782e68efb64dc3aa0bba4cc08815e6fae8"}, - {file = "jiter-0.8.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:798dafe108cba58a7bb0a50d4d5971f98bb7f3c974e1373e750de6eb21c1a329"}, - {file = "jiter-0.8.0-cp313-none-win32.whl", hash = "sha256:ca6d3064dfc743eb0d3d7539d89d4ba886957c717567adc72744341c1e3573c9"}, - {file = "jiter-0.8.0-cp313-none-win_amd64.whl", hash = "sha256:38caedda64fe1f04b06d7011fc15e86b3b837ed5088657bf778656551e3cd8f9"}, - {file = "jiter-0.8.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:bb5c8a0a8d081c338db22e5b8d53a89a121790569cbb85f7d3cfb1fe0fbe9836"}, - {file = "jiter-0.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:202dbe8970bfb166fab950eaab8f829c505730a0b33cc5e1cfb0a1c9dd56b2f9"}, - {file = "jiter-0.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9046812e5671fdcfb9ae02881fff1f6a14d484b7e8b3316179a372cdfa1e8026"}, - {file = "jiter-0.8.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e6ac56425023e52d65150918ae25480d0a1ce2a6bf5ea2097f66a2cc50f6d692"}, - {file = "jiter-0.8.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dfcf97210c6eab9d2a1c6af15dd39e1d5154b96a7145d0a97fa1df865b7b834"}, - {file = "jiter-0.8.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4e3c8444d418686f78c9a547b9b90031faf72a0a1a46bfec7fb31edbd889c0d"}, - {file = "jiter-0.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6507011a299b7f578559084256405a8428875540d8d13530e00b688e41b09493"}, - {file = "jiter-0.8.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0aae4738eafdd34f0f25c2d3668ce9e8fa0d7cb75a2efae543c9a69aebc37323"}, - {file = "jiter-0.8.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5d782e790396b13f2a7b36bdcaa3736a33293bdda80a4bf1a3ce0cd5ef9f15"}, - {file = "jiter-0.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cc7f993bc2c4e03015445adbb16790c303282fce2e8d9dc3a3905b1d40e50564"}, - {file = "jiter-0.8.0-cp38-none-win32.whl", hash = "sha256:d4a8a6eda018a991fa58ef707dd51524055d11f5acb2f516d70b1be1d15ab39c"}, - {file = "jiter-0.8.0-cp38-none-win_amd64.whl", hash = "sha256:4cca948a3eda8ea24ed98acb0ee19dc755b6ad2e570ec85e1527d5167f91ff67"}, - {file = "jiter-0.8.0-cp39-cp39-macosx_10_12_x86_64.whl", 
hash = "sha256:ef89663678d8257063ce7c00d94638e05bd72f662c5e1eb0e07a172e6c1a9a9f"}, - {file = "jiter-0.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c402ddcba90b4cc71db3216e8330f4db36e0da2c78cf1d8a9c3ed8f272602a94"}, - {file = "jiter-0.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a6dfe795b7a173a9f8ba7421cdd92193d60c1c973bbc50dc3758a9ad0fa5eb6"}, - {file = "jiter-0.8.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ec29a31b9abd6be39453a2c45da067138a3005d65d2c0507c530e0f1fdcd9a4"}, - {file = "jiter-0.8.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a488f8c54bddc3ddefaf3bfd6de4a52c97fc265d77bc2dcc6ee540c17e8c342"}, - {file = "jiter-0.8.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aeb5561adf4d26ca0d01b5811b4d7b56a8986699a473d700757b4758ef787883"}, - {file = "jiter-0.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ab961858d7ad13132328517d29f121ae1b2d94502191d6bcf96bddcc8bb5d1c"}, - {file = "jiter-0.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a207e718d114d23acf0850a2174d290f42763d955030d9924ffa4227dbd0018f"}, - {file = "jiter-0.8.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:733bc9dc8ff718a0ae4695239e9268eb93e88b73b367dfac3ec227d8ce2f1e77"}, - {file = "jiter-0.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1ec27299e22d05e13a06e460bf7f75f26f9aaa0e0fb7d060f40e88df1d81faa"}, - {file = "jiter-0.8.0-cp39-none-win32.whl", hash = "sha256:e8dbfcb46553e6661d3fc1f33831598fcddf73d0f67834bce9fc3e9ebfe5c439"}, - {file = "jiter-0.8.0-cp39-none-win_amd64.whl", hash = "sha256:af2ce2487b3a93747e2cb5150081d4ae1e5874fce5924fc1a12e9e768e489ad8"}, - {file = "jiter-0.8.0.tar.gz", hash = "sha256:86fee98b569d4cc511ff2e3ec131354fafebd9348a487549c31ad371ae730310"}, + {file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"}, + {file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c826a221851a8dc028eb6d7d6429ba03184fa3c7e83ae01cd6d3bd1d4bd17d"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d35c864c2dff13dfd79fb070fc4fc6235d7b9b359efe340e1261deb21b9fcb66"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f557c55bc2b7676e74d39d19bcb8775ca295c7a028246175d6a8b431e70835e5"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:580ccf358539153db147e40751a0b41688a5ceb275e6f3e93d91c9467f42b2e3"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af102d3372e917cffce49b521e4c32c497515119dc7bd8a75665e90a718bbf08"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cadcc978f82397d515bb2683fc0d50103acff2a180552654bb92d6045dec2c49"}, + {file = "jiter-0.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ba5bdf56969cad2019d4e8ffd3f879b5fdc792624129741d3d83fc832fef8c7d"}, + {file = "jiter-0.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3b94a33a241bee9e34b8481cdcaa3d5c2116f575e0226e421bed3f7a6ea71cff"}, + {file = "jiter-0.8.2-cp310-cp310-win32.whl", hash = 
"sha256:6e5337bf454abddd91bd048ce0dca5134056fc99ca0205258766db35d0a2ea43"}, + {file = "jiter-0.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:4a9220497ca0cb1fe94e3f334f65b9b5102a0b8147646118f020d8ce1de70105"}, + {file = "jiter-0.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2dd61c5afc88a4fda7d8b2cf03ae5947c6ac7516d32b7a15bf4b49569a5c076b"}, + {file = "jiter-0.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a6c710d657c8d1d2adbbb5c0b0c6bfcec28fd35bd6b5f016395f9ac43e878a15"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9584de0cd306072635fe4b89742bf26feae858a0683b399ad0c2509011b9dc0"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a90a923338531b7970abb063cfc087eebae6ef8ec8139762007188f6bc69a9f"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21974d246ed0181558087cd9f76e84e8321091ebfb3a93d4c341479a736f099"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32475a42b2ea7b344069dc1e81445cfc00b9d0e3ca837f0523072432332e9f74"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9931fd36ee513c26b5bf08c940b0ac875de175341cbdd4fa3be109f0492586"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0820f4a3a59ddced7fce696d86a096d5cc48d32a4183483a17671a61edfddc"}, + {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8ffc86ae5e3e6a93765d49d1ab47b6075a9c978a2b3b80f0f32628f39caa0c88"}, + {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5127dc1abd809431172bc3fbe8168d6b90556a30bb10acd5ded41c3cfd6f43b6"}, + {file = "jiter-0.8.2-cp311-cp311-win32.whl", hash = "sha256:66227a2c7b575720c1871c8800d3a0122bb8ee94edb43a5685aa9aceb2782d44"}, + {file = "jiter-0.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:cde031d8413842a1e7501e9129b8e676e62a657f8ec8166e18a70d94d4682855"}, + {file = "jiter-0.8.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e6ec2be506e7d6f9527dae9ff4b7f54e68ea44a0ef6b098256ddf895218a2f8f"}, + {file = "jiter-0.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76e324da7b5da060287c54f2fabd3db5f76468006c811831f051942bf68c9d44"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:180a8aea058f7535d1c84183c0362c710f4750bef66630c05f40c93c2b152a0f"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025337859077b41548bdcbabe38698bcd93cfe10b06ff66617a48ff92c9aec60"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecff0dc14f409599bbcafa7e470c00b80f17abc14d1405d38ab02e4b42e55b57"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffd9fee7d0775ebaba131f7ca2e2d83839a62ad65e8e02fe2bd8fc975cedeb9e"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14601dcac4889e0a1c75ccf6a0e4baf70dbc75041e51bcf8d0e9274519df6887"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92249669925bc1c54fcd2ec73f70f2c1d6a817928480ee1c65af5f6b81cdf12d"}, + {file = "jiter-0.8.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e725edd0929fa79f8349ab4ec7f81c714df51dc4e991539a578e5018fa4a7152"}, + {file = "jiter-0.8.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:bf55846c7b7a680eebaf9c3c48d630e1bf51bdf76c68a5f654b8524335b0ad29"}, + {file = "jiter-0.8.2-cp312-cp312-win32.whl", hash = "sha256:7efe4853ecd3d6110301665a5178b9856be7e2a9485f49d91aa4d737ad2ae49e"}, + {file = "jiter-0.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:83c0efd80b29695058d0fd2fa8a556490dbce9804eac3e281f373bbc99045f6c"}, + {file = "jiter-0.8.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ca1f08b8e43dc3bd0594c992fb1fd2f7ce87f7bf0d44358198d6da8034afdf84"}, + {file = "jiter-0.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5672a86d55416ccd214c778efccf3266b84f87b89063b582167d803246354be4"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58dc9bc9767a1101f4e5e22db1b652161a225874d66f0e5cb8e2c7d1c438b587"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37b2998606d6dadbb5ccda959a33d6a5e853252d921fec1792fc902351bb4e2c"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ab9a87f3784eb0e098f84a32670cfe4a79cb6512fd8f42ae3d0709f06405d18"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79aec8172b9e3c6d05fd4b219d5de1ac616bd8da934107325a6c0d0e866a21b6"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:711e408732d4e9a0208008e5892c2966b485c783cd2d9a681f3eb147cf36c7ef"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:653cf462db4e8c41995e33d865965e79641ef45369d8a11f54cd30888b7e6ff1"}, + {file = "jiter-0.8.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:9c63eaef32b7bebac8ebebf4dabebdbc6769a09c127294db6babee38e9f405b9"}, + {file = "jiter-0.8.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:eb21aaa9a200d0a80dacc7a81038d2e476ffe473ffdd9c91eb745d623561de05"}, + {file = "jiter-0.8.2-cp313-cp313-win32.whl", hash = "sha256:789361ed945d8d42850f919342a8665d2dc79e7e44ca1c97cc786966a21f627a"}, + {file = "jiter-0.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:ab7f43235d71e03b941c1630f4b6e3055d46b6cb8728a17663eaac9d8e83a865"}, + {file = "jiter-0.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b426f72cd77da3fec300ed3bc990895e2dd6b49e3bfe6c438592a3ba660e41ca"}, + {file = "jiter-0.8.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2dd880785088ff2ad21ffee205e58a8c1ddabc63612444ae41e5e4b321b39c0"}, + {file = "jiter-0.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:3ac9f578c46f22405ff7f8b1f5848fb753cc4b8377fbec8470a7dc3997ca7566"}, + {file = "jiter-0.8.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9e1fa156ee9454642adb7e7234a383884452532bc9d53d5af2d18d98ada1d79c"}, + {file = "jiter-0.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cf5dfa9956d96ff2efb0f8e9c7d055904012c952539a774305aaaf3abdf3d6c"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e52bf98c7e727dd44f7c4acb980cb988448faeafed8433c867888268899b298b"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a2ecaa3c23e7a7cf86d00eda3390c232f4d533cd9ddea4b04f5d0644faf642c5"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08d4c92bf480e19fc3f2717c9ce2aa31dceaa9163839a311424b6862252c943e"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:99d9a1eded738299ba8e106c6779ce5c3893cffa0e32e4485d680588adae6db8"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20be8b7f606df096e08b0b1b4a3c6f0515e8dac296881fe7461dfa0fb5ec817"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d33f94615fcaf872f7fd8cd98ac3b429e435c77619777e8a449d9d27e01134d1"}, + {file = "jiter-0.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:317b25e98a35ffec5c67efe56a4e9970852632c810d35b34ecdd70cc0e47b3b6"}, + {file = "jiter-0.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fc9043259ee430ecd71d178fccabd8c332a3bf1e81e50cae43cc2b28d19e4cb7"}, + {file = "jiter-0.8.2-cp38-cp38-win32.whl", hash = "sha256:fc5adda618205bd4678b146612ce44c3cbfdee9697951f2c0ffdef1f26d72b63"}, + {file = "jiter-0.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:cd646c827b4f85ef4a78e4e58f4f5854fae0caf3db91b59f0d73731448a970c6"}, + {file = "jiter-0.8.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e41e75344acef3fc59ba4765df29f107f309ca9e8eace5baacabd9217e52a5ee"}, + {file = "jiter-0.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f22b16b35d5c1df9dfd58843ab2cd25e6bf15191f5a236bed177afade507bfc"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7200b8f7619d36aa51c803fd52020a2dfbea36ffec1b5e22cab11fd34d95a6d"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70bf4c43652cc294040dbb62256c83c8718370c8b93dd93d934b9a7bf6c4f53c"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9d471356dc16f84ed48768b8ee79f29514295c7295cb41e1133ec0b2b8d637d"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:859e8eb3507894093d01929e12e267f83b1d5f6221099d3ec976f0c995cb6bd9"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa58399c01db555346647a907b4ef6d4f584b123943be6ed5588c3f2359c9f4"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8f2d5ed877f089862f4c7aacf3a542627c1496f972a34d0474ce85ee7d939c27"}, + {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:03c9df035d4f8d647f8c210ddc2ae0728387275340668fb30d2421e17d9a0841"}, + {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8bd2a824d08d8977bb2794ea2682f898ad3d8837932e3a74937e93d62ecbb637"}, + {file = "jiter-0.8.2-cp39-cp39-win32.whl", hash = "sha256:ca29b6371ebc40e496995c94b988a101b9fbbed48a51190a4461fcb0a68b4a36"}, + {file = "jiter-0.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1c0dfbd1be3cbefc7510102370d86e35d1d53e5a93d48519688b1bf0f761160a"}, + {file = "jiter-0.8.2.tar.gz", hash = "sha256:cd73d3e740666d0e639f678adb176fad25c1bcbdae88d8d7b857e1783bb4212d"}, ] [[package]] @@ -4787,13 +4762,13 @@ files = [ [[package]] name = "loguru" -version = "0.7.2" +version = "0.7.3" description = "Python logging made (stupidly) simple" optional = false -python-versions = ">=3.5" +python-versions = "<4.0,>=3.5" files = [ - {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, - {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, + {file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"}, + {file = "loguru-0.7.3.tar.gz", hash = 
"sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"}, ] [package.dependencies] @@ -4801,7 +4776,7 @@ colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] -dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] +dev = ["Sphinx (==8.1.3)", "build (==1.2.2)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.5.0)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.13.0)", "mypy (==v1.4.1)", "myst-parser (==4.0.0)", "pre-commit (==4.0.1)", "pytest (==6.1.2)", "pytest (==8.3.2)", "pytest-cov (==2.12.1)", "pytest-cov (==5.0.0)", "pytest-cov (==6.0.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.1.0)", "sphinx-rtd-theme (==3.0.2)", "tox (==3.27.1)", "tox (==4.23.2)", "twine (==6.0.1)"] [[package]] name = "lxml" @@ -5026,13 +5001,13 @@ urllib3 = ">=1.23" [[package]] name = "mako" -version = "1.3.7" +version = "1.3.8" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = false python-versions = ">=3.8" files = [ - {file = "Mako-1.3.7-py3-none-any.whl", hash = "sha256:d18f990ad57f800ce8e76cbfb0b74afe471c293517e9f5003ace6dad5aa72c36"}, - {file = "mako-1.3.7.tar.gz", hash = "sha256:20405b1232e0759f0e7d87b01f6bb94fce0761747f1cb876ecf90bd512d0b639"}, + {file = "Mako-1.3.8-py3-none-any.whl", hash = "sha256:42f48953c7eb91332040ff567eb7eea69b22e7a4affbc5ba8e845e8f730f6627"}, + {file = "mako-1.3.8.tar.gz", hash = "sha256:577b97e414580d3e088d47c2dbbe9594aa7a5146ed2875d4dfa9075af2dd3cc8"}, ] [package.dependencies] @@ -6048,6 +6023,27 @@ files = [ [package.dependencies] opencensus = ">=0.8.0,<1.0.0" +[[package]] +name = "opendal" +version = "0.45.12" +description = "Apache OpenDAL™ Python Binding" +optional = false +python-versions = ">=3.11" +files = [ + {file = "opendal-0.45.12-cp311-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:fd6551780194870867ed205135d5e7e2d411145d3cc4faa63830f54bbf48acdb"}, + {file = "opendal-0.45.12-cp311-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6fb9dc5021c5a62785fdf81a2d6ab97b65b8ef86ccded119fe242a10655263"}, + {file = "opendal-0.45.12-cp311-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6803edda7c0936722ecc5c2cf01fd84dcb520f11e1643f285605451df6b7c20b"}, + {file = "opendal-0.45.12-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:389908b68845991f5bf4e75fbf1b415f14b02fab3201d01f5b3a9ae0030ee164"}, + {file = "opendal-0.45.12-cp311-abi3-win_amd64.whl", hash = "sha256:c87f488c547e17174d53f98da1c25135595bf93c29bab731d732f55d993534e0"}, + {file = "opendal-0.45.12.tar.gz", hash = "sha256:5b35a1abf6a30a6dc82e343a5c8403f245c89125cc037c2b89ed7803409c717c"}, +] + +[package.extras] +benchmark = ["boto3", "boto3-stubs[essential]", "gevent", "greenify", "greenlet", "pydantic"] +docs = ["pdoc"] +lint = ["ruff"] +test = ["pytest", "pytest-asyncio", 
"python-dotenv"] + [[package]] name = "openpyxl" version = "3.1.5" @@ -6795,20 +6791,20 @@ dill = ["dill (>=0.3.9)"] [[package]] name = "primp" -version = "0.8.1" +version = "0.8.2" description = "HTTP client that can impersonate web browsers, mimicking their headers and `TLS/JA3/JA4/HTTP2` fingerprints" optional = false python-versions = ">=3.8" files = [ - {file = "primp-0.8.1-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8294db817701ad76b6a186c16e22cc49d36fac5986647a83657ad4a58ddeee42"}, - {file = "primp-0.8.1-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:e8117531dcdb0dbcf9855fdbac73febdde5967ca0332a2c05b5961d2fbcfe749"}, - {file = "primp-0.8.1-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:993cc4284e8c5c858254748f078e872ba250c9339d64398dc000a8f9cffadda3"}, - {file = "primp-0.8.1-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4a27ac642be5c616fc5f139a5ad391dcd0c5964ace56fe6cf31cbffb972a7480"}, - {file = "primp-0.8.1-cp38-abi3-manylinux_2_34_armv7l.whl", hash = "sha256:e8483b8d9eec9fc43d77bb448555466030f29cdd99d9375eb75155e9f832e5bd"}, - {file = "primp-0.8.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:92f5f8267216252cfb27f2149811e14682bb64f0c5d37f00d218d1592e02f0b9"}, - {file = "primp-0.8.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:98f7f3a9481c55c56e7eff9024f29e16379a87d5b0a1b683e145dd8fcbdcc46b"}, - {file = "primp-0.8.1-cp38-abi3-win_amd64.whl", hash = "sha256:6f0018a26be787431504e32548b296a278abbe85da43bcbaf2d4982ac3dcd332"}, - {file = "primp-0.8.1.tar.gz", hash = "sha256:ddf05754a7b70d59df8a014a8585e418f9c04e0b69065bab6633f4a9b92bad93"}, + {file = "primp-0.8.2-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:20c4988c6538dfcac804e804f286493696e53498d5705e745a36d9fe436c787c"}, + {file = "primp-0.8.2-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:dde74d6bf5534a60fd075e81b5828a6591753a647c5bfe69e664883e5c7a28bb"}, + {file = "primp-0.8.2-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f988d7e47d7f63b63f851885d51abd86ba3a2a1981d047466c1e63827753a168"}, + {file = "primp-0.8.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:965cf0c19986d074d4e20ce18f1b81e5c31818324718814af6317a291a3aba65"}, + {file = "primp-0.8.2-cp38-abi3-manylinux_2_34_armv7l.whl", hash = "sha256:afc56989ae09bed76105bf045e666ea2da5f32e2e93dfb967795a4da4fc777e5"}, + {file = "primp-0.8.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:64e8b9b216ee0f52d2885ac23303000339f798a59eb9b4b3b747dcbbf9187beb"}, + {file = "primp-0.8.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b65de6d8fe4c7ef9d5d508e2a9cee3da77455e3a44c9282bdebb2134c55087c9"}, + {file = "primp-0.8.2-cp38-abi3-win_amd64.whl", hash = "sha256:d686cf4ce21c318bafe2f0574aec9f7f9526d18a4b0c017f507bd007f323e519"}, + {file = "primp-0.8.2.tar.gz", hash = "sha256:572ecd34b77021a89a0574b66b07e1da100afd6ec490d3b519a6763fac6ae6c5"}, ] [package.extras] @@ -7407,13 +7403,13 @@ rsa = ["cryptography"] [[package]] name = "pyobvector" -version = "0.1.16" +version = "0.1.17" description = "A python SDK for OceanBase Vector Store, based on SQLAlchemy, compatible with Milvus API." 
optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "pyobvector-0.1.16-py3-none-any.whl", hash = "sha256:d2ec2f974f0a32b65fa1558a39e7cb36d8e14b2192a7d603990f183c5cae79d7"}, - {file = "pyobvector-0.1.16.tar.gz", hash = "sha256:2d0fdd90d85cdfc8dc1d7b6950cd4fbb160a0696065c7d6ebdf70d09745896c5"}, + {file = "pyobvector-0.1.17-py3-none-any.whl", hash = "sha256:faf73d14ded736f21f2ce9d92d0964de9d477afeacfbf6d41db0b5b18495aadd"}, + {file = "pyobvector-0.1.17.tar.gz", hash = "sha256:bfc89f8de88806b63d64d7dfc15e5f9890243387ba53cc69247de52a46045d5a"}, ] [package.dependencies] @@ -8427,114 +8423,114 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.22.1" +version = "0.22.3" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" files = [ - {file = "rpds_py-0.22.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ab27dd4edd84b13309f268ffcdfc07aef8339135ffab7b6d43f16884307a2a48"}, - {file = "rpds_py-0.22.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9d5b925156a746dc1f5f52376fdd1fbdd3f6ffe1fcd6f5e06f77ca79abb940a3"}, - {file = "rpds_py-0.22.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201650b309c419143775c15209c620627de3c09a27c7fb58375325aec5cce260"}, - {file = "rpds_py-0.22.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:31264187fc934ff1024a4f56775f33c9252d3f4f3e27ec07d1995a26b52702c3"}, - {file = "rpds_py-0.22.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97c5ffe47ccf92d8b17e10f8a5ce28d015aa1196edc3359684cf31504eae6a14"}, - {file = "rpds_py-0.22.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9ac7280bd045f472b50306d7efeee051b69e3a2dd1b90f46bd7e86e63b1efa2"}, - {file = "rpds_py-0.22.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f941fb86195f97be7f6efe04a21b223f05dfe4d1dfb159999e2f8d101e44cc4"}, - {file = "rpds_py-0.22.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f91bfc39f7a64168e08ab831fa497ec5438c1d6c6e2f9e12848d95ad11ac8523"}, - {file = "rpds_py-0.22.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:effcae2152afe7937a28376dbabb25c770ef99ed4e16a4ffeb8e6a4f7c4f06aa"}, - {file = "rpds_py-0.22.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2177e59c033bf0d1bf7de1ced561205963583caf3242c6c700a723034bfb5f8e"}, - {file = "rpds_py-0.22.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:66f4f48a89cdd30ab3a47335df81c76e9a63799d0d84b29c0618371c66fa37b0"}, - {file = "rpds_py-0.22.1-cp310-cp310-win32.whl", hash = "sha256:b07fa9e634234e84096adfa4be3828c8f26e238679c122824b2b3d7131bec578"}, - {file = "rpds_py-0.22.1-cp310-cp310-win_amd64.whl", hash = "sha256:ca4657e9fd0b1b5376942d403d634ce188f79064f0873aa853ab05b10185ceec"}, - {file = "rpds_py-0.22.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:608c84699b2db09c6a8743845b1a3dad36fae53eaaecb241d45b13dff74405fb"}, - {file = "rpds_py-0.22.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9dae4eb9b5534e09ba6c6ab496a757e5e394b7e7b08767d25ca37e8d36491114"}, - {file = "rpds_py-0.22.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09a1f000c5f6e08b298275bae00921e9fbbf2a35dae0a86db2821c058c2201a9"}, - {file = "rpds_py-0.22.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:580ccbf11f02f948add4cb641843030a89f1463d7c0740cbfc9aca91e9dc34b3"}, - {file = 
"rpds_py-0.22.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96559e05bdf938b2048353e10a7920b98f853cefe4482c2064a718d7d0a50bd7"}, - {file = "rpds_py-0.22.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:128cbaed7ba26116820bcb992405d6a13ea18c8fca1b8c4f59906d858e91e979"}, - {file = "rpds_py-0.22.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:734783dd7da58f76222f458346ddebdb3621686a1a2a667db5049caf0c9956b9"}, - {file = "rpds_py-0.22.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c9ce6b83597d45bec44a2690857ede62fc98223772135f8a7fa90884eb726501"}, - {file = "rpds_py-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bca4428c4a957b78ded3e6e62884ab03f029dce8fa8d34818da0f80f61332b49"}, - {file = "rpds_py-0.22.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1ded65691a1d3fd7d2aa89d2c91aa51f941601bb2ce099739909034d957fef4b"}, - {file = "rpds_py-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:72407065ad459db9f3d052ea8c51e02534f02533fc61e51cbab3bd94166f086c"}, - {file = "rpds_py-0.22.1-cp311-cp311-win32.whl", hash = "sha256:eb013aa01b404219f28dc973d9e6310fd4db216d7299253dd355629952e0564e"}, - {file = "rpds_py-0.22.1-cp311-cp311-win_amd64.whl", hash = "sha256:8bd9ec1db79a664f4cbb12878693b73416f4d2cb425d3e27eccc1bdfbdc826ef"}, - {file = "rpds_py-0.22.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8ec41049c90d204a6561238a9ad6c7263ebb7009d9759c98b58078d9d2fec9ba"}, - {file = "rpds_py-0.22.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:102be79c4cc47a4aeb5912401185c404cd2601c15a7163bbecff7f1bfe20b669"}, - {file = "rpds_py-0.22.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a603155db408f773637f9e3a712c6e3cbc521aaa8fa2b99f9ba6106c59a2496"}, - {file = "rpds_py-0.22.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5dbff9402c2bdf00bf0df9905694b3c292a3847c725651938a72f554351a5fcb"}, - {file = "rpds_py-0.22.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96b3759d8ab2323324e0a92b2f44834f9d88089b8d1ab6f533b61f4be3411cef"}, - {file = "rpds_py-0.22.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3029f481b31f329b1fdb4ec4b56935d82210ddd9c6f86ea5a87c06f1e97b161"}, - {file = "rpds_py-0.22.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d280b4bf09f719b89fd9aab3b71067acc0d0449b7d1eba99a2ade4939cef8296"}, - {file = "rpds_py-0.22.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8e97e19aa7b0b0d801a159f932ce4435f1049c8c38e2bb372bb5bee559ce50"}, - {file = "rpds_py-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:50e4b5d291105f7063259fe0125b1af902fb34499444d7c5c521dd8328b00939"}, - {file = "rpds_py-0.22.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d3777c446bb1c5fcd82dc3f8776e1a146cd91e80cc1892f8634575ace438d22f"}, - {file = "rpds_py-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:447ae1104fb32197b9262f772d565d38e834cc2e9edd89350b37b88fed636e70"}, - {file = "rpds_py-0.22.1-cp312-cp312-win32.whl", hash = "sha256:55d371b9d8b0c2a68a50413a8cb01c3c3ce1ea4f768bf77b66669a9a486e101e"}, - {file = "rpds_py-0.22.1-cp312-cp312-win_amd64.whl", hash = "sha256:413a30a99d8683dace3765885920ed27ab662efbb6c98d81db76c397ad1ffd71"}, - {file = "rpds_py-0.22.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa2ba0176037c915d8660a4e46581d645e2c22b5373e466bc8640a794d45861a"}, - {file = 
"rpds_py-0.22.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4ba6c66fbc6015b2f99e7176fec41793cecb00c4cc357cad038dff85e6ac42ab"}, - {file = "rpds_py-0.22.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15fa4ca658f8ad22645d3531682b17e5580832efbfa87304c3e62214c79c1e8a"}, - {file = "rpds_py-0.22.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d7833ef6f5d6cb634f296abfd93452fb3eb44c4e9a6ae95c1021eab704c1cee2"}, - {file = "rpds_py-0.22.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0467838c90435b80793cde486a318fc916ee57f2af54e4b10c72b20cbdcbaa9"}, - {file = "rpds_py-0.22.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d962e2e89b3a95e3597a34b8c93ced1e98958502c5b8096c9fd69deff279f561"}, - {file = "rpds_py-0.22.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ce729f1dc8a4a190c34b69f75377bddc004079b2963ab722ab91fafe040be6d"}, - {file = "rpds_py-0.22.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8080467df22feca0fc9c46567001777c6fbc2b4a2683a7137420896051874ca1"}, - {file = "rpds_py-0.22.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0f9eb37d3a60b262a98ab51ee899cac039de9ca0ce68dcf1a6518a09719020b0"}, - {file = "rpds_py-0.22.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:153248f48d6f90a295a502f53ec544a3ffbd21b0bb32f5dca39c4b93a764d6a2"}, - {file = "rpds_py-0.22.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0a53592cdf98cec3dfcdb24ffec8a4797e7656b65700099af43ec7df023b6de4"}, - {file = "rpds_py-0.22.1-cp313-cp313-win32.whl", hash = "sha256:e8056adcefa2dcb67e8bc91ea5eee26df66e8b297a8cd6ff0903f85c70908fa0"}, - {file = "rpds_py-0.22.1-cp313-cp313-win_amd64.whl", hash = "sha256:a451dba533be77454ebcffc85189108fc05f279100835ac76e7989edacb89156"}, - {file = "rpds_py-0.22.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:2ea23f1525d4f64286dbe0947c929d45c3ffe963b2dbed1d3844a2e4938bda42"}, - {file = "rpds_py-0.22.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3aaa22487477de9618ce3b37f99fbe81219ba96f3c2ca84f576f0ab451b83aba"}, - {file = "rpds_py-0.22.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8954b9ffe60f479a0c0ba40987db2546c735ab02a725ea7fd89342152d4d821d"}, - {file = "rpds_py-0.22.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8502a02ae3ae67084f5a0bf5a8253b19fa7a887f824e41e016cdb0ac532a06f"}, - {file = "rpds_py-0.22.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a083221b6a4ecdef38a60c95d8d3223d99449cb4da2544e9644958dc16664eb9"}, - {file = "rpds_py-0.22.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:542eb246d5be31b5e0a9c8ddb9539416f9b31f58f75bd4ee328bff2b5c58d6fd"}, - {file = "rpds_py-0.22.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffae97d28ea4f2c613a751d087b75a97fb78311b38cc2e9a2f4587e473ace167"}, - {file = "rpds_py-0.22.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0ff8d5b13ce2357fa8b33a0a2e3775aa71df5bf7c8ba060634c9d15ab12f357"}, - {file = "rpds_py-0.22.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0f057a0c546c42964836b209d8de9ea1a4f4b0432006c6343cbe633d8ca14571"}, - {file = "rpds_py-0.22.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:48ee97c7c6027fd423058675b5a39d0b5f7a1648250b671563d5c9f74ff13ff0"}, - {file = "rpds_py-0.22.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:babec324e8654a59122aaa66936a9a483faa03276db9792f51332475c2dddc4a"}, - {file = "rpds_py-0.22.1-cp313-cp313t-win32.whl", hash = "sha256:e69acdbc132c9592c8dc393af85e38e206ca847c7019a953ff625191c3a12312"}, - {file = "rpds_py-0.22.1-cp313-cp313t-win_amd64.whl", hash = "sha256:c783e4ed68200f4e03c125690d23158b1c49c4b186d458a18debc109bbdc3c2e"}, - {file = "rpds_py-0.22.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2143c3aed85992604d758bbe67da839fb4aab3dd2e1c6dddab5b3ca7162b34a2"}, - {file = "rpds_py-0.22.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f57e2d0f8022783426121b586d7c842ea40ea832a29e28ca36c881b54c74fb28"}, - {file = "rpds_py-0.22.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c0c324879d483504b07f7b18eb1b50567c434263bbe4866ecce33056162668a"}, - {file = "rpds_py-0.22.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1c40e02cc4f3e18fd39344edb10eebe04bd11cfd13119606b5771e5ea51630d3"}, - {file = "rpds_py-0.22.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f76c6f319e57007ad52e671ec741d801324760a377e3d4992c9bb8200333ebac"}, - {file = "rpds_py-0.22.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5cae9b415ea8a6a563566dbf46650222eccc5971c7daa16fbee63aef92ae543"}, - {file = "rpds_py-0.22.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b09209cdfcacf5eba9cf80367130532e6c02e695252e1f64d3cfcc2356e6e19f"}, - {file = "rpds_py-0.22.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dbe428d0ac6eacaf05402adbaf137f59ad6063848182d1ff294f95ce0f24005b"}, - {file = "rpds_py-0.22.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:626b9feb01bff049a5aec4804f0c58db12585778b4902e5376a95b01f80a7a16"}, - {file = "rpds_py-0.22.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec1ccc2a9f764cd632fb8ab28fdde166250df54fc8d97315a4a6948dc5367639"}, - {file = "rpds_py-0.22.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ef92b1fbe6aa2e7885eb90853cc016b1fc95439a8cc8da6d526880e9e2148695"}, - {file = "rpds_py-0.22.1-cp39-cp39-win32.whl", hash = "sha256:c88535f83f7391cf3a45af990237e3939a6fdfbedaed2571633bfdd0bceb36b0"}, - {file = "rpds_py-0.22.1-cp39-cp39-win_amd64.whl", hash = "sha256:7839b7528faa4d134c183b1f2dd1ee4dc2ca2f899f4f0cfdf00fc04c255262a7"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a0ed14a4162c2c2b21a162c9fcf90057e3e7da18cd171ab344c1e1664f75090e"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:05fdeae9010533e47715c37df83264df0122584e40d691d50cf3607c060952a3"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4659b2e4a5008715099e216050f5c6976e5a4329482664411789968b82e3f17d"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a18aedc032d6468b73ebbe4437129cb30d54fe543cde2f23671ecad76c3aea24"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149b4d875ef9b12a8f5e303e86a32a58f8ef627e57ec97a7d0e4be819069d141"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdaee3947eaaa52dae3ceb9d9f66329e13d8bae35682b1e5dd54612938693934"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36ce951800ed2acc6772fd9f42150f29d567f0423989748052fdb39d9e2b5795"}, - {file = 
"rpds_py-0.22.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ab784621d3e2a41916e21f13a483602cc989fd45fff637634b9231ba43d4383b"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:c2a214bf5b79bd39a9de1c991353aaaacafda83ba1374178309e92be8e67d411"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:85060e96953647871957d41707adb8d7bff4e977042fd0deb4fc1881b98dd2fe"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c6f3fd617db422c9d4e12cb8d84c984fe07d6d9cb0950cbf117f3bccc6268d05"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f2d1b58a0c3a73f0361759642e80260a6d28eee6501b40fe25b82af33ef83f21"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:76eaa4c087a061a2c8a0a92536405069878a8f530c00e84a9eaf332e70f5561f"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:959ae04ed30cde606f3a0320f0a1f4167a107e685ef5209cce28c5080590bd31"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:198067aa6f3d942ff5d0d655bb1e91b59ae85279d47590682cba2834ac1b97d2"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3e7e99e2af59c56c59b6c964d612511b8203480d39d1ef83edc56f2cb42a3f5d"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0545928bdf53dfdfcab284468212efefb8a6608ca3b6910c7fb2e5ed8bdc2dc0"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef7282d8a14b60dd515e47060638687710b1d518f4b5e961caad43fb3a3606f9"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe3f245c2f39a5692d9123c174bc48f6f9fe3e96407e67c6d04541a767d99e72"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efb2ad60ca8637d5f9f653f9a9a8d73964059972b6b95036be77e028bffc68a3"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d8306f27418361b788e3fca9f47dec125457f80122e7e31ba7ff5cdba98343f8"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4c8dc7331e8cbb1c0ea2bcb550adb1777365944ffd125c69aa1117fdef4887f5"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:776a06cb5720556a549829896a49acebb5bdd96c7bba100191a994053546975a"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e4f91d702b9ce1388660b3d4a28aa552614a1399e93f718ed0dacd68f23b3d32"}, - {file = "rpds_py-0.22.1.tar.gz", hash = "sha256:157a023bded0618a1eea54979fe2e0f9309e9ddc818ef4b8fc3b884ff38fedd5"}, + {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, + {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf"}, + {file = "rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652"}, + {file = "rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8"}, + {file = "rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f"}, + {file = "rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a"}, + {file = "rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64"}, + {file = "rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7"}, + {file = "rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627"}, + {file = "rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4"}, + {file = "rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84"}, + {file = "rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f"}, + 
{file = "rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de"}, + {file = "rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = "sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333"}, + {file = "rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730"}, + {file = "rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520"}, + {file = "rpds_py-0.22.3-cp39-cp39-win32.whl", hash = "sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9"}, + {file = "rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6"}, + {file = "rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d"}, ] [[package]] @@ -8553,29 +8549,29 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruff" -version = "0.8.1" +version = "0.8.2" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.8.1-py3-none-linux_armv6l.whl", hash = "sha256:fae0805bd514066f20309f6742f6ee7904a773eb9e6c17c45d6b1600ca65c9b5"}, - {file = "ruff-0.8.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b8a4f7385c2285c30f34b200ca5511fcc865f17578383db154e098150ce0a087"}, - {file = "ruff-0.8.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:cd054486da0c53e41e0086e1730eb77d1f698154f910e0cd9e0d64274979a209"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2029b8c22da147c50ae577e621a5bfbc5d1fed75d86af53643d7a7aee1d23871"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2666520828dee7dfc7e47ee4ea0d928f40de72056d929a7c5292d95071d881d1"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:333c57013ef8c97a53892aa56042831c372e0bb1785ab7026187b7abd0135ad5"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:288326162804f34088ac007139488dcb43de590a5ccfec3166396530b58fb89d"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b12c39b9448632284561cbf4191aa1b005882acbc81900ffa9f9f471c8ff7e26"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:364e6674450cbac8e998f7b30639040c99d81dfb5bbc6dfad69bc7a8f916b3d1"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b22346f845fec132aa39cd29acb94451d030c10874408dbf776af3aaeb53284c"}, - {file = "ruff-0.8.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b2f2f7a7e7648a2bfe6ead4e0a16745db956da0e3a231ad443d2a66a105c04fa"}, - {file = "ruff-0.8.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:adf314fc458374c25c5c4a4a9270c3e8a6a807b1bec018cfa2813d6546215540"}, - {file = "ruff-0.8.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a885d68342a231b5ba4d30b8c6e1b1ee3a65cf37e3d29b3c74069cdf1ee1e3c9"}, - {file = 
"ruff-0.8.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d2c16e3508c8cc73e96aa5127d0df8913d2290098f776416a4b157657bee44c5"}, - {file = "ruff-0.8.1-py3-none-win32.whl", hash = "sha256:93335cd7c0eaedb44882d75a7acb7df4b77cd7cd0d2255c93b28791716e81790"}, - {file = "ruff-0.8.1-py3-none-win_amd64.whl", hash = "sha256:2954cdbe8dfd8ab359d4a30cd971b589d335a44d444b6ca2cb3d1da21b75e4b6"}, - {file = "ruff-0.8.1-py3-none-win_arm64.whl", hash = "sha256:55873cc1a473e5ac129d15eccb3c008c096b94809d693fc7053f588b67822737"}, - {file = "ruff-0.8.1.tar.gz", hash = "sha256:3583db9a6450364ed5ca3f3b4225958b24f78178908d5c4bc0f46251ccca898f"}, + {file = "ruff-0.8.2-py3-none-linux_armv6l.whl", hash = "sha256:c49ab4da37e7c457105aadfd2725e24305ff9bc908487a9bf8d548c6dad8bb3d"}, + {file = "ruff-0.8.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ec016beb69ac16be416c435828be702ee694c0d722505f9c1f35e1b9c0cc1bf5"}, + {file = "ruff-0.8.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f05cdf8d050b30e2ba55c9b09330b51f9f97d36d4673213679b965d25a785f3c"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60f578c11feb1d3d257b2fb043ddb47501ab4816e7e221fbb0077f0d5d4e7b6f"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbd5cf9b0ae8f30eebc7b360171bd50f59ab29d39f06a670b3e4501a36ba5897"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b402ddee3d777683de60ff76da801fa7e5e8a71038f57ee53e903afbcefdaa58"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:705832cd7d85605cb7858d8a13d75993c8f3ef1397b0831289109e953d833d29"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32096b41aaf7a5cc095fa45b4167b890e4c8d3fd217603f3634c92a541de7248"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e769083da9439508833cfc7c23e351e1809e67f47c50248250ce1ac52c21fb93"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fe716592ae8a376c2673fdfc1f5c0c193a6d0411f90a496863c99cd9e2ae25d"}, + {file = "ruff-0.8.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:81c148825277e737493242b44c5388a300584d73d5774defa9245aaef55448b0"}, + {file = "ruff-0.8.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d261d7850c8367704874847d95febc698a950bf061c9475d4a8b7689adc4f7fa"}, + {file = "ruff-0.8.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1ca4e3a87496dc07d2427b7dd7ffa88a1e597c28dad65ae6433ecb9f2e4f022f"}, + {file = "ruff-0.8.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:729850feed82ef2440aa27946ab39c18cb4a8889c1128a6d589ffa028ddcfc22"}, + {file = "ruff-0.8.2-py3-none-win32.whl", hash = "sha256:ac42caaa0411d6a7d9594363294416e0e48fc1279e1b0e948391695db2b3d5b1"}, + {file = "ruff-0.8.2-py3-none-win_amd64.whl", hash = "sha256:2aae99ec70abf43372612a838d97bfe77d45146254568d94926e8ed5bbb409ea"}, + {file = "ruff-0.8.2-py3-none-win_arm64.whl", hash = "sha256:fb88e2a506b70cfbc2de6fae6681c4f944f7dd5f2fe87233a7233d888bad73e8"}, + {file = "ruff-0.8.2.tar.gz", hash = "sha256:b84f4f414dda8ac7f75075c1fa0b905ac0ff25361f42e6d5da681a465e0f78e5"}, ] [[package]] @@ -9074,13 +9070,13 @@ docs = ["sphinx"] [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - 
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] @@ -9384,13 +9380,13 @@ test = ["pytest", "tornado (>=4.5)", "typeguard"] [[package]] name = "tencentcloud-sdk-python-common" -version = "3.0.1275" +version = "3.0.1277" description = "Tencent Cloud Common SDK for Python" optional = false python-versions = "*" files = [ - {file = "tencentcloud-sdk-python-common-3.0.1275.tar.gz", hash = "sha256:81ad21abfe142973f25b9601af812587fd7f028f25ea5aea19d13d397e0d1469"}, - {file = "tencentcloud_sdk_python_common-3.0.1275-py2.py3-none-any.whl", hash = "sha256:3bcf5ea373cf17efe2c312717afffe3dd2fb070d21bf0b289609a0e21fd45889"}, + {file = "tencentcloud-sdk-python-common-3.0.1277.tar.gz", hash = "sha256:6cbdd664a7e764588b7ce609b95f9842d695d4adf7bc41062d2c44b96635e05d"}, + {file = "tencentcloud_sdk_python_common-3.0.1277-py2.py3-none-any.whl", hash = "sha256:14a7c7da997f8a565fae23ad3e94416fa7f63613b052070135f6bea3e3a3bc95"}, ] [package.dependencies] @@ -9398,17 +9394,17 @@ requests = ">=2.16.0" [[package]] name = "tencentcloud-sdk-python-hunyuan" -version = "3.0.1275" +version = "3.0.1277" description = "Tencent Cloud Hunyuan SDK for Python" optional = false python-versions = "*" files = [ - {file = "tencentcloud-sdk-python-hunyuan-3.0.1275.tar.gz", hash = "sha256:15804b6f0e686e516ffbb39fd87200559189feddd12e7f1866cdd59c616294f2"}, - {file = "tencentcloud_sdk_python_hunyuan-3.0.1275-py2.py3-none-any.whl", hash = "sha256:97aa7b3af42fdbab001ecbc87f69b7215c67983d9fbac40a0bcc06a762a01132"}, + {file = "tencentcloud-sdk-python-hunyuan-3.0.1277.tar.gz", hash = "sha256:0df70b21f9affa1d6139f006abb4cd56ced07083e4306d7d8272080566715db3"}, + {file = "tencentcloud_sdk_python_hunyuan-3.0.1277-py2.py3-none-any.whl", hash = "sha256:2fef7233327fbf7bd2da987184d9dd731968aae0b7b6f2b9f177b0730b4e181f"}, ] [package.dependencies] -tencentcloud-sdk-python-common = "3.0.1275" +tencentcloud-sdk-python-common = "3.0.1277" [[package]] name = "termcolor" @@ -9793,13 +9789,13 @@ requests = ">=2.0.0" [[package]] name = "typer" -version = "0.15.0" +version = "0.15.1" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." optional = false python-versions = ">=3.7" files = [ - {file = "typer-0.15.0-py3-none-any.whl", hash = "sha256:bd16241db7e0f989ce1a0d8faa5aa1e43b9b9ac3fd1d4b8bcff91503d6717e38"}, - {file = "typer-0.15.0.tar.gz", hash = "sha256:8995452a598922ed8d8ad8c06ca63a218881ab601f5fa6fb0c511f7776497c7e"}, + {file = "typer-0.15.1-py3-none-any.whl", hash = "sha256:7994fb7b8155b64d3402518560648446072864beefd44aa2dc36972a5972e847"}, + {file = "typer-0.15.1.tar.gz", hash = "sha256:a0588c0a7fa68a1978a069818657778f86abe6ff5ea6abf472f940a08bfe4f0a"}, ] [package.dependencies] @@ -9948,13 +9944,13 @@ files = [ [[package]] name = "unstructured" -version = "0.16.9" +version = "0.16.10" description = "A library that prepares raw documents for downstream ML tasks." 
optional = false python-versions = "<3.13,>=3.9.0" files = [ - {file = "unstructured-0.16.9-py3-none-any.whl", hash = "sha256:246e44dc99e7913677b9bb274782a7d61f2e2682581106c346b6daf969bbaaa0"}, - {file = "unstructured-0.16.9.tar.gz", hash = "sha256:30b47d5baf2a4eaa993c75812fa947c9fea870000eb82473a216829aa1d407d5"}, + {file = "unstructured-0.16.10-py3-none-any.whl", hash = "sha256:738fc020fb4d9dfd1a3e54fee255221f7f916afafa16ff4e1a7a14495ba5b5ce"}, + {file = "unstructured-0.16.10.tar.gz", hash = "sha256:61c4a447514ab5d6f8629fde2da03833cf29e0bee26a1d3b901ac57d3b5d523a"}, ] [package.dependencies] @@ -10527,13 +10523,13 @@ requests = ">=2.0.0,<3.0.0" [[package]] name = "win32-setctime" -version = "1.1.0" +version = "1.2.0" description = "A small Python utility to set file creation time on Windows" optional = false python-versions = ">=3.5" files = [ - {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, - {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, + {file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"}, + {file = "win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"}, ] [package.extras] @@ -10834,13 +10830,13 @@ requests = "*" [[package]] name = "zhipuai" -version = "2.1.5.20241203" +version = "2.1.5.20241204" description = "A SDK library for accessing big model apis from ZhipuAI" optional = false python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" files = [ - {file = "zhipuai-2.1.5.20241203-py3-none-any.whl", hash = "sha256:77267aebbb7dabbff1d0706c4fc1d529feb17959613d1b130ba58a733548c21c"}, - {file = "zhipuai-2.1.5.20241203.tar.gz", hash = "sha256:4096a467cb3f43c4eb63e6e19564d2347624ceaf89a529b9e849fff0935f3da2"}, + {file = "zhipuai-2.1.5.20241204-py3-none-any.whl", hash = "sha256:063c7527d6741ced82eedb19d53fd24ce61cf43ab835ee3c0262843f59503a7c"}, + {file = "zhipuai-2.1.5.20241204.tar.gz", hash = "sha256:888b42a83c8f1daf07375b84e560219eedab96b9f9e59542f0329928291db635"}, ] [package.dependencies] @@ -11056,4 +11052,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.13" -content-hash = "03d62501ae48efc47f3f35dbea7e66ccd1fbcebe69e263f6396d00e3803f2114" +content-hash = "1aa6a44bc9270d50c9c0ea09f55a304b5148bf4dbbbb068ff1b1ea8da6fa60cc" diff --git a/api/pyproject.toml b/api/pyproject.toml index e3820ecf9a..a20c129e9c 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -45,7 +45,7 @@ google-auth-httplib2 = "0.2.0" google-cloud-aiplatform = "1.49.0" google-generativeai = "0.8.1" googleapis-common-protos = "1.63.0" -gunicorn = "~22.0.0" +gunicorn = "~23.0.0" httpx = { version = "~0.27.0", extras = ["socks"] } huggingface-hub = "~0.16.4" jieba = "0.42.1" @@ -134,6 +134,7 @@ bce-python-sdk = "~0.9.23" cos-python-sdk-v5 = "1.9.30" esdk-obs-python = "3.24.6.1" google-cloud-storage = "2.16.0" +opendal = "~0.45.12" oss2 = "2.18.5" supabase = "~2.8.1" tos = "~2.7.1" diff --git a/api/schedule/clean_messages.py b/api/schedule/clean_messages.py index 72ee2a8901..97e5c77e95 100644 --- a/api/schedule/clean_messages.py +++ b/api/schedule/clean_messages.py @@ -36,14 +36,16 @@ def clean_messages(): db.session.query(Message) .filter(Message.created_at < plan_sandbox_clean_message_day) .order_by(Message.created_at.desc()) - 
.paginate(page=page, per_page=100) + .limit(100) + .all() ) except NotFound: break - if messages.items is None or len(messages.items) == 0: + if not messages: break - for message in messages.items: + for message in messages: + plan_sandbox_clean_message_day = message.created_at app = App.query.filter_by(id=message.app_id).first() features_cache_key = f"features:{app.tenant_id}" plan_cache = redis_client.get(features_cache_key) diff --git a/api/services/app_generate_service.py b/api/services/app_generate_service.py index 545de8190c..9def7d15e9 100644 --- a/api/services/app_generate_service.py +++ b/api/services/app_generate_service.py @@ -108,6 +108,9 @@ class AppGenerateService: raise ValueError(f"Invalid app mode {app_model.mode}") except RateLimitError as e: raise InvokeRateLimitError(str(e)) + except Exception: + rate_limit.exit(request_id) + raise finally: if not streaming: rate_limit.exit(request_id) diff --git a/api/services/external_knowledge_service.py b/api/services/external_knowledge_service.py index 7e3cd87f1e..7be20301a7 100644 --- a/api/services/external_knowledge_service.py +++ b/api/services/external_knowledge_service.py @@ -69,7 +69,10 @@ class ExternalDatasetService: endpoint = f"{settings['endpoint']}/retrieval" api_key = settings["api_key"] if not validators.url(endpoint, simple_host=True): - raise ValueError(f"invalid endpoint: {endpoint}") + if not endpoint.startswith("http://") and not endpoint.startswith("https://"): + raise ValueError(f"invalid endpoint: {endpoint} must start with http:// or https://") + else: + raise ValueError(f"invalid endpoint: {endpoint}") try: response = httpx.post(endpoint, headers={"Authorization": f"Bearer {api_key}"}) except Exception as e: diff --git a/api/services/tools/workflow_tools_manage_service.py b/api/services/tools/workflow_tools_manage_service.py index 833881b668..318107bebb 100644 --- a/api/services/tools/workflow_tools_manage_service.py +++ b/api/services/tools/workflow_tools_manage_service.py @@ -81,6 +81,10 @@ class WorkflowToolManageService: db.session.add(workflow_tool_provider) db.session.commit() + if labels is not None: + ToolLabelManager.update_tool_labels( + ToolTransformService.workflow_provider_to_controller(workflow_tool_provider), labels + ) return {"result": "success"} @classmethod diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py index 37d7d0937c..84768d5af0 100644 --- a/api/services/workflow_service.py +++ b/api/services/workflow_service.py @@ -2,7 +2,7 @@ import json import time from collections.abc import Sequence from datetime import UTC, datetime -from typing import Optional +from typing import Optional, cast from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager @@ -11,6 +11,9 @@ from core.variables import Variable from core.workflow.entities.node_entities import NodeRunResult from core.workflow.errors import WorkflowNodeRunFailedError from core.workflow.nodes import NodeType +from core.workflow.nodes.base.entities import BaseNodeData +from core.workflow.nodes.base.node import BaseNode +from core.workflow.nodes.enums import ErrorStrategy from core.workflow.nodes.event import RunCompletedEvent from core.workflow.nodes.node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING from core.workflow.workflow_entry import WorkflowEntry @@ -225,7 +228,7 @@ class WorkflowService: user_inputs=user_inputs, user_id=account.id, ) - + node_instance = 
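The clean_messages hunk above drops Flask-SQLAlchemy's offset-based .paginate() in favor of a keyset-style loop: each pass takes at most 100 rows below the cutoff, and the cutoff is re-anchored to each row's created_at, so rows deleted in one pass can no longer shift the window for the next. A minimal sketch of the pattern, assuming the Message model and SQLAlchemy session from the hunk:

    def clean_in_batches(session, cutoff, batch_size=100):
        # Keyset pagination sketch: re-anchor the upper bound on rows already
        # seen instead of advancing an offset that deletions would skew.
        # `session` and `Message` are the objects from the hunk above.
        while True:
            batch = (
                session.query(Message)
                .filter(Message.created_at < cutoff)
                .order_by(Message.created_at.desc())
                .limit(batch_size)
                .all()
            )
            if not batch:
                break
            for message in batch:
                cutoff = message.created_at  # move the cursor past this row
                ...  # per-message cleanup elided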
cast(BaseNode[BaseNodeData], node_instance) node_run_result: NodeRunResult | None = None for event in generator: if isinstance(event, RunCompletedEvent): @@ -237,8 +240,35 @@ class WorkflowService: if not node_run_result: raise ValueError("Node run failed with no run result") - - run_succeeded = True if node_run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED else False + # single step debug mode error handling return + if node_run_result.status == WorkflowNodeExecutionStatus.FAILED and node_instance.should_continue_on_error: + node_error_args = { + "status": WorkflowNodeExecutionStatus.EXCEPTION, + "error": node_run_result.error, + "inputs": node_run_result.inputs, + "metadata": {"error_strategy": node_instance.node_data.error_strategy}, + } + if node_instance.node_data.error_strategy is ErrorStrategy.DEFAULT_VALUE: + node_run_result = NodeRunResult( + **node_error_args, + outputs={ + **node_instance.node_data.default_value_dict, + "error_message": node_run_result.error, + "error_type": node_run_result.error_type, + }, + ) + else: + node_run_result = NodeRunResult( + **node_error_args, + outputs={ + "error_message": node_run_result.error, + "error_type": node_run_result.error_type, + }, + ) + run_succeeded = node_run_result.status in ( + WorkflowNodeExecutionStatus.SUCCEEDED, + WorkflowNodeExecutionStatus.EXCEPTION, + ) error = node_run_result.error if not run_succeeded else None except WorkflowNodeRunFailedError as e: node_instance = e.node_instance @@ -260,7 +290,6 @@ class WorkflowService: workflow_node_execution.created_by = account.id workflow_node_execution.created_at = datetime.now(UTC).replace(tzinfo=None) workflow_node_execution.finished_at = datetime.now(UTC).replace(tzinfo=None) - if run_succeeded and node_run_result: # create workflow node execution inputs = WorkflowEntry.handle_special_values(node_run_result.inputs) if node_run_result.inputs else None @@ -277,7 +306,11 @@ class WorkflowService: workflow_node_execution.execution_metadata = ( json.dumps(jsonable_encoder(node_run_result.metadata)) if node_run_result.metadata else None ) - workflow_node_execution.status = WorkflowNodeExecutionStatus.SUCCEEDED.value + if node_run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED: + workflow_node_execution.status = WorkflowNodeExecutionStatus.SUCCEEDED.value + elif node_run_result.status == WorkflowNodeExecutionStatus.EXCEPTION: + workflow_node_execution.status = WorkflowNodeExecutionStatus.EXCEPTION.value + workflow_node_execution.error = node_run_result.error else: # create workflow node execution workflow_node_execution.status = WorkflowNodeExecutionStatus.FAILED.value diff --git a/api/tasks/external_document_indexing_task.py b/api/tasks/external_document_indexing_task.py index 69f3072e2d..a45b3030bf 100644 --- a/api/tasks/external_document_indexing_task.py +++ b/api/tasks/external_document_indexing_task.py @@ -3,7 +3,7 @@ import logging import time import click -from celery import shared_task +from celery import shared_task # type: ignore from core.indexing_runner import DocumentIsPausedError from extensions.ext_database import db @@ -68,11 +68,9 @@ def external_document_indexing_task( settings = ExternalDatasetService.get_external_knowledge_api_settings( json.loads(external_knowledge_api.settings) ) - # assemble headers - headers = ExternalDatasetService.assembling_headers(settings.authorization, settings.headers) # do http request - response = ExternalDatasetService.process_external_api(settings, headers, process_parameter, files) + response = 
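In the workflow_service hunk above, a FAILED single-step run is rewritten to EXCEPTION when the node opts into continue-on-error, and the default-value strategy additionally merges the node's declared defaults into the outputs. Stripped of the NodeRunResult plumbing, the mapping is roughly as follows (plain dicts standing in for the real entities):

    def to_exception_result(result: dict, strategy: str, defaults: dict) -> dict:
        # `result` is a failed run: {"error": ..., "error_type": ..., "inputs": ...}
        outputs = {"error_message": result["error"], "error_type": result["error_type"]}
        if strategy == "default-value":
            outputs = {**defaults, **outputs}  # defaults first; error fields win on clash
        return {
            "status": "exception",
            "error": result["error"],
            "inputs": result.get("inputs"),
            "metadata": {"error_strategy": strategy},
            "outputs": outputs,
        }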
ExternalDatasetService.process_external_api(settings, files) job_id = response.json().get("job_id") if job_id: # save job_id to dataset diff --git a/api/tests/integration_tests/vdb/lindorm/test_lindorm.py b/api/tests/integration_tests/vdb/lindorm/test_lindorm.py index f8f43ba6ef..0a26d3ea1c 100644 --- a/api/tests/integration_tests/vdb/lindorm/test_lindorm.py +++ b/api/tests/integration_tests/vdb/lindorm/test_lindorm.py @@ -7,9 +7,10 @@ env = environs.Env() class Config: - SEARCH_ENDPOINT = env.str("SEARCH_ENDPOINT", "http://ld-*************-proxy-search-pub.lindorm.aliyuncs.com:30070") + SEARCH_ENDPOINT = env.str("SEARCH_ENDPOINT", "http://ld-************-proxy-search-pub.lindorm.aliyuncs.com:30070") SEARCH_USERNAME = env.str("SEARCH_USERNAME", "ADMIN") - SEARCH_PWD = env.str("SEARCH_PWD", "PWD") + SEARCH_PWD = env.str("SEARCH_PWD", "ADMIN") + USING_UGC = env.bool("USING_UGC", True) class TestLindormVectorStore(AbstractVectorTest): @@ -31,5 +32,27 @@ class TestLindormVectorStore(AbstractVectorTest): assert ids[0] == self.example_doc_id -def test_lindorm_vector(setup_mock_redis): +class TestLindormVectorStoreUGC(AbstractVectorTest): + def __init__(self): + super().__init__() + self.vector = LindormVectorStore( + collection_name="ugc_index_test", + config=LindormVectorStoreConfig( + hosts=Config.SEARCH_ENDPOINT, + username=Config.SEARCH_USERNAME, + password=Config.SEARCH_PWD, + using_ugc=Config.USING_UGC, + ), + routing_value=self.collection_name, + ) + + def get_ids_by_metadata_field(self): + ids = self.vector.get_ids_by_metadata_field(key="doc_id", value=self.example_doc_id) + assert ids is not None + assert len(ids) == 1 + assert ids[0] == self.example_doc_id + + +def test_lindorm_vector_ugc(setup_mock_redis): TestLindormVectorStore().run_all_tests() + TestLindormVectorStoreUGC().run_all_tests() diff --git a/api/tests/integration_tests/vdb/tidb_vector/test_tidb_vector.py b/api/tests/integration_tests/vdb/tidb_vector/test_tidb_vector.py index 2a5320c7d5..4c83c66bff 100644 --- a/api/tests/integration_tests/vdb/tidb_vector/test_tidb_vector.py +++ b/api/tests/integration_tests/vdb/tidb_vector/test_tidb_vector.py @@ -12,11 +12,11 @@ def tidb_vector(): return TiDBVector( collection_name="test_collection", config=TiDBVectorConfig( - host="xxx.eu-central-1.xxx.aws.tidbcloud.com", - port="4000", - user="xxx.root", - password="xxxxxx", - database="dify", + host="localhost", + port=4000, + user="root", + password="", + database="test", program_name="langgenius/dify", ), ) @@ -27,35 +27,14 @@ class TiDBVectorTest(AbstractVectorTest): super().__init__() self.vector = vector - def text_exists(self): - exist = self.vector.text_exists(self.example_doc_id) - assert exist == False - - def search_by_vector(self): - hits_by_vector: list[Document] = self.vector.search_by_vector(query_vector=self.example_embedding) - assert len(hits_by_vector) == 0 - def search_by_full_text(self): hits_by_full_text: list[Document] = self.vector.search_by_full_text(query=get_example_text()) assert len(hits_by_full_text) == 0 def get_ids_by_metadata_field(self): - ids = self.vector.get_ids_by_metadata_field(key="document_id", value=self.example_doc_id) - assert len(ids) == 0 + ids = self.vector.get_ids_by_metadata_field(key="doc_id", value=self.example_doc_id) + assert len(ids) == 1 -def test_tidb_vector(setup_mock_redis, setup_tidbvector_mock, tidb_vector, mock_session): +def test_tidb_vector(setup_mock_redis, tidb_vector): TiDBVectorTest(vector=tidb_vector).run_all_tests() - - -@pytest.fixture -def mock_session(): - with 
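The Lindorm test above now reads its UGC toggle from the environment (USING_UGC, defaulting to true); environs does the string-to-bool coercion:

    import environs

    env = environs.Env()
    # Accepts "true"/"false", "1"/"0", "yes"/"no" (case-insensitive)
    using_ugc = env.bool("USING_UGC", True)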
patch("core.rag.datasource.vdb.tidb_vector.tidb_vector.Session", new_callable=MagicMock) as mock_session: - yield mock_session - - -@pytest.fixture -def setup_tidbvector_mock(tidb_vector, mock_session): - with patch("core.rag.datasource.vdb.tidb_vector.tidb_vector.create_engine"): - with patch.object(tidb_vector._engine, "connect"): - yield tidb_vector diff --git a/api/tests/unit_tests/configs/test_dify_config.py b/api/tests/unit_tests/configs/test_dify_config.py index 0eb310a51a..385eb08c36 100644 --- a/api/tests/unit_tests/configs/test_dify_config.py +++ b/api/tests/unit_tests/configs/test_dify_config.py @@ -37,7 +37,11 @@ def test_dify_config_undefined_entry(example_env_file): assert config["LOG_LEVEL"] == "INFO" +# NOTE: If there is a `.env` file in your Workspace, this test might not succeed as expected. +# This is due to `pymilvus` loading all the variables from the `.env` file into `os.environ`. def test_dify_config(example_env_file): + # clear system environment variables + os.environ.clear() # load dotenv file with pydantic-settings config = DifyConfig(_env_file=example_env_file) diff --git a/api/tests/unit_tests/configs/test_opendal_config_parse.py b/api/tests/unit_tests/configs/test_opendal_config_parse.py new file mode 100644 index 0000000000..94de40450b --- /dev/null +++ b/api/tests/unit_tests/configs/test_opendal_config_parse.py @@ -0,0 +1,20 @@ +import pytest + +from extensions.storage.opendal_storage import is_r2_endpoint + + +@pytest.mark.parametrize( + ("endpoint", "expected"), + [ + ("https://bucket.r2.cloudflarestorage.com", True), + ("https://custom-domain.r2.cloudflarestorage.com/", True), + ("https://bucket.r2.cloudflarestorage.com/path", True), + ("https://s3.amazonaws.com", False), + ("https://storage.googleapis.com", False), + ("http://localhost:9000", False), + ("invalid-url", False), + ("", False), + ], +) +def test_is_r2_endpoint(endpoint: str, expected: bool): + assert is_r2_endpoint(endpoint) == expected diff --git a/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_executor.py b/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_executor.py index af85b927a3..285384ee6e 100644 --- a/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_executor.py +++ b/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_executor.py @@ -48,7 +48,7 @@ def test_executor_with_json_body_and_number_variable(): assert executor.method == "post" assert executor.url == "https://api.example.com/data" assert executor.headers == {"Content-Type": "application/json"} - assert executor.params == {} + assert executor.params == [] assert executor.json == {"number": 42} assert executor.data is None assert executor.files is None @@ -102,9 +102,8 @@ def test_executor_with_json_body_and_object_variable(): assert executor.method == "post" assert executor.url == "https://api.example.com/data" assert executor.headers == {"Content-Type": "application/json"} - assert executor.params == {} - assert executor.json == {"name": "John Doe", - "age": 30, "email": "john@example.com"} + assert executor.params == [] + assert executor.json == {"name": "John Doe", "age": 30, "email": "john@example.com"} assert executor.data is None assert executor.files is None assert executor.content is None @@ -159,9 +158,8 @@ def test_executor_with_json_body_and_nested_object_variable(): assert executor.method == "post" assert executor.url == "https://api.example.com/data" assert executor.headers == {"Content-Type": "application/json"} - assert 
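The new test_opendal_config_parse.py pins down is_r2_endpoint by example. One plausible implementation consistent with all eight cases — the real one lives in extensions.storage.opendal_storage and may differ — is a host-suffix check:

    from urllib.parse import urlparse

    def is_r2_endpoint(endpoint: str) -> bool:
        # Cloudflare R2's S3-compatible endpoints live under
        # *.r2.cloudflarestorage.com; trailing slashes and paths are irrelevant.
        if not endpoint:
            return False
        return urlparse(endpoint).netloc.endswith(".r2.cloudflarestorage.com")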
executor.params == {} - assert executor.json == {"object": { - "name": "John Doe", "age": 30, "email": "john@example.com"}} + assert executor.params == [] + assert executor.json == {"object": {"name": "John Doe", "age": 30, "email": "john@example.com"}} assert executor.data is None assert executor.files is None assert executor.content is None @@ -199,7 +197,7 @@ def test_extract_selectors_from_template_with_newline(): variable_pool=variable_pool, ) - assert executor.params == {"test": "line1\nline2"} + assert executor.params == [("test", "line1\nline2")] def test_executor_with_form_data(): @@ -248,7 +246,7 @@ def test_executor_with_form_data(): assert executor.url == "https://api.example.com/upload" assert "Content-Type" in executor.headers assert "multipart/form-data" in executor.headers["Content-Type"] - assert executor.params == {} + assert executor.params == [] assert executor.json is None assert executor.files is None assert executor.content is None @@ -269,3 +267,72 @@ def test_executor_with_form_data(): assert "Hello, World!" in raw_request assert "number_field" in raw_request assert "42" in raw_request + + +def test_init_headers(): + def create_executor(headers: str) -> Executor: + node_data = HttpRequestNodeData( + title="test", + method="get", + url="http://example.com", + headers=headers, + params="", + authorization=HttpRequestNodeAuthorization(type="no-auth"), + ) + timeout = HttpRequestNodeTimeout(connect=10, read=30, write=30) + return Executor(node_data=node_data, timeout=timeout, variable_pool=VariablePool()) + + executor = create_executor("aa\n cc:") + executor._init_headers() + assert executor.headers == {"aa": "", "cc": ""} + + executor = create_executor("aa:bb\n cc:dd") + executor._init_headers() + assert executor.headers == {"aa": "bb", "cc": "dd"} + + executor = create_executor("aa:bb\n cc:dd\n") + executor._init_headers() + assert executor.headers == {"aa": "bb", "cc": "dd"} + + executor = create_executor("aa:bb\n\n cc : dd\n\n") + executor._init_headers() + assert executor.headers == {"aa": "bb", "cc": "dd"} + + +def test_init_params(): + def create_executor(params: str) -> Executor: + node_data = HttpRequestNodeData( + title="test", + method="get", + url="http://example.com", + headers="", + params=params, + authorization=HttpRequestNodeAuthorization(type="no-auth"), + ) + timeout = HttpRequestNodeTimeout(connect=10, read=30, write=30) + return Executor(node_data=node_data, timeout=timeout, variable_pool=VariablePool()) + + # Test basic key-value pairs + executor = create_executor("key1:value1\nkey2:value2") + executor._init_params() + assert executor.params == [("key1", "value1"), ("key2", "value2")] + + # Test empty values + executor = create_executor("key1:\nkey2:") + executor._init_params() + assert executor.params == [("key1", ""), ("key2", "")] + + # Test duplicate keys (which is allowed for params) + executor = create_executor("key1:value1\nkey1:value2") + executor._init_params() + assert executor.params == [("key1", "value1"), ("key1", "value2")] + + # Test whitespace handling + executor = create_executor(" key1 : value1 \n key2 : value2 ") + executor._init_params() + assert executor.params == [("key1", "value1"), ("key2", "value2")] + + # Test empty lines and extra whitespace + executor = create_executor("key1:value1\n\nkey2:value2\n\n") + executor._init_params() + assert executor.params == [("key1", "value1"), ("key2", "value2")] diff --git a/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_node.py 
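The executor test changes above reflect params moving from a dict to an ordered list of (key, value) tuples — the shape httpx accepts directly, and the only one that can carry duplicate keys. A hypothetical re-implementation of the parsing that satisfies every case asserted in test_init_params:

    def parse_params(text: str) -> list[tuple[str, str]]:
        # One "key:value" pair per line; whitespace around key and value is
        # trimmed, blank lines are skipped, empty values and duplicates are kept.
        params: list[tuple[str, str]] = []
        for line in text.splitlines():
            if not line.strip():
                continue
            key, _, value = line.partition(":")
            params.append((key.strip(), value.strip()))
        return params

    assert parse_params("key1:value1\nkey1:value2") == [("key1", "value1"), ("key1", "value2")]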
b/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_node.py index 9d6ab53644..304c7d6598 100644 --- a/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_node.py +++ b/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_node.py @@ -14,7 +14,6 @@ from core.workflow.nodes.http_request import ( HttpRequestNodeBody, HttpRequestNodeData, ) -from core.workflow.nodes.http_request.executor import _plain_text_to_dict from models.enums import UserFrom from models.workflow import WorkflowNodeExecutionStatus, WorkflowType diff --git a/api/tests/unit_tests/core/workflow/nodes/test_continue_on_error.py b/api/tests/unit_tests/core/workflow/nodes/test_continue_on_error.py new file mode 100644 index 0000000000..ba209e4020 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/nodes/test_continue_on_error.py @@ -0,0 +1,502 @@ +from core.app.entities.app_invoke_entities import InvokeFrom +from core.workflow.enums import SystemVariableKey +from core.workflow.graph_engine.entities.event import ( + GraphRunPartialSucceededEvent, + GraphRunSucceededEvent, + NodeRunExceptionEvent, + NodeRunStreamChunkEvent, +) +from core.workflow.graph_engine.entities.graph import Graph +from core.workflow.graph_engine.graph_engine import GraphEngine +from models.enums import UserFrom +from models.workflow import WorkflowType + + +class ContinueOnErrorTestHelper: + @staticmethod + def get_code_node(code: str, error_strategy: str = "fail-branch", default_value: dict | None = None): + """Helper method to create a code node configuration""" + node = { + "id": "node", + "data": { + "outputs": {"result": {"type": "number"}}, + "error_strategy": error_strategy, + "title": "code", + "variables": [], + "code_language": "python3", + "code": "\n".join([line[4:] for line in code.split("\n")]), + "type": "code", + }, + } + if default_value: + node["data"]["default_value"] = default_value + return node + + @staticmethod + def get_http_node( + error_strategy: str = "fail-branch", default_value: dict | None = None, authorization_success: bool = False + ): + """Helper method to create a http node configuration""" + authorization = ( + { + "type": "api-key", + "config": { + "type": "basic", + "api_key": "ak-xxx", + "header": "api-key", + }, + } + if authorization_success + else { + "type": "api-key", + # missing config field + } + ) + node = { + "id": "node", + "data": { + "title": "http", + "desc": "", + "method": "get", + "url": "http://example.com", + "authorization": authorization, + "headers": "X-Header:123", + "params": "A:b", + "body": None, + "type": "http-request", + "error_strategy": error_strategy, + }, + } + if default_value: + node["data"]["default_value"] = default_value + return node + + @staticmethod + def get_error_status_code_http_node(error_strategy: str = "fail-branch", default_value: dict | None = None): + """Helper method to create a http node configuration""" + node = { + "id": "node", + "data": { + "type": "http-request", + "title": "HTTP Request", + "desc": "", + "variables": [], + "method": "get", + "url": "https://api.github.com/issues", + "authorization": {"type": "no-auth", "config": None}, + "headers": "", + "params": "", + "body": {"type": "none", "data": []}, + "timeout": {"max_connect_timeout": 0, "max_read_timeout": 0, "max_write_timeout": 0}, + "error_strategy": error_strategy, + }, + } + if default_value: + node["data"]["default_value"] = default_value + return node + + @staticmethod + def get_tool_node(error_strategy: str = "fail-branch", 
default_value: dict | None = None): + """Helper method to create a tool node configuration""" + node = { + "id": "node", + "data": { + "title": "a", + "desc": "a", + "provider_id": "maths", + "provider_type": "builtin", + "provider_name": "maths", + "tool_name": "eval_expression", + "tool_label": "eval_expression", + "tool_configurations": {}, + "tool_parameters": { + "expression": { + "type": "variable", + "value": ["1", "123", "args1"], + } + }, + "type": "tool", + "error_strategy": error_strategy, + }, + } + if default_value: + node["data"]["default_value"] = default_value + return node + + @staticmethod + def get_llm_node(error_strategy: str = "fail-branch", default_value: dict | None = None): + """Helper method to create a llm node configuration""" + node = { + "id": "node", + "data": { + "title": "123", + "type": "llm", + "model": {"provider": "openai", "name": "gpt-3.5-turbo", "mode": "chat", "completion_params": {}}, + "prompt_template": [ + {"role": "system", "text": "you are a helpful assistant.\ntoday's weather is {{#abc.output#}}."}, + {"role": "user", "text": "{{#sys.query#}}"}, + ], + "memory": None, + "context": {"enabled": False}, + "vision": {"enabled": False}, + "error_strategy": error_strategy, + }, + } + if default_value: + node["data"]["default_value"] = default_value + return node + + @staticmethod + def create_test_graph_engine(graph_config: dict, user_inputs: dict | None = None): + """Helper method to create a graph engine instance for testing""" + graph = Graph.init(graph_config=graph_config) + variable_pool = { + "system_variables": { + SystemVariableKey.QUERY: "clear", + SystemVariableKey.FILES: [], + SystemVariableKey.CONVERSATION_ID: "abababa", + SystemVariableKey.USER_ID: "aaa", + }, + "user_inputs": user_inputs or {"uid": "takato"}, + } + + return GraphEngine( + tenant_id="111", + app_id="222", + workflow_type=WorkflowType.CHAT, + workflow_id="333", + graph_config=graph_config, + user_id="444", + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.WEB_APP, + call_depth=0, + graph=graph, + variable_pool=variable_pool, + max_execution_steps=500, + max_execution_time=1200, + ) + + +DEFAULT_VALUE_EDGE = [ + { + "id": "start-source-node-target", + "source": "start", + "target": "node", + "sourceHandle": "source", + }, + { + "id": "node-source-answer-target", + "source": "node", + "target": "answer", + "sourceHandle": "source", + }, +] + +FAIL_BRANCH_EDGES = [ + { + "id": "start-source-node-target", + "source": "start", + "target": "node", + "sourceHandle": "source", + }, + { + "id": "node-true-success-target", + "source": "node", + "target": "success", + "sourceHandle": "source", + }, + { + "id": "node-false-error-target", + "source": "node", + "target": "error", + "sourceHandle": "fail-branch", + }, +] + + +def test_code_default_value_continue_on_error(): + error_code = """ + def main() -> dict: + return { + "result": 1 / 0, + } + """ + + graph_config = { + "edges": DEFAULT_VALUE_EDGE, + "nodes": [ + {"data": {"title": "start", "type": "start", "variables": []}, "id": "start"}, + {"data": {"title": "answer", "type": "answer", "answer": "{{#node.result#}}"}, "id": "answer"}, + ContinueOnErrorTestHelper.get_code_node( + error_code, "default-value", [{"key": "result", "type": "number", "value": 132123}] + ), + ], + } + + graph_engine = ContinueOnErrorTestHelper.create_test_graph_engine(graph_config) + events = list(graph_engine.run()) + assert any(isinstance(e, NodeRunExceptionEvent) for e in events) + assert any(isinstance(e, GraphRunPartialSucceededEvent) and 
e.outputs == {"answer": "132123"} for e in events) + assert sum(1 for e in events if isinstance(e, NodeRunStreamChunkEvent)) == 1 + + +def test_code_fail_branch_continue_on_error(): + error_code = """ + def main() -> dict: + return { + "result": 1 / 0, + } + """ + + graph_config = { + "edges": FAIL_BRANCH_EDGES, + "nodes": [ + {"data": {"title": "Start", "type": "start", "variables": []}, "id": "start"}, + { + "data": {"title": "success", "type": "answer", "answer": "node node run successfully"}, + "id": "success", + }, + { + "data": {"title": "error", "type": "answer", "answer": "node node run failed"}, + "id": "error", + }, + ContinueOnErrorTestHelper.get_code_node(error_code), + ], + } + + graph_engine = ContinueOnErrorTestHelper.create_test_graph_engine(graph_config) + events = list(graph_engine.run()) + assert sum(1 for e in events if isinstance(e, NodeRunStreamChunkEvent)) == 1 + assert any(isinstance(e, NodeRunExceptionEvent) for e in events) + assert any( + isinstance(e, GraphRunPartialSucceededEvent) and e.outputs == {"answer": "node node run failed"} for e in events + ) + + +def test_http_node_default_value_continue_on_error(): + """Test HTTP node with default value error strategy""" + graph_config = { + "edges": DEFAULT_VALUE_EDGE, + "nodes": [ + {"data": {"title": "start", "type": "start", "variables": []}, "id": "start"}, + {"data": {"title": "answer", "type": "answer", "answer": "{{#node.response#}}"}, "id": "answer"}, + ContinueOnErrorTestHelper.get_http_node( + "default-value", [{"key": "response", "type": "string", "value": "http node got error response"}] + ), + ], + } + + graph_engine = ContinueOnErrorTestHelper.create_test_graph_engine(graph_config) + events = list(graph_engine.run()) + + assert any(isinstance(e, NodeRunExceptionEvent) for e in events) + assert any( + isinstance(e, GraphRunPartialSucceededEvent) and e.outputs == {"answer": "http node got error response"} + for e in events + ) + assert sum(1 for e in events if isinstance(e, NodeRunStreamChunkEvent)) == 1 + + +def test_http_node_fail_branch_continue_on_error(): + """Test HTTP node with fail-branch error strategy""" + graph_config = { + "edges": FAIL_BRANCH_EDGES, + "nodes": [ + {"data": {"title": "Start", "type": "start", "variables": []}, "id": "start"}, + { + "data": {"title": "success", "type": "answer", "answer": "HTTP request successful"}, + "id": "success", + }, + { + "data": {"title": "error", "type": "answer", "answer": "HTTP request failed"}, + "id": "error", + }, + ContinueOnErrorTestHelper.get_http_node(), + ], + } + + graph_engine = ContinueOnErrorTestHelper.create_test_graph_engine(graph_config) + events = list(graph_engine.run()) + + assert any(isinstance(e, NodeRunExceptionEvent) for e in events) + assert any( + isinstance(e, GraphRunPartialSucceededEvent) and e.outputs == {"answer": "HTTP request failed"} for e in events + ) + assert sum(1 for e in events if isinstance(e, NodeRunStreamChunkEvent)) == 1 + + +def test_tool_node_default_value_continue_on_error(): + """Test tool node with default value error strategy""" + graph_config = { + "edges": DEFAULT_VALUE_EDGE, + "nodes": [ + {"data": {"title": "start", "type": "start", "variables": []}, "id": "start"}, + {"data": {"title": "answer", "type": "answer", "answer": "{{#node.result#}}"}, "id": "answer"}, + ContinueOnErrorTestHelper.get_tool_node( + "default-value", [{"key": "result", "type": "string", "value": "default tool result"}] + ), + ], + } + + graph_engine = ContinueOnErrorTestHelper.create_test_graph_engine(graph_config) + events = 
list(graph_engine.run()) + + assert any(isinstance(e, NodeRunExceptionEvent) for e in events) + assert any( + isinstance(e, GraphRunPartialSucceededEvent) and e.outputs == {"answer": "default tool result"} for e in events + ) + assert sum(1 for e in events if isinstance(e, NodeRunStreamChunkEvent)) == 1 + + + def test_tool_node_fail_branch_continue_on_error(): + """Test tool node with fail-branch error strategy""" + graph_config = { + "edges": FAIL_BRANCH_EDGES, + "nodes": [ + {"data": {"title": "Start", "type": "start", "variables": []}, "id": "start"}, + { + "data": {"title": "success", "type": "answer", "answer": "tool execute successful"}, + "id": "success", + }, + { + "data": {"title": "error", "type": "answer", "answer": "tool execute failed"}, + "id": "error", + }, + ContinueOnErrorTestHelper.get_tool_node(), + ], + } + + graph_engine = ContinueOnErrorTestHelper.create_test_graph_engine(graph_config) + events = list(graph_engine.run()) + + assert any(isinstance(e, NodeRunExceptionEvent) for e in events) + assert any( + isinstance(e, GraphRunPartialSucceededEvent) and e.outputs == {"answer": "tool execute failed"} for e in events + ) + assert sum(1 for e in events if isinstance(e, NodeRunStreamChunkEvent)) == 1 + + + def test_llm_node_default_value_continue_on_error(): + """Test LLM node with default value error strategy""" + graph_config = { + "edges": DEFAULT_VALUE_EDGE, + "nodes": [ + {"data": {"title": "start", "type": "start", "variables": []}, "id": "start"}, + {"data": {"title": "answer", "type": "answer", "answer": "{{#node.answer#}}"}, "id": "answer"}, + ContinueOnErrorTestHelper.get_llm_node( + "default-value", [{"key": "answer", "type": "string", "value": "default LLM response"}] + ), + ], + } + + graph_engine = ContinueOnErrorTestHelper.create_test_graph_engine(graph_config) + events = list(graph_engine.run()) + + assert any(isinstance(e, NodeRunExceptionEvent) for e in events) + assert any( + isinstance(e, GraphRunPartialSucceededEvent) and e.outputs == {"answer": "default LLM response"} for e in events + ) + assert sum(1 for e in events if isinstance(e, NodeRunStreamChunkEvent)) == 1 + + + def test_llm_node_fail_branch_continue_on_error(): + """Test LLM node with fail-branch error strategy""" + graph_config = { + "edges": FAIL_BRANCH_EDGES, + "nodes": [ + {"data": {"title": "Start", "type": "start", "variables": []}, "id": "start"}, + { + "data": {"title": "success", "type": "answer", "answer": "LLM request successful"}, + "id": "success", + }, + { + "data": {"title": "error", "type": "answer", "answer": "LLM request failed"}, + "id": "error", + }, + ContinueOnErrorTestHelper.get_llm_node(), + ], + } + + graph_engine = ContinueOnErrorTestHelper.create_test_graph_engine(graph_config) + events = list(graph_engine.run()) + + assert any(isinstance(e, NodeRunExceptionEvent) for e in events) + assert any( + isinstance(e, GraphRunPartialSucceededEvent) and e.outputs == {"answer": "LLM request failed"} for e in events + ) + assert sum(1 for e in events if isinstance(e, NodeRunStreamChunkEvent)) == 1 + + + def test_status_code_error_http_node_fail_branch_continue_on_error(): + """Test HTTP node hitting an error status code with fail-branch error strategy""" + graph_config = { + "edges": FAIL_BRANCH_EDGES, + "nodes": [ + {"data": {"title": "Start", "type": "start", "variables": []}, "id": "start"}, + { + "data": {"title": "success", "type": "answer", "answer": "http execute successful"}, + "id": "success", + }, + { + "data": {"title": "error", "type": "answer", "answer": "http execute failed"}, + "id": "error",
+ }, + ContinueOnErrorTestHelper.get_error_status_code_http_node(), + ], + } + + graph_engine = ContinueOnErrorTestHelper.create_test_graph_engine(graph_config) + events = list(graph_engine.run()) + + assert any(isinstance(e, NodeRunExceptionEvent) for e in events) + assert any( + isinstance(e, GraphRunPartialSucceededEvent) and e.outputs == {"answer": "http execute failed"} for e in events + ) + assert sum(1 for e in events if isinstance(e, NodeRunStreamChunkEvent)) == 1 + + + def test_variable_pool_error_type_variable(): + graph_config = { + "edges": FAIL_BRANCH_EDGES, + "nodes": [ + {"data": {"title": "Start", "type": "start", "variables": []}, "id": "start"}, + { + "data": {"title": "success", "type": "answer", "answer": "http execute successful"}, + "id": "success", + }, + { + "data": {"title": "error", "type": "answer", "answer": "http execute failed"}, + "id": "error", + }, + ContinueOnErrorTestHelper.get_error_status_code_http_node(), + ], + } + + graph_engine = ContinueOnErrorTestHelper.create_test_graph_engine(graph_config) + list(graph_engine.run()) + error_message = graph_engine.graph_runtime_state.variable_pool.get(["node", "error_message"]) + error_type = graph_engine.graph_runtime_state.variable_pool.get(["node", "error_type"]) + assert error_message is not None + assert error_type.value == "HTTPResponseCodeError" + + + def test_no_node_in_fail_branch_continue_on_error(): + """Test continue-on-error when no node is attached to the fail branch""" + graph_config = { + "edges": FAIL_BRANCH_EDGES[:-1], + "nodes": [ + {"data": {"title": "Start", "type": "start", "variables": []}, "id": "start"}, + { + "data": {"title": "success", "type": "answer", "answer": "HTTP request successful"}, + "id": "success", + }, + ContinueOnErrorTestHelper.get_http_node(), + ], + } + + graph_engine = ContinueOnErrorTestHelper.create_test_graph_engine(graph_config) + events = list(graph_engine.run()) + + assert any(isinstance(e, NodeRunExceptionEvent) for e in events) + assert any(isinstance(e, GraphRunPartialSucceededEvent) and e.outputs == {} for e in events) + assert sum(1 for e in events if isinstance(e, NodeRunStreamChunkEvent)) == 0 diff --git a/api/tests/unit_tests/oss/__mock/base.py b/api/tests/unit_tests/oss/__mock/base.py index a1eaaab9c3..bb3c9716c3 100644 --- a/api/tests/unit_tests/oss/__mock/base.py +++ b/api/tests/unit_tests/oss/__mock/base.py @@ -6,13 +6,17 @@ from extensions.storage.base_storage import BaseStorage def get_example_folder() -> str: - return "/dify" + return "~/dify" def get_example_bucket() -> str: return "dify" +def get_opendal_bucket() -> str: + return "./dify" + + def get_example_filename() -> str: return "test.txt" @@ -22,14 +26,14 @@ def get_example_data() -> bytes: def get_example_filepath() -> str: - return "/test" + return "~/test" class BaseStorageTest: @pytest.fixture(autouse=True) - def setup_method(self): + def setup_method(self, *args, **kwargs): """Should be implemented in child classes to setup specific storage.""" - self.storage = BaseStorage() + self.storage: BaseStorage def test_save(self): """Test saving data.""" diff --git a/api/tests/unit_tests/oss/local/test_local_fs.py b/api/tests/unit_tests/oss/local/test_local_fs.py deleted file mode 100644 index 03ce7d2450..0000000000 --- a/api/tests/unit_tests/oss/local/test_local_fs.py +++ /dev/null @@ -1,18 +0,0 @@ -from collections.abc import Generator - -import pytest - -from extensions.storage.local_fs_storage import LocalFsStorage -from tests.unit_tests.oss.__mock.base import ( - BaseStorageTest, - get_example_folder, -) -from
tests.unit_tests.oss.__mock.local import setup_local_fs_mock - - - class TestLocalFS(BaseStorageTest): - @pytest.fixture(autouse=True) - def setup_method(self, setup_local_fs_mock): - """Executed before each test method.""" - self.storage = LocalFsStorage() - self.storage.folder = get_example_folder() diff --git a/api/tests/unit_tests/oss/local/__init__.py b/api/tests/unit_tests/oss/opendal/__init__.py similarity index 100% rename from api/tests/unit_tests/oss/local/__init__.py rename to api/tests/unit_tests/oss/opendal/__init__.py diff --git a/api/tests/unit_tests/oss/opendal/test_opendal.py b/api/tests/unit_tests/oss/opendal/test_opendal.py new file mode 100644 index 0000000000..1caee55677 --- /dev/null +++ b/api/tests/unit_tests/oss/opendal/test_opendal.py @@ -0,0 +1,88 @@ +import os +from collections.abc import Generator +from pathlib import Path + +import pytest + +from configs.middleware.storage.opendal_storage_config import OpenDALScheme +from extensions.storage.opendal_storage import OpenDALStorage +from tests.unit_tests.oss.__mock.base import ( + get_example_data, + get_example_filename, + get_example_filepath, + get_opendal_bucket, +) + + +class TestOpenDAL: + @pytest.fixture(autouse=True) + def setup_method(self, *args, **kwargs): + """Executed before each test method.""" + self.storage = OpenDALStorage( + scheme=OpenDALScheme.FS, + root=get_opendal_bucket(), + ) + + @pytest.fixture(scope="class", autouse=True) + def teardown_class(self, request): + """Clean up after all tests in the class.""" + + def cleanup(): + folder = Path(get_opendal_bucket()) + if folder.exists() and folder.is_dir(): + for item in folder.iterdir(): + if item.is_file(): + item.unlink() + elif item.is_dir(): + item.rmdir() + folder.rmdir() + + # register as a finalizer so the bucket is removed after the class's + # tests, as the docstring says, rather than during fixture setup + request.addfinalizer(cleanup) + + def test_save_and_exists(self): + """Test saving data and checking existence.""" + filename = get_example_filename() + data = get_example_data() + + assert not self.storage.exists(filename) + self.storage.save(filename, data) + assert self.storage.exists(filename) + + def test_load_once(self): + """Test loading data once.""" + filename = get_example_filename() + data = get_example_data() + + self.storage.save(filename, data) + loaded_data = self.storage.load_once(filename) + assert loaded_data == data + + def test_load_stream(self): + """Test loading data as a stream.""" + filename = get_example_filename() + data = get_example_data() + + self.storage.save(filename, data) + generator = self.storage.load_stream(filename) + assert isinstance(generator, Generator) + assert next(generator) == data + + def test_download(self): + """Test downloading data to a file.""" + filename = get_example_filename() + filepath = str(Path(get_opendal_bucket()) / filename) + data = get_example_data() + + self.storage.save(filename, data) + self.storage.download(filename, filepath) + + def test_delete(self): + """Test deleting a file.""" + filename = get_example_filename() + data = get_example_data() + + self.storage.save(filename, data) + assert self.storage.exists(filename) + + self.storage.delete(filename) + assert not self.storage.exists(filename) diff --git a/dev/pytest/pytest_vdb.sh b/dev/pytest/pytest_vdb.sh index 02a9f49279..c68a94c79b 100755 --- a/dev/pytest/pytest_vdb.sh +++ b/dev/pytest/pytest_vdb.sh @@ -14,3 +14,4 @@ pytest api/tests/integration_tests/vdb/chroma \ api/tests/integration_tests/vdb/upstash \ api/tests/integration_tests/vdb/couchbase \ api/tests/integration_tests/vdb/oceanbase \ + api/tests/integration_tests/vdb/tidb_vector \ diff --git
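The new test_opendal.py drives OpenDALStorage through save/load/stream/download/delete against the fs scheme. Underneath, the opendal package pinned in pyproject.toml exposes an Operator with a matching surface; a standalone sketch (API names per opendal ~0.45, worth double-checking against the installed version):

    import opendal

    # fs-backed operator rooted at ./dify, mirroring get_opendal_bucket()
    op = opendal.Operator("fs", root="./dify")
    op.write("test.txt", b"test data")
    assert bytes(op.read("test.txt")) == b"test data"
    op.delete("test.txt")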
a/docker-legacy/docker-compose.yaml b/docker-legacy/docker-compose.yaml index e7a2daf9cd..4392407a73 100644 --- a/docker-legacy/docker-compose.yaml +++ b/docker-legacy/docker-compose.yaml @@ -2,7 +2,7 @@ version: '3' services: # API service api: - image: langgenius/dify-api:0.13.1 + image: langgenius/dify-api:0.13.2 restart: always environment: # Startup mode, 'api' starts the API server. @@ -227,7 +227,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:0.13.1 + image: langgenius/dify-api:0.13.2 restart: always environment: CONSOLE_WEB_URL: '' @@ -397,7 +397,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:0.13.1 + image: langgenius/dify-web:0.13.2 restart: always environment: # The base URL of console application api server, refers to the Console base URL of WEB service if console domain is diff --git a/docker/.env.example b/docker/.env.example index 719a025877..db85e5d511 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -281,10 +281,23 @@ CONSOLE_CORS_ALLOW_ORIGINS=* # ------------------------------ # The type of storage to use for storing user files. -# Supported values are `local` , `s3` , `azure-blob` , `google-storage`, `tencent-cos`, `huawei-obs`, `volcengine-tos`, `baidu-obs`, `supabase` -# Default: `local` -STORAGE_TYPE=local -STORAGE_LOCAL_PATH=storage +# Supported values are `opendal` , `s3` , `azure-blob` , `google-storage`, `tencent-cos`, `huawei-obs`, `volcengine-tos`, `baidu-obs`, `supabase` +# Default: `opendal` +STORAGE_TYPE=opendal + +# Apache OpenDAL Configuration, refer to https://github.com/apache/opendal +# The scheme for the OpenDAL storage. +STORAGE_OPENDAL_SCHEME=fs +# OpenDAL FS +OPENDAL_FS_ROOT=storage +# OpenDAL S3 +OPENDAL_S3_ROOT=/ +OPENDAL_S3_BUCKET=your-bucket-name +OPENDAL_S3_ENDPOINT=https://s3.amazonaws.com +OPENDAL_S3_ACCESS_KEY_ID=your-access-key +OPENDAL_S3_SECRET_ACCESS_KEY=your-secret-key +OPENDAL_S3_REGION=your-region +OPENDAL_S3_SERVER_SIDE_ENCRYPTION= # S3 Configuration # Whether to use AWS managed IAM roles for authenticating with the S3 service. diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index e8b6bdd755..56effa6293 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -292,7 +292,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:0.13.1 + image: langgenius/dify-api:0.13.2 restart: always environment: # Use the shared environment variables. @@ -312,7 +312,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:0.13.1 + image: langgenius/dify-api:0.13.2 restart: always environment: # Use the shared environment variables. @@ -331,7 +331,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:0.13.1 + image: langgenius/dify-web:0.13.2 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -493,6 +493,16 @@ services: - '${EXPOSE_NGINX_PORT:-80}:${NGINX_PORT:-80}' - '${EXPOSE_NGINX_SSL_PORT:-443}:${NGINX_SSL_PORT:-443}' + # The TiDB vector store. + # For production use, please refer to https://github.com/pingcap/tidb-docker-compose + tidb: + image: pingcap/tidb:v8.4.0 + ports: + - "4000:4000" + command: + - --store=unistore + restart: always + # The Weaviate vector store. 
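The tidb service above runs a single-node TiDB on the unistore engine — enough for the vdb tests, not for production, hence the pointer to tidb-docker-compose. It speaks the MySQL protocol on port 4000 with user root and an empty password, the same coordinates the updated tidb_vector test fixture uses. A quick smoke check with any MySQL client (pymysql shown here as one assumed option):

    import pymysql

    # Matches the test config: localhost:4000, user root, empty password, db "test"
    conn = pymysql.connect(host="localhost", port=4000, user="root", password="", database="test")
    try:
        with conn.cursor() as cur:
            cur.execute("SELECT VERSION()")
            print(cur.fetchone())  # e.g. ('8.0.11-TiDB-v8.4.0',)
    finally:
        conn.close()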
weaviate: image: semitechnologies/weaviate:1.19.0 diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/layout.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/layout.tsx index 96ee874d53..7a5347c7d5 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/layout.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/layout.tsx @@ -141,7 +141,7 @@ const AppDetailLayout: FC = (props) => { if (!appDetail) { return ( -
+
) @@ -152,7 +152,7 @@ const AppDetailLayout: FC = (props) => { {appDetail && ( )} -
+
{children}
diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/chartView.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/chartView.tsx index b5d3462dfa..bb1e4fd95b 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/chartView.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/chartView.tsx @@ -28,7 +28,7 @@ export default function ChartView({ appId }: IChartViewProps) { const [period, setPeriod] = useState({ name: t('appLog.filter.period.last7days'), query: { start: today.subtract(7, 'day').startOf('day').format(queryDateFormat), end: today.endOf('day').format(queryDateFormat) } }) const onSelect = (item: Item) => { - if (item.value === '-1') { + if (item.value === -1) { setPeriod({ name: item.name, query: undefined }) } else if (item.value === 0) { diff --git a/web/app/(commonLayout)/apps/AppCard.tsx b/web/app/(commonLayout)/apps/AppCard.tsx index 1ffb132cf8..dabe75ee62 100644 --- a/web/app/(commonLayout)/apps/AppCard.tsx +++ b/web/app/(commonLayout)/apps/AppCard.tsx @@ -9,7 +9,7 @@ import s from './style.module.css' import cn from '@/utils/classnames' import type { App } from '@/types/app' import Confirm from '@/app/components/base/confirm' -import { ToastContext } from '@/app/components/base/toast' +import Toast, { ToastContext } from '@/app/components/base/toast' import { copyApp, deleteApp, exportAppConfig, updateAppInfo } from '@/service/apps' import DuplicateAppModal from '@/app/components/app/duplicate-modal' import type { DuplicateAppModalProps } from '@/app/components/app/duplicate-modal' @@ -21,8 +21,6 @@ import Divider from '@/app/components/base/divider' import { getRedirection } from '@/utils/app-redirection' import { useProviderContext } from '@/context/provider-context' import { NEED_REFRESH_APP_LIST_KEY } from '@/config' -import { AiText, ChatBot, CuteRobot } from '@/app/components/base/icons/src/vender/solid/communication' -import { Route } from '@/app/components/base/icons/src/vender/solid/mapsAndTravel' import type { CreateAppModalProps } from '@/app/components/explore/create-app-modal' import EditAppModal from '@/app/components/explore/create-app-modal' import SwitchAppModal from '@/app/components/app/switch-app-modal' @@ -31,6 +29,8 @@ import TagSelector from '@/app/components/base/tag-management/selector' import type { EnvironmentVariable } from '@/app/components/workflow/types' import DSLExportConfirmModal from '@/app/components/workflow/dsl-export-confirm-modal' import { fetchWorkflowDraft } from '@/service/workflow' +import { fetchInstalledAppList } from '@/service/explore' +import { AppTypeIcon } from '@/app/components/app/type-selector' export type AppCardProps = { app: App @@ -209,6 +209,21 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => { e.preventDefault() setShowConfirmDelete(true) } + const onClickInstalledApp = async (e: React.MouseEvent) => { + e.stopPropagation() + props.onClick?.() + e.preventDefault() + try { + const { installed_apps }: any = await fetchInstalledAppList(app.id) || {} + if (installed_apps?.length > 0) + window.open(`/explore/installed/${installed_apps[0].id}`, '_blank') + else + throw new Error('No app found in Explore') + } + catch (e: any) { + Toast.notify({ type: 'error', message: `${e.message || e}` }) + } + } return (
+
{ e.preventDefault() getRedirection(isCurrentWorkspaceEditor, app, push) }} - className='relative group col-span-1 bg-white border-2 border-solid border-transparent rounded-xl shadow-sm flex flex-col transition-all duration-200 ease-in-out cursor-pointer hover:shadow-lg' + className='relative h-[160px] group col-span-1 bg-components-card-bg border-[1px] border-solid border-components-card-border rounded-xl shadow-sm inline-flex flex-col transition-all duration-200 ease-in-out cursor-pointer hover:shadow-lg' >
@@ -268,30 +287,14 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => { background={app.icon_background} imageUrl={app.icon_url} /> - - {app.mode === 'advanced-chat' && ( - - )} - {app.mode === 'agent-chat' && ( - - )} - {app.mode === 'chat' && ( - - )} - {app.mode === 'completion' && ( - - )} - {app.mode === 'workflow' && ( - - )} - +
-
+
{app.name}
-
- {app.mode === 'advanced-chat' &&
{t('app.types.chatbot').toUpperCase()}
} +
+ {app.mode === 'advanced-chat' &&
{t('app.types.advanced').toUpperCase()}
} {app.mode === 'chat' &&
{t('app.types.chatbot').toUpperCase()}
} {app.mode === 'agent-chat' &&
{t('app.types.agent').toUpperCase()}
} {app.mode === 'workflow' &&
{t('app.types.workflow').toUpperCase()}
} @@ -299,7 +302,7 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
-
+
{ />
-
+
} @@ -342,7 +345,7 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
- +
} btnClassName={open => @@ -353,10 +356,10 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => { } popupClassName={ (app.mode === 'completion' || app.mode === 'chat') - ? '!w-[238px] translate-x-[-110px]' - : '' + ? '!w-[256px] translate-x-[-224px]' + : '!w-[160px] translate-x-[-128px]' } - className={'!w-[128px] h-fit !z-20'} + className={'h-fit !z-20'} />
diff --git a/web/app/(commonLayout)/apps/Apps.tsx b/web/app/(commonLayout)/apps/Apps.tsx index 9d6345aa6c..5269571c21 100644 --- a/web/app/(commonLayout)/apps/Apps.tsx +++ b/web/app/(commonLayout)/apps/Apps.tsx @@ -125,7 +125,7 @@ const Apps = () => { return ( <> -
+
{ />
- + {(data && data[0].total > 0) + ?
+ {isCurrentWorkspaceEditor + && } + {data.map(({ data: apps }) => apps.map(app => ( + + )))} +
+ :
+ {isCurrentWorkspaceEditor + && } + +
} +
{showTagManagementModal && ( @@ -160,3 +166,21 @@ const Apps = () => { } export default Apps + +function NoAppsFound() { + const { t } = useTranslation() + function renderDefaultCard() { + const defaultCards = Array.from({ length: 36 }, (_, index) => ( +
+ )) + return defaultCards + } + return ( + <> + {renderDefaultCard()} +
+ {t('app.newApp.noAppsFound')} +
+ + ) +} diff --git a/web/app/(commonLayout)/apps/NewAppCard.tsx b/web/app/(commonLayout)/apps/NewAppCard.tsx index c0dffa99ab..d353cf2394 100644 --- a/web/app/(commonLayout)/apps/NewAppCard.tsx +++ b/web/app/(commonLayout)/apps/NewAppCard.tsx @@ -11,13 +11,15 @@ import CreateAppModal from '@/app/components/app/create-app-modal' import CreateFromDSLModal, { CreateFromDSLModalTab } from '@/app/components/app/create-from-dsl-modal' import { useProviderContext } from '@/context/provider-context' import { FileArrow01, FilePlus01, FilePlus02 } from '@/app/components/base/icons/src/vender/line/files' +import cn from '@/utils/classnames' export type CreateAppCardProps = { + className?: string onSuccess?: () => void } // eslint-disable-next-line react/display-name -const CreateAppCard = forwardRef(({ onSuccess }, ref) => { +const CreateAppCard = forwardRef(({ className, onSuccess }, ref) => { const { t } = useTranslation() const { onPlanInfoChanged } = useProviderContext() const searchParams = useSearchParams() @@ -36,26 +38,26 @@ const CreateAppCard = forwardRef(({ onSuc }, [dslUrl]) return ( -
-
{t('app.createApp')}
-
setShowNewAppModal(true)}> +
{t('app.createApp')}
+
setShowNewAppModal(true)}> {t('app.newApp.startFromBlank')}
-
setShowNewAppTemplateDialog(true)}> +
setShowNewAppTemplateDialog(true)}> {t('app.newApp.startFromTemplate')}
setShowCreateFromDSLModal(true)} > - ) }) diff --git a/web/app/(commonLayout)/apps/page.tsx b/web/app/(commonLayout)/apps/page.tsx index ab9852e462..972aabc8bc 100644 --- a/web/app/(commonLayout)/apps/page.tsx +++ b/web/app/(commonLayout)/apps/page.tsx @@ -1,9 +1,10 @@ 'use client' import { useContextSelector } from 'use-context-selector' import { useTranslation } from 'react-i18next' +import { RiDiscordFill, RiGithubFill } from '@remixicon/react' +import Link from 'next/link' import style from '../list.module.css' import Apps from './Apps' -import classNames from '@/utils/classnames' import AppContext from '@/context/app-context' import { LicenseStatus } from '@/types/feature' @@ -12,14 +13,18 @@ const AppList = () => { const systemFeatures = useContextSelector(AppContext, v => v.systemFeatures) return ( -
+
{systemFeatures.license.status === LicenseStatus.NONE &&

{t('app.join')}

-

{t('app.communityIntro')}

+

{t('app.communityIntro')}

- - + + + + + +
}
diff --git a/web/app/(commonLayout)/list.module.css b/web/app/(commonLayout)/list.module.css index bb2aa8606c..2fc6469a6d 100644 --- a/web/app/(commonLayout)/list.module.css +++ b/web/app/(commonLayout)/list.module.css @@ -201,14 +201,6 @@ @apply block w-6 h-6 bg-center bg-contain; } -.githubIcon { - background-image: url("./apps/assets/github.svg"); -} - -.discordIcon { - background-image: url("./apps/assets/discord.svg"); -} - /* #region new app dialog */ .newItemCaption { @apply inline-flex items-center mb-2 text-sm font-medium; diff --git a/web/app/components/app-sidebar/app-info.tsx b/web/app/components/app-sidebar/app-info.tsx index 12fe5cba46..12f9c59cd1 100644 --- a/web/app/components/app-sidebar/app-info.tsx +++ b/web/app/components/app-sidebar/app-info.tsx @@ -237,7 +237,7 @@ const AppInfo = ({ expand }: IAppInfoProps) => { {appDetail.mode === 'advanced-chat' && ( <>
{t('app.types.chatbot').toUpperCase()}
-
{t('app.newApp.advanced').toUpperCase()}
+
{t('app.types.advanced').toUpperCase()}
)} {appDetail.mode === 'agent-chat' && ( @@ -246,13 +246,13 @@ const AppInfo = ({ expand }: IAppInfoProps) => { {appDetail.mode === 'chat' && ( <>
{t('app.types.chatbot').toUpperCase()}
-
{(t('app.newApp.basic').toUpperCase())}
+
{(t('app.types.basic').toUpperCase())}
)} {appDetail.mode === 'completion' && ( <>
{t('app.types.completion').toUpperCase()}
-
{(t('app.newApp.basic').toUpperCase())}
+
{(t('app.types.basic').toUpperCase())}
)} {appDetail.mode === 'workflow' && ( @@ -299,7 +299,7 @@ const AppInfo = ({ expand }: IAppInfoProps) => { {appDetail.mode === 'advanced-chat' && ( <>
{t('app.types.chatbot').toUpperCase()}
-
{t('app.newApp.advanced').toUpperCase()}
+
{t('app.types.advanced').toUpperCase()}
)} {appDetail.mode === 'agent-chat' && ( @@ -308,13 +308,13 @@ const AppInfo = ({ expand }: IAppInfoProps) => { {appDetail.mode === 'chat' && ( <>
{t('app.types.chatbot').toUpperCase()}
-
{(t('app.newApp.basic').toUpperCase())}
+
{(t('app.types.basic').toUpperCase())}
)} {appDetail.mode === 'completion' && ( <>
{t('app.types.completion').toUpperCase()}
-
{(t('app.newApp.basic').toUpperCase())}
+
{(t('app.types.basic').toUpperCase())}
)} {appDetail.mode === 'workflow' && ( @@ -398,7 +398,7 @@ const AppInfo = ({ expand }: IAppInfoProps) => { )} />
- {showSwitchTip === 'chat' ? t('app.newApp.advanced') : t('app.types.workflow')} + {showSwitchTip === 'chat' ? t('app.types.advanced') : t('app.types.workflow')} BETA
{t('app.newApp.advancedFor').toLocaleUpperCase()}
diff --git a/web/app/components/app/annotation/add-annotation-modal/edit-item/index.tsx b/web/app/components/app/annotation/add-annotation-modal/edit-item/index.tsx index 4da6b7cac4..032e4b8357 100644 --- a/web/app/components/app/annotation/add-annotation-modal/edit-item/index.tsx +++ b/web/app/components/app/annotation/add-annotation-modal/edit-item/index.tsx @@ -2,7 +2,7 @@ import type { FC } from 'react' import React from 'react' import { useTranslation } from 'react-i18next' -import Textarea from 'rc-textarea' +import Textarea from '@/app/components/base/textarea' import { Robot, User } from '@/app/components/base/icons/src/public/avatar' export enum EditItemType { @@ -31,12 +31,10 @@ const EditItem: FC = ({ {avatar}
-
{name}
+
{name}