diff --git a/.gitignore b/.gitignore index 53996f0c63..7a3f352577 100644 --- a/.gitignore +++ b/.gitignore @@ -175,6 +175,8 @@ docker/volumes/pgvector/data/* docker/volumes/pgvecto_rs/data/* docker/nginx/conf.d/default.conf +docker/nginx/ssl/* +!docker/nginx/ssl/.gitkeep docker/middleware.env sdks/python-client/build diff --git a/LICENSE b/LICENSE index 06b0fa1d12..d7b8373839 100644 --- a/LICENSE +++ b/LICENSE @@ -6,8 +6,9 @@ Dify is licensed under the Apache License 2.0, with the following additional con a. Multi-tenant service: Unless explicitly authorized by Dify in writing, you may not use the Dify source code to operate a multi-tenant environment. - Tenant Definition: Within the context of Dify, one tenant corresponds to one workspace. The workspace provides a separated area for each tenant's data and configurations. - -b. LOGO and copyright information: In the process of using Dify's frontend components, you may not remove or modify the LOGO or copyright information in the Dify console or applications. This restriction is inapplicable to uses of Dify that do not involve its frontend components. + +b. LOGO and copyright information: In the process of using Dify's frontend, you may not remove or modify the LOGO or copyright information in the Dify console or applications. This restriction is inapplicable to uses of Dify that do not involve its frontend. + - Frontend Definition: For the purposes of this license, the "frontend" of Dify includes all components located in the `web/` directory when running Dify from the raw source code, or the "web" image when running Dify with Docker. Please contact business@dify.ai by email to inquire about licensing matters. diff --git a/README.md b/README.md index 75094d39db..cd783501e2 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,9 @@ ![cover-v5-optimized](https://github.com/langgenius/dify/assets/13230914/f9e19af5-61ba-4119-b926-d10c4c06ebab) +
<p align="center">
+  📌 Introducing Dify Workflow File Upload: Recreate Google NotebookLM Podcast
+</p>
+
<p align="center">
Dify Cloud · Self-hosting · @@ -168,7 +172,7 @@ Star Dify on GitHub and be instantly notified of new releases. > Before installing Dify, make sure your machine meets the following minimum system requirements: > >- CPU >= 2 Core ->- RAM >= 4GB +>- RAM >= 4 GiB
diff --git a/README_CN.md b/README_CN.md index 4553524ce5..070951699a 100644 --- a/README_CN.md +++ b/README_CN.md @@ -154,7 +154,7 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI 我们提供[ Dify 云服务](https://dify.ai),任何人都可以零设置尝试。它提供了自部署版本的所有功能,并在沙盒计划中包含 200 次免费的 GPT-4 调用。 - **自托管 Dify 社区版
</br>**
-使用这个[入门指南](#quick-start)快速在您的环境中运行 Dify。
+使用这个[入门指南](#快速启动)快速在您的环境中运行 Dify。
 使用我们的[文档](https://docs.dify.ai)进行进一步的参考和更深入的说明。

 - **面向企业/组织的 Dify</br>
** @@ -174,7 +174,7 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI 在安装 Dify 之前,请确保您的机器满足以下最低系统要求: - CPU >= 2 Core -- RAM >= 4GB +- RAM >= 4 GiB ### 快速启动 diff --git a/api/.env.example b/api/.env.example index aa155003ab..184a811c51 100644 --- a/api/.env.example +++ b/api/.env.example @@ -31,8 +31,17 @@ REDIS_HOST=localhost REDIS_PORT=6379 REDIS_USERNAME= REDIS_PASSWORD=difyai123456 +REDIS_USE_SSL=false REDIS_DB=0 +# redis Sentinel configuration. +REDIS_USE_SENTINEL=false +REDIS_SENTINELS= +REDIS_SENTINEL_SERVICE_NAME= +REDIS_SENTINEL_USERNAME= +REDIS_SENTINEL_PASSWORD= +REDIS_SENTINEL_SOCKET_TIMEOUT=0.1 + # PostgreSQL database configuration DB_USERNAME=postgres DB_PASSWORD=difyai123456 @@ -42,7 +51,7 @@ DB_DATABASE=dify # Storage configuration # use for store upload files, private keys... -# storage type: local, s3, azure-blob, google-storage, tencent-cos, huawei-obs, volcengine-tos, baidu-obs, supabase +# storage type: local, s3, aliyun-oss, azure-blob, baidu-obs, google-storage, huawei-obs, oci-storage, tencent-cos, volcengine-tos, supabase STORAGE_TYPE=local STORAGE_LOCAL_PATH=storage S3_USE_AWS_MANAGED_IAM=false @@ -111,7 +120,7 @@ SUPABASE_URL=your-server-url WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,* CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,* -# Vector database configuration, support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, vikingdb +# Vector database configuration, support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, vikingdb, upstash VECTOR_STORE=weaviate # Weaviate configuration @@ -220,6 +229,10 @@ BAIDU_VECTOR_DB_DATABASE=dify BAIDU_VECTOR_DB_SHARD=1 BAIDU_VECTOR_DB_REPLICAS=3 +# Upstash configuration +UPSTASH_VECTOR_URL=your-server-url +UPSTASH_VECTOR_TOKEN=your-access-token + # ViKingDB configuration VIKINGDB_ACCESS_KEY=your-ak VIKINGDB_SECRET_KEY=your-sk @@ -233,10 +246,13 @@ VIKINGDB_SOCKET_TIMEOUT=30 UPLOAD_FILE_SIZE_LIMIT=15 UPLOAD_FILE_BATCH_LIMIT=5 UPLOAD_IMAGE_FILE_SIZE_LIMIT=10 +UPLOAD_VIDEO_FILE_SIZE_LIMIT=100 +UPLOAD_AUDIO_FILE_SIZE_LIMIT=50 # Model Configuration MULTIMODAL_SEND_IMAGE_FORMAT=base64 PROMPT_GENERATION_MAX_TOKENS=512 +CODE_GENERATION_MAX_TOKENS=1024 # Mail configuration, support: resend, smtp MAIL_TYPE= @@ -302,6 +318,10 @@ RESPECT_XFORWARD_HEADERS_ENABLED=false # Log file path LOG_FILE= +# Log file max size, the unit is MB +LOG_FILE_MAX_SIZE=20 +# Log file max backup count +LOG_FILE_BACKUP_COUNT=5 # Indexing configuration INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=1000 @@ -310,6 +330,7 @@ INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=1000 WORKFLOW_MAX_EXECUTION_STEPS=500 WORKFLOW_MAX_EXECUTION_TIME=1200 WORKFLOW_CALL_MAX_DEPTH=5 +MAX_VARIABLE_SIZE=204800 # App configuration APP_MAX_EXECUTION_TIME=1200 @@ -327,3 +348,6 @@ POSITION_TOOL_EXCLUDES= POSITION_PROVIDER_PINS= POSITION_PROVIDER_INCLUDES= POSITION_PROVIDER_EXCLUDES= + +# Reset password token expiry minutes +RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5 diff --git a/api/.vscode/launch.json.example b/api/.vscode/launch.json.example index e9f8e42dd5..b9e32e2511 100644 --- a/api/.vscode/launch.json.example +++ b/api/.vscode/launch.json.example @@ -1,8 +1,15 @@ { "version": "0.2.0", + "compounds": [ + { + "name": "Launch Flask and Celery", + "configurations": ["Python: Flask", "Python: Celery"] + } + ], "configurations": [ { "name": "Python: Flask", + "consoleName": "Flask", "type": "debugpy", "request": "launch", "python": "${workspaceFolder}/.venv/bin/python", @@ -17,12 +24,12 @@ 
}, "args": [ "run", - "--host=0.0.0.0", "--port=5001" ] }, { "name": "Python: Celery", + "consoleName": "Celery", "type": "debugpy", "request": "launch", "python": "${workspaceFolder}/.venv/bin/python", @@ -45,10 +52,10 @@ "-c", "1", "--loglevel", - "info", + "DEBUG", "-Q", "dataset,generation,mail,ops_trace,app_deletion" ] - }, + } ] -} \ No newline at end of file +} diff --git a/api/Dockerfile b/api/Dockerfile index 6483f8281b..f078181264 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -55,7 +55,9 @@ RUN apt-get update \ && echo "deb http://deb.debian.org/debian testing main" > /etc/apt/sources.list \ && apt-get update \ # For Security - && apt-get install -y --no-install-recommends zlib1g=1:1.3.dfsg+really1.3.1-1 expat=2.6.3-1 libldap-2.5-0=2.5.18+dfsg-3 perl=5.38.2-5 libsqlite3-0=3.46.0-1 \ + && apt-get install -y --no-install-recommends zlib1g=1:1.3.dfsg+really1.3.1-1 expat=2.6.3-1 libldap-2.5-0=2.5.18+dfsg-3+b1 perl=5.40.0-6 libsqlite3-0=3.46.1-1 \ + # install a chinese font to support the use of tools like matplotlib + && apt-get install -y fonts-noto-cjk \ && apt-get autoremove -y \ && rm -rf /var/lib/apt/lists/* diff --git a/api/app.py b/api/app.py index 7fef62cd38..ed214bde97 100644 --- a/api/app.py +++ b/api/app.py @@ -1,5 +1,7 @@ import os +from configs import dify_config + if os.environ.get("DEBUG", "false").lower() != "true": from gevent import monkey @@ -20,6 +22,7 @@ from app_factory import create_app # DO NOT REMOVE BELOW from events import event_handlers # noqa: F401 +from extensions.ext_database import db # TODO: Find a way to avoid importing models here from models import account, dataset, model, source, task, tool, tools, web # noqa: F401 @@ -35,17 +38,11 @@ if hasattr(time, "tzset"): time.tzset() -# ------------- -# Configuration -# ------------- -config_type = os.getenv("EDITION", default="SELF_HOSTED") # ce edition first - - # create app app = create_app() celery = app.extensions["celery"] -if app.config.get("TESTING"): +if dify_config.TESTING: print("App is running in TESTING mode") @@ -53,15 +50,15 @@ if app.config.get("TESTING"): def after_request(response): """Add Version headers to the response.""" response.set_cookie("remember_token", "", expires=0) - response.headers.add("X-Version", app.config["CURRENT_VERSION"]) - response.headers.add("X-Env", app.config["DEPLOY_ENV"]) + response.headers.add("X-Version", dify_config.CURRENT_VERSION) + response.headers.add("X-Env", dify_config.DEPLOY_ENV) return response @app.route("/health") def health(): return Response( - json.dumps({"pid": os.getpid(), "status": "ok", "version": app.config["CURRENT_VERSION"]}), + json.dumps({"pid": os.getpid(), "status": "ok", "version": dify_config.CURRENT_VERSION}), status=200, content_type="application/json", ) diff --git a/api/app_factory.py b/api/app_factory.py index 04654c2699..aba78ccab8 100644 --- a/api/app_factory.py +++ b/api/app_factory.py @@ -10,9 +10,6 @@ if os.environ.get("DEBUG", "false").lower() != "true": grpc.experimental.gevent.init_gevent() import json -import logging -import sys -from logging.handlers import RotatingFileHandler from flask import Flask, Response, request from flask_cors import CORS @@ -27,6 +24,7 @@ from extensions import ( ext_compress, ext_database, ext_hosting_provider, + ext_logging, ext_login, ext_mail, ext_migrate, @@ -70,43 +68,7 @@ def create_flask_app_with_configs() -> Flask: def create_app() -> Flask: app = create_flask_app_with_configs() - - app.secret_key = app.config["SECRET_KEY"] - - log_handlers = None - log_file = 
app.config.get("LOG_FILE") - if log_file: - log_dir = os.path.dirname(log_file) - os.makedirs(log_dir, exist_ok=True) - log_handlers = [ - RotatingFileHandler( - filename=log_file, - maxBytes=1024 * 1024 * 1024, - backupCount=5, - ), - logging.StreamHandler(sys.stdout), - ] - - logging.basicConfig( - level=app.config.get("LOG_LEVEL"), - format=app.config.get("LOG_FORMAT"), - datefmt=app.config.get("LOG_DATEFORMAT"), - handlers=log_handlers, - force=True, - ) - log_tz = app.config.get("LOG_TZ") - if log_tz: - from datetime import datetime - - import pytz - - timezone = pytz.timezone(log_tz) - - def time_converter(seconds): - return datetime.utcfromtimestamp(seconds).astimezone(timezone).timetuple() - - for handler in logging.root.handlers: - handler.formatter.converter = time_converter + app.secret_key = dify_config.SECRET_KEY initialize_extensions(app) register_blueprints(app) register_commands(app) @@ -117,6 +79,7 @@ def create_app() -> Flask: def initialize_extensions(app): # Since the application instance is now created, pass it to each Flask # extension instance to bind it to the Flask application instance (app) + ext_logging.init_app(app) ext_compress.init_app(app) ext_code_based_extension.init() ext_database.init_app(app) @@ -187,7 +150,7 @@ def register_blueprints(app): CORS( web_bp, - resources={r"/*": {"origins": app.config["WEB_API_CORS_ALLOW_ORIGINS"]}}, + resources={r"/*": {"origins": dify_config.WEB_API_CORS_ALLOW_ORIGINS}}, supports_credentials=True, allow_headers=["Content-Type", "Authorization", "X-App-Code"], methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"], @@ -198,7 +161,7 @@ def register_blueprints(app): CORS( console_app_bp, - resources={r"/*": {"origins": app.config["CONSOLE_CORS_ALLOW_ORIGINS"]}}, + resources={r"/*": {"origins": dify_config.CONSOLE_CORS_ALLOW_ORIGINS}}, supports_credentials=True, allow_headers=["Content-Type", "Authorization"], methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"], diff --git a/api/commands.py b/api/commands.py index 5b7f79c8f0..720a4447da 100644 --- a/api/commands.py +++ b/api/commands.py @@ -19,7 +19,7 @@ from extensions.ext_redis import redis_client from libs.helper import email as email_validate from libs.password import hash_password, password_pattern, valid_password from libs.rsa import generate_key_pair -from models.account import Tenant +from models import Tenant from models.dataset import Dataset, DatasetCollectionBinding, DocumentSegment from models.dataset import Document as DatasetDocument from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation @@ -277,6 +277,7 @@ def migrate_knowledge_vector_database(): VectorType.TENCENT, VectorType.BAIDU, VectorType.VIKINGDB, + VectorType.UPSTASH, } page = 1 while True: @@ -426,14 +427,14 @@ def convert_to_agent_apps(): # fetch first 1000 apps sql_query = """SELECT a.id AS id FROM apps a INNER JOIN app_model_configs am ON a.app_model_config_id=am.id - WHERE a.mode = 'chat' - AND am.agent_mode is not null + WHERE a.mode = 'chat' + AND am.agent_mode is not null AND ( - am.agent_mode like '%"strategy": "function_call"%' + am.agent_mode like '%"strategy": "function_call"%' OR am.agent_mode like '%"strategy": "react"%' - ) + ) AND ( - am.agent_mode like '{"enabled": true%' + am.agent_mode like '{"enabled": true%' OR am.agent_mode like '{"max_iteration": %' ) ORDER BY a.created_at DESC LIMIT 1000 """ diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index c44a51ee4b..0fa926038d 100644 --- 
a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -1,6 +1,15 @@ -from typing import Annotated, Optional +from typing import Annotated, Literal, Optional -from pydantic import AliasChoices, Field, HttpUrl, NegativeInt, NonNegativeInt, PositiveInt, computed_field +from pydantic import ( + AliasChoices, + Field, + HttpUrl, + NegativeInt, + NonNegativeInt, + PositiveFloat, + PositiveInt, + computed_field, +) from pydantic_settings import BaseSettings from configs.feature.hosted_service import HostedServiceConfig @@ -11,16 +20,31 @@ class SecurityConfig(BaseSettings): Security-related configurations for the application """ - SECRET_KEY: Optional[str] = Field( + SECRET_KEY: str = Field( description="Secret key for secure session cookie signing." "Make sure you are changing this key for your deployment with a strong key." "Generate a strong key using `openssl rand -base64 42` or set via the `SECRET_KEY` environment variable.", - default=None, + default="", ) - RESET_PASSWORD_TOKEN_EXPIRY_HOURS: PositiveInt = Field( - description="Duration in hours for which a password reset token remains valid", - default=24, + RESET_PASSWORD_TOKEN_EXPIRY_MINUTES: PositiveInt = Field( + description="Duration in minutes for which a password reset token remains valid", + default=5, + ) + + LOGIN_DISABLED: bool = Field( + description="Whether to disable login checks", + default=False, + ) + + ADMIN_API_KEY_ENABLE: bool = Field( + description="Whether to enable admin api key for authentication", + default=False, + ) + + ADMIN_API_KEY: Optional[str] = Field( + description="admin api key for authentication", + default=None, ) @@ -177,6 +201,16 @@ class FileUploadConfig(BaseSettings): default=10, ) + UPLOAD_VIDEO_FILE_SIZE_LIMIT: NonNegativeInt = Field( + description="video file size limit in Megabytes for uploading files", + default=100, + ) + + UPLOAD_AUDIO_FILE_SIZE_LIMIT: NonNegativeInt = Field( + description="audio file size limit in Megabytes for uploading files", + default=50, + ) + BATCH_UPLOAD_LIMIT: NonNegativeInt = Field( description="Maximum number of files allowed in a batch upload operation", default=20, @@ -285,6 +319,16 @@ class LoggingConfig(BaseSettings): default=None, ) + LOG_FILE_MAX_SIZE: PositiveInt = Field( + description="Maximum file size for file rotation retention, the unit is megabytes (MB)", + default=20, + ) + + LOG_FILE_BACKUP_COUNT: PositiveInt = Field( + description="Maximum file backup count file rotation retention", + default=5, + ) + LOG_FORMAT: str = Field( description="Format string for log messages", default="%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s", @@ -355,8 +399,8 @@ class WorkflowConfig(BaseSettings): ) MAX_VARIABLE_SIZE: PositiveInt = Field( - description="Maximum size in bytes for a single variable in workflows. Default to 5KB.", - default=5 * 1024, + description="Maximum size in bytes for a single variable in workflows. 
Default to 200 KB.", + default=200 * 1024, ) @@ -473,12 +517,18 @@ class MailConfig(BaseSettings): default=False, ) + EMAIL_SEND_IP_LIMIT_PER_MINUTE: PositiveInt = Field( + description="Maximum number of emails allowed to be sent from the same IP address in a minute", + default=50, + ) + class RagEtlConfig(BaseSettings): """ Configuration for RAG ETL processes """ + # TODO: This config is not only for rag etl, it is also for file upload, we should move it to file upload config ETL_TYPE: str = Field( description="RAG ETL type ('dify' or 'Unstructured'), default to 'dify'", default="dify", @@ -521,6 +571,11 @@ class DataSetConfig(BaseSettings): default=False, ) + TIDB_SERVERLESS_NUMBER: PositiveInt = Field( + description="number of tidb serverless cluster", + default=500, + ) + class WorkspaceConfig(BaseSettings): """ @@ -545,7 +600,7 @@ class IndexingConfig(BaseSettings): class ImageFormatConfig(BaseSettings): - MULTIMODAL_SEND_IMAGE_FORMAT: str = Field( + MULTIMODAL_SEND_IMAGE_FORMAT: Literal["base64", "url"] = Field( description="Format for sending images in multimodal contexts ('base64' or 'url'), default is base64", default="base64", ) @@ -614,6 +669,33 @@ class PositionConfig(BaseSettings): return {item.strip() for item in self.POSITION_TOOL_EXCLUDES.split(",") if item.strip() != ""} +class LoginConfig(BaseSettings): + ENABLE_EMAIL_CODE_LOGIN: bool = Field( + description="whether to enable email code login", + default=False, + ) + ENABLE_EMAIL_PASSWORD_LOGIN: bool = Field( + description="whether to enable email password login", + default=True, + ) + ENABLE_SOCIAL_OAUTH_LOGIN: bool = Field( + description="whether to enable github/google oauth login", + default=False, + ) + EMAIL_CODE_LOGIN_TOKEN_EXPIRY_MINUTES: PositiveInt = Field( + description="expiry time in minutes for email code login token", + default=5, + ) + ALLOW_REGISTER: bool = Field( + description="whether to enable register", + default=False, + ) + ALLOW_CREATE_WORKSPACE: bool = Field( + description="whether to enable create workspace", + default=False, + ) + + class FeatureConfig( # place the configs in alphabet order AppExecutionConfig, @@ -639,6 +721,7 @@ class FeatureConfig( UpdateConfig, WorkflowConfig, WorkspaceConfig, + LoginConfig, # hosted services config HostedServiceConfig, CeleryBeatConfig, diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py index fa7f41d630..3d68e29d0e 100644 --- a/api/configs/middleware/__init__.py +++ b/api/configs/middleware/__init__.py @@ -27,7 +27,9 @@ from configs.middleware.vdb.pgvectors_config import PGVectoRSConfig from configs.middleware.vdb.qdrant_config import QdrantConfig from configs.middleware.vdb.relyt_config import RelytConfig from configs.middleware.vdb.tencent_vector_config import TencentVectorDBConfig +from configs.middleware.vdb.tidb_on_qdrant_config import TidbOnQdrantConfig from configs.middleware.vdb.tidb_vector_config import TiDBVectorConfig +from configs.middleware.vdb.upstash_config import UpstashConfig from configs.middleware.vdb.vikingdb_config import VikingDBConfig from configs.middleware.vdb.weaviate_config import WeaviateConfig @@ -35,7 +37,8 @@ from configs.middleware.vdb.weaviate_config import WeaviateConfig class StorageConfig(BaseSettings): STORAGE_TYPE: str = Field( description="Type of storage to use." - " Options: 'local', 's3', 'azure-blob', 'aliyun-oss', 'google-storage'. 
Default is 'local'.", + " Options: 'local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', 'google-storage', 'huawei-obs', " + "'oci-storage', 'tencent-cos', 'volcengine-tos', 'supabase'. Default is 'local'.", default="local", ) @@ -52,6 +55,11 @@ class VectorStoreConfig(BaseSettings): default=None, ) + VECTOR_STORE_WHITELIST_ENABLE: Optional[bool] = Field( + description="Enable whitelist for vector store.", + default=False, + ) + class KeywordStoreConfig(BaseSettings): KEYWORD_STORE: str = Field( @@ -245,5 +253,7 @@ class MiddlewareConfig( ElasticsearchConfig, InternalTestConfig, VikingDBConfig, + UpstashConfig, + TidbOnQdrantConfig, ): pass diff --git a/api/configs/middleware/vdb/tidb_on_qdrant_config.py b/api/configs/middleware/vdb/tidb_on_qdrant_config.py new file mode 100644 index 0000000000..98268798ef --- /dev/null +++ b/api/configs/middleware/vdb/tidb_on_qdrant_config.py @@ -0,0 +1,65 @@ +from typing import Optional + +from pydantic import Field, NonNegativeInt, PositiveInt +from pydantic_settings import BaseSettings + + +class TidbOnQdrantConfig(BaseSettings): + """ + Tidb on Qdrant configs + """ + + TIDB_ON_QDRANT_URL: Optional[str] = Field( + description="Tidb on Qdrant url", + default=None, + ) + + TIDB_ON_QDRANT_API_KEY: Optional[str] = Field( + description="Tidb on Qdrant api key", + default=None, + ) + + TIDB_ON_QDRANT_CLIENT_TIMEOUT: NonNegativeInt = Field( + description="Tidb on Qdrant client timeout in seconds", + default=20, + ) + + TIDB_ON_QDRANT_GRPC_ENABLED: bool = Field( + description="whether enable grpc support for Tidb on Qdrant connection", + default=False, + ) + + TIDB_ON_QDRANT_GRPC_PORT: PositiveInt = Field( + description="Tidb on Qdrant grpc port", + default=6334, + ) + + TIDB_PUBLIC_KEY: Optional[str] = Field( + description="Tidb account public key", + default=None, + ) + + TIDB_PRIVATE_KEY: Optional[str] = Field( + description="Tidb account private key", + default=None, + ) + + TIDB_API_URL: Optional[str] = Field( + description="Tidb API url", + default=None, + ) + + TIDB_IAM_API_URL: Optional[str] = Field( + description="Tidb IAM API url", + default=None, + ) + + TIDB_REGION: Optional[str] = Field( + description="Tidb serverless region", + default="regions/aws-us-east-1", + ) + + TIDB_PROJECT_ID: Optional[str] = Field( + description="Tidb project id", + default=None, + ) diff --git a/api/configs/packaging/__init__.py b/api/configs/packaging/__init__.py index c832fb671e..389a64f53e 100644 --- a/api/configs/packaging/__init__.py +++ b/api/configs/packaging/__init__.py @@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings): CURRENT_VERSION: str = Field( description="Dify version", - default="0.9.2", + default="0.10.1", ) COMMIT_SHA: str = Field( diff --git a/api/constants/__init__.py b/api/constants/__init__.py index 75eaf81638..05795e11d7 100644 --- a/api/constants/__init__.py +++ b/api/constants/__init__.py @@ -1,2 +1,24 @@ +from configs import dify_config + HIDDEN_VALUE = "[__HIDDEN__]" UUID_NIL = "00000000-0000-0000-0000-000000000000" + +IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "webp", "gif", "svg"] +IMAGE_EXTENSIONS.extend([ext.upper() for ext in IMAGE_EXTENSIONS]) + +VIDEO_EXTENSIONS = ["mp4", "mov", "mpeg", "mpga"] +VIDEO_EXTENSIONS.extend([ext.upper() for ext in VIDEO_EXTENSIONS]) + +AUDIO_EXTENSIONS = ["mp3", "m4a", "wav", "webm", "amr"] +AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS]) + + +if dify_config.ETL_TYPE == "Unstructured": + DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "pdf", "html", "htm", "xlsx", "xls"] + 
DOCUMENT_EXTENSIONS.extend(("docx", "csv", "eml", "msg", "pptx", "xml", "epub")) + if dify_config.UNSTRUCTURED_API_URL: + DOCUMENT_EXTENSIONS.append("ppt") + DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS]) +else: + DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "pdf", "html", "htm", "xlsx", "xls", "docx", "csv"] + DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS]) diff --git a/api/contexts/__init__.py b/api/contexts/__init__.py index 623a1a28eb..85380b7330 100644 --- a/api/contexts/__init__.py +++ b/api/contexts/__init__.py @@ -1,7 +1,9 @@ from contextvars import ContextVar +from typing import TYPE_CHECKING -from core.workflow.entities.variable_pool import VariablePool +if TYPE_CHECKING: + from core.workflow.entities.variable_pool import VariablePool tenant_id: ContextVar[str] = ContextVar("tenant_id") -workflow_variable_pool: ContextVar[VariablePool] = ContextVar("workflow_variable_pool") +workflow_variable_pool: ContextVar["VariablePool"] = ContextVar("workflow_variable_pool") diff --git a/api/controllers/console/admin.py b/api/controllers/console/admin.py index f78ea9b288..a70c4a31c7 100644 --- a/api/controllers/console/admin.py +++ b/api/controllers/console/admin.py @@ -1,10 +1,10 @@ -import os from functools import wraps from flask import request from flask_restful import Resource, reqparse from werkzeug.exceptions import NotFound, Unauthorized +from configs import dify_config from constants.languages import supported_language from controllers.console import api from controllers.console.wraps import only_edition_cloud @@ -15,7 +15,7 @@ from models.model import App, InstalledApp, RecommendedApp def admin_required(view): @wraps(view) def decorated(*args, **kwargs): - if not os.getenv("ADMIN_API_KEY"): + if not dify_config.ADMIN_API_KEY: raise Unauthorized("API key is invalid.") auth_header = request.headers.get("Authorization") @@ -31,7 +31,7 @@ def admin_required(view): if auth_scheme != "bearer": raise Unauthorized("Invalid Authorization header format. 
Expected 'Bearer ' format.") - if os.getenv("ADMIN_API_KEY") != auth_token: + if dify_config.ADMIN_API_KEY != auth_token: raise Unauthorized("API key is invalid.") return view(*args, **kwargs) diff --git a/api/controllers/console/app/conversation.py b/api/controllers/console/app/conversation.py index c1e16b3b9b..b60a424d98 100644 --- a/api/controllers/console/app/conversation.py +++ b/api/controllers/console/app/conversation.py @@ -22,7 +22,8 @@ from fields.conversation_fields import ( ) from libs.helper import DatetimeString from libs.login import login_required -from models.model import AppMode, Conversation, EndUser, Message, MessageAnnotation +from models import Conversation, EndUser, Message, MessageAnnotation +from models.model import AppMode class CompletionConversationApi(Resource): diff --git a/api/controllers/console/app/generator.py b/api/controllers/console/app/generator.py index 3d1e6b7a37..7108759b0b 100644 --- a/api/controllers/console/app/generator.py +++ b/api/controllers/console/app/generator.py @@ -52,4 +52,39 @@ class RuleGenerateApi(Resource): return rules +class RuleCodeGenerateApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self): + parser = reqparse.RequestParser() + parser.add_argument("instruction", type=str, required=True, nullable=False, location="json") + parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json") + parser.add_argument("no_variable", type=bool, required=True, default=False, location="json") + parser.add_argument("code_language", type=str, required=False, default="javascript", location="json") + args = parser.parse_args() + + account = current_user + CODE_GENERATION_MAX_TOKENS = int(os.getenv("CODE_GENERATION_MAX_TOKENS", "1024")) + try: + code_result = LLMGenerator.generate_code( + tenant_id=account.current_tenant_id, + instruction=args["instruction"], + model_config=args["model_config"], + code_language=args["code_language"], + max_tokens=CODE_GENERATION_MAX_TOKENS, + ) + except ProviderTokenNotInitError as ex: + raise ProviderNotInitializeError(ex.description) + except QuotaExceededError: + raise ProviderQuotaExceededError() + except ModelCurrentlyNotSupportError: + raise ProviderModelCurrentlyNotSupportError() + except InvokeError as e: + raise CompletionRequestError(e.description) + + return code_result + + api.add_resource(RuleGenerateApi, "/rule-generate") +api.add_resource(RuleCodeGenerateApi, "/rule-code-generate") diff --git a/api/controllers/console/app/message.py b/api/controllers/console/app/message.py index 2fba3e0af0..fe06201982 100644 --- a/api/controllers/console/app/message.py +++ b/api/controllers/console/app/message.py @@ -105,6 +105,8 @@ class ChatMessageListApi(Resource): if rest_count > 0: has_more = True + history_messages = list(reversed(history_messages)) + return InfiniteScrollPagination(data=history_messages, limit=args["limit"], has_more=has_more) diff --git a/api/controllers/console/app/site.py b/api/controllers/console/app/site.py index 26da1ef26d..115a832da9 100644 --- a/api/controllers/console/app/site.py +++ b/api/controllers/console/app/site.py @@ -12,7 +12,7 @@ from controllers.console.wraps import account_initialization_required from extensions.ext_database import db from fields.app_fields import app_site_fields from libs.login import login_required -from models.model import Site +from models import Site def parse_app_site_args(): diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py 
index 0a693b84e2..a8f601aeee 100644 --- a/api/controllers/console/app/workflow.py +++ b/api/controllers/console/app/workflow.py @@ -13,14 +13,14 @@ from controllers.console.setup import setup_required from controllers.console.wraps import account_initialization_required from core.app.apps.base_app_queue_manager import AppQueueManager from core.app.entities.app_invoke_entities import InvokeFrom -from core.app.segments import factory -from core.errors.error import AppInvokeQuotaExceededError +from factories import variable_factory from fields.workflow_fields import workflow_fields from fields.workflow_run_fields import workflow_run_node_execution_fields from libs import helper from libs.helper import TimestampField, uuid_value from libs.login import current_user, login_required -from models.model import App, AppMode +from models import App +from models.model import AppMode from services.app_dsl_service import AppDslService from services.app_generate_service import AppGenerateService from services.errors.app import WorkflowHashNotEqualError @@ -101,9 +101,13 @@ class DraftWorkflowApi(Resource): try: environment_variables_list = args.get("environment_variables") or [] - environment_variables = [factory.build_variable_from_mapping(obj) for obj in environment_variables_list] + environment_variables = [ + variable_factory.build_variable_from_mapping(obj) for obj in environment_variables_list + ] conversation_variables_list = args.get("conversation_variables") or [] - conversation_variables = [factory.build_variable_from_mapping(obj) for obj in conversation_variables_list] + conversation_variables = [ + variable_factory.build_variable_from_mapping(obj) for obj in conversation_variables_list + ] workflow = workflow_service.sync_draft_workflow( app_model=app_model, graph=args["graph"], @@ -273,17 +277,15 @@ class DraftWorkflowRunApi(Resource): parser.add_argument("files", type=list, required=False, location="json") args = parser.parse_args() - try: - response = AppGenerateService.generate( - app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.DEBUGGER, streaming=True - ) + response = AppGenerateService.generate( + app_model=app_model, + user=current_user, + args=args, + invoke_from=InvokeFrom.DEBUGGER, + streaming=True, + ) - return helper.compact_generate_response(response) - except (ValueError, AppInvokeQuotaExceededError) as e: - raise e - except Exception as e: - logging.exception("internal server error.") - raise InternalServerError() + return helper.compact_generate_response(response) class WorkflowTaskStopApi(Resource): diff --git a/api/controllers/console/app/workflow_app_log.py b/api/controllers/console/app/workflow_app_log.py index dc962409cc..629b7a8bf4 100644 --- a/api/controllers/console/app/workflow_app_log.py +++ b/api/controllers/console/app/workflow_app_log.py @@ -7,7 +7,8 @@ from controllers.console.setup import setup_required from controllers.console.wraps import account_initialization_required from fields.workflow_app_log_fields import workflow_app_log_pagination_fields from libs.login import login_required -from models.model import App, AppMode +from models import App +from models.model import AppMode from services.workflow_app_service import WorkflowAppService diff --git a/api/controllers/console/app/workflow_run.py b/api/controllers/console/app/workflow_run.py index a055d03deb..5824ead9c3 100644 --- a/api/controllers/console/app/workflow_run.py +++ b/api/controllers/console/app/workflow_run.py @@ -13,7 +13,8 @@ from fields.workflow_run_fields import ( ) 
from libs.helper import uuid_value from libs.login import login_required -from models.model import App, AppMode +from models import App +from models.model import AppMode from services.workflow_run_service import WorkflowRunService diff --git a/api/controllers/console/app/workflow_statistic.py b/api/controllers/console/app/workflow_statistic.py index c7e54f2be0..f46af0f1ca 100644 --- a/api/controllers/console/app/workflow_statistic.py +++ b/api/controllers/console/app/workflow_statistic.py @@ -13,8 +13,8 @@ from controllers.console.wraps import account_initialization_required from extensions.ext_database import db from libs.helper import DatetimeString from libs.login import login_required +from models.enums import WorkflowRunTriggeredFrom from models.model import AppMode -from models.workflow import WorkflowRunTriggeredFrom class WorkflowDailyRunsStatistic(Resource): diff --git a/api/controllers/console/app/wraps.py b/api/controllers/console/app/wraps.py index 5e0a4bc814..c71ee8e5df 100644 --- a/api/controllers/console/app/wraps.py +++ b/api/controllers/console/app/wraps.py @@ -5,7 +5,8 @@ from typing import Optional, Union from controllers.console.app.error import AppNotFoundError from extensions.ext_database import db from libs.login import current_user -from models.model import App, AppMode +from models import App +from models.model import AppMode def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[AppMode]] = None): diff --git a/api/controllers/console/auth/activate.py b/api/controllers/console/auth/activate.py index f3198dfc1d..be353cefac 100644 --- a/api/controllers/console/auth/activate.py +++ b/api/controllers/console/auth/activate.py @@ -1,17 +1,15 @@ -import base64 import datetime -import secrets +from flask import request from flask_restful import Resource, reqparse from constants.languages import supported_language from controllers.console import api from controllers.console.error import AlreadyActivateError from extensions.ext_database import db -from libs.helper import StrLen, email, timezone -from libs.password import hash_password, valid_password -from models.account import AccountStatus -from services.account_service import RegisterService +from libs.helper import StrLen, email, extract_remote_ip, timezone +from models.account import AccountStatus, Tenant +from services.account_service import AccountService, RegisterService class ActivateCheckApi(Resource): @@ -27,8 +25,18 @@ class ActivateCheckApi(Resource): token = args["token"] invitation = RegisterService.get_invitation_if_token_valid(workspaceId, reg_email, token) - - return {"is_valid": invitation is not None, "workspace_name": invitation["tenant"].name if invitation else None} + if invitation: + data = invitation.get("data", {}) + tenant: Tenant = invitation.get("tenant", None) + workspace_name = tenant.name if tenant else None + workspace_id = tenant.id if tenant else None + invitee_email = data.get("email") if data else None + return { + "is_valid": invitation is not None, + "data": {"workspace_name": workspace_name, "workspace_id": workspace_id, "email": invitee_email}, + } + else: + return {"is_valid": False} class ActivateApi(Resource): @@ -38,7 +46,6 @@ class ActivateApi(Resource): parser.add_argument("email", type=email, required=False, nullable=True, location="json") parser.add_argument("token", type=str, required=True, nullable=False, location="json") parser.add_argument("name", type=StrLen(30), required=True, nullable=False, location="json") - parser.add_argument("password", 
type=valid_password, required=True, nullable=False, location="json") parser.add_argument( "interface_language", type=supported_language, required=True, nullable=False, location="json" ) @@ -54,15 +61,6 @@ class ActivateApi(Resource): account = invitation["account"] account.name = args["name"] - # generate password salt - salt = secrets.token_bytes(16) - base64_salt = base64.b64encode(salt).decode() - - # encrypt password with salt - password_hashed = hash_password(args["password"], salt) - base64_password_hashed = base64.b64encode(password_hashed).decode() - account.password = base64_password_hashed - account.password_salt = base64_salt account.interface_language = args["interface_language"] account.timezone = args["timezone"] account.interface_theme = "light" @@ -70,7 +68,9 @@ class ActivateApi(Resource): account.initialized_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None) db.session.commit() - return {"result": "success"} + token_pair = AccountService.login(account, ip_address=extract_remote_ip(request)) + + return {"result": "success", "data": token_pair.model_dump()} api.add_resource(ActivateCheckApi, "/activate/check") diff --git a/api/controllers/console/auth/error.py b/api/controllers/console/auth/error.py index ea23e097d0..e6e30c3c0b 100644 --- a/api/controllers/console/auth/error.py +++ b/api/controllers/console/auth/error.py @@ -27,5 +27,29 @@ class InvalidTokenError(BaseHTTPException): class PasswordResetRateLimitExceededError(BaseHTTPException): error_code = "password_reset_rate_limit_exceeded" - description = "Password reset rate limit exceeded. Try again later." + description = "Too many password reset emails have been sent. Please try again in 1 minutes." + code = 429 + + +class EmailCodeError(BaseHTTPException): + error_code = "email_code_error" + description = "Email code is invalid or expired." + code = 400 + + +class EmailOrPasswordMismatchError(BaseHTTPException): + error_code = "email_or_password_mismatch" + description = "The email or password is mismatched." + code = 400 + + +class EmailPasswordLoginLimitError(BaseHTTPException): + error_code = "email_code_login_limit" + description = "Too many incorrect password attempts. Please try again later." + code = 429 + + +class EmailCodeLoginRateLimitExceededError(BaseHTTPException): + error_code = "email_code_login_rate_limit_exceeded" + description = "Too many login emails have been sent. Please try again in 5 minutes." 
code = 429 diff --git a/api/controllers/console/auth/forgot_password.py b/api/controllers/console/auth/forgot_password.py index 0b01a4906a..7fea610610 100644 --- a/api/controllers/console/auth/forgot_password.py +++ b/api/controllers/console/auth/forgot_password.py @@ -1,65 +1,82 @@ import base64 -import logging import secrets +from flask import request from flask_restful import Resource, reqparse +from constants.languages import languages from controllers.console import api from controllers.console.auth.error import ( + EmailCodeError, InvalidEmailError, InvalidTokenError, PasswordMismatchError, - PasswordResetRateLimitExceededError, ) +from controllers.console.error import EmailSendIpLimitError, NotAllowedRegister from controllers.console.setup import setup_required +from events.tenant_event import tenant_was_created from extensions.ext_database import db -from libs.helper import email as email_validate +from libs.helper import email, extract_remote_ip from libs.password import hash_password, valid_password from models.account import Account -from services.account_service import AccountService -from services.errors.account import RateLimitExceededError +from services.account_service import AccountService, TenantService +from services.errors.workspace import WorkSpaceNotAllowedCreateError +from services.feature_service import FeatureService class ForgotPasswordSendEmailApi(Resource): @setup_required def post(self): parser = reqparse.RequestParser() - parser.add_argument("email", type=str, required=True, location="json") + parser.add_argument("email", type=email, required=True, location="json") + parser.add_argument("language", type=str, required=False, location="json") args = parser.parse_args() - email = args["email"] + ip_address = extract_remote_ip(request) + if AccountService.is_email_send_ip_limit(ip_address): + raise EmailSendIpLimitError() - if not email_validate(email): - raise InvalidEmailError() - - account = Account.query.filter_by(email=email).first() - - if account: - try: - AccountService.send_reset_password_email(account=account) - except RateLimitExceededError: - logging.warning(f"Rate limit exceeded for email: {account.email}") - raise PasswordResetRateLimitExceededError() + if args["language"] is not None and args["language"] == "zh-Hans": + language = "zh-Hans" else: - # Return success to avoid revealing email registration status - logging.warning(f"Attempt to reset password for unregistered email: {email}") + language = "en-US" - return {"result": "success"} + account = Account.query.filter_by(email=args["email"]).first() + token = None + if account is None: + if FeatureService.get_system_features().is_allow_register: + token = AccountService.send_reset_password_email(email=args["email"], language=language) + return {"result": "fail", "data": token, "code": "account_not_found"} + else: + raise NotAllowedRegister() + else: + token = AccountService.send_reset_password_email(account=account, email=args["email"], language=language) + + return {"result": "success", "data": token} class ForgotPasswordCheckApi(Resource): @setup_required def post(self): parser = reqparse.RequestParser() + parser.add_argument("email", type=str, required=True, location="json") + parser.add_argument("code", type=str, required=True, location="json") parser.add_argument("token", type=str, required=True, nullable=False, location="json") args = parser.parse_args() - token = args["token"] - reset_data = AccountService.get_reset_password_data(token) + user_email = args["email"] - if reset_data is None: 
- return {"is_valid": False, "email": None} - return {"is_valid": True, "email": reset_data.get("email")} + token_data = AccountService.get_reset_password_data(args["token"]) + if token_data is None: + raise InvalidTokenError() + + if user_email != token_data.get("email"): + raise InvalidEmailError() + + if args["code"] != token_data.get("code"): + raise EmailCodeError() + + return {"is_valid": True, "email": token_data.get("email")} class ForgotPasswordResetApi(Resource): @@ -92,9 +109,26 @@ class ForgotPasswordResetApi(Resource): base64_password_hashed = base64.b64encode(password_hashed).decode() account = Account.query.filter_by(email=reset_data.get("email")).first() - account.password = base64_password_hashed - account.password_salt = base64_salt - db.session.commit() + if account: + account.password = base64_password_hashed + account.password_salt = base64_salt + db.session.commit() + tenant = TenantService.get_join_tenants(account) + if not tenant and not FeatureService.get_system_features().is_allow_create_workspace: + tenant = TenantService.create_tenant(f"{account.name}'s Workspace") + TenantService.create_tenant_member(tenant, account, role="owner") + account.current_tenant = tenant + tenant_was_created.send(tenant) + else: + try: + account = AccountService.create_account_and_tenant( + email=reset_data.get("email"), + name=reset_data.get("email"), + password=password_confirm, + interface_language=languages[0], + ) + except WorkSpaceNotAllowedCreateError: + pass return {"result": "success"} diff --git a/api/controllers/console/auth/login.py b/api/controllers/console/auth/login.py index 18a7b23166..6c795f95b6 100644 --- a/api/controllers/console/auth/login.py +++ b/api/controllers/console/auth/login.py @@ -5,12 +5,29 @@ from flask import request from flask_restful import Resource, reqparse import services +from constants.languages import languages from controllers.console import api +from controllers.console.auth.error import ( + EmailCodeError, + EmailOrPasswordMismatchError, + EmailPasswordLoginLimitError, + InvalidEmailError, + InvalidTokenError, +) +from controllers.console.error import ( + AccountBannedError, + EmailSendIpLimitError, + NotAllowedCreateWorkspace, + NotAllowedRegister, +) from controllers.console.setup import setup_required +from events.tenant_event import tenant_was_created from libs.helper import email, extract_remote_ip from libs.password import valid_password from models.account import Account -from services.account_service import AccountService, TenantService +from services.account_service import AccountService, RegisterService, TenantService +from services.errors.workspace import WorkSpaceNotAllowedCreateError +from services.feature_service import FeatureService class LoginApi(Resource): @@ -23,15 +40,43 @@ class LoginApi(Resource): parser.add_argument("email", type=email, required=True, location="json") parser.add_argument("password", type=valid_password, required=True, location="json") parser.add_argument("remember_me", type=bool, required=False, default=False, location="json") + parser.add_argument("invite_token", type=str, required=False, default=None, location="json") + parser.add_argument("language", type=str, required=False, default="en-US", location="json") args = parser.parse_args() - # todo: Verify the recaptcha + is_login_error_rate_limit = AccountService.is_login_error_rate_limit(args["email"]) + if is_login_error_rate_limit: + raise EmailPasswordLoginLimitError() + + invitation = args["invite_token"] + if invitation: + invitation = 
RegisterService.get_invitation_if_token_valid(None, args["email"], invitation) + + if args["language"] is not None and args["language"] == "zh-Hans": + language = "zh-Hans" + else: + language = "en-US" try: - account = AccountService.authenticate(args["email"], args["password"]) - except services.errors.account.AccountLoginError as e: - return {"code": "unauthorized", "message": str(e)}, 401 - + if invitation: + data = invitation.get("data", {}) + invitee_email = data.get("email") if data else None + if invitee_email != args["email"]: + raise InvalidEmailError() + account = AccountService.authenticate(args["email"], args["password"], args["invite_token"]) + else: + account = AccountService.authenticate(args["email"], args["password"]) + except services.errors.account.AccountLoginError: + raise AccountBannedError() + except services.errors.account.AccountPasswordError: + AccountService.add_login_error_rate_limit(args["email"]) + raise EmailOrPasswordMismatchError() + except services.errors.account.AccountNotFoundError: + if FeatureService.get_system_features().is_allow_register: + token = AccountService.send_reset_password_email(email=args["email"], language=language) + return {"result": "fail", "data": token, "code": "account_not_found"} + else: + raise NotAllowedRegister() # SELF_HOSTED only have one workspace tenants = TenantService.get_join_tenants(account) if len(tenants) == 0: @@ -41,7 +86,7 @@ class LoginApi(Resource): } token_pair = AccountService.login(account=account, ip_address=extract_remote_ip(request)) - + AccountService.reset_login_error_rate_limit(args["email"]) return {"result": "success", "data": token_pair.model_dump()} @@ -49,60 +94,111 @@ class LogoutApi(Resource): @setup_required def get(self): account = cast(Account, flask_login.current_user) + if isinstance(account, flask_login.AnonymousUserMixin): + return {"result": "success"} AccountService.logout(account=account) flask_login.logout_user() return {"result": "success"} -class ResetPasswordApi(Resource): +class ResetPasswordSendEmailApi(Resource): @setup_required - def get(self): - # parser = reqparse.RequestParser() - # parser.add_argument('email', type=email, required=True, location='json') - # args = parser.parse_args() + def post(self): + parser = reqparse.RequestParser() + parser.add_argument("email", type=email, required=True, location="json") + parser.add_argument("language", type=str, required=False, location="json") + args = parser.parse_args() - # import mailchimp_transactional as MailchimpTransactional - # from mailchimp_transactional.api_client import ApiClientError + if args["language"] is not None and args["language"] == "zh-Hans": + language = "zh-Hans" + else: + language = "en-US" - # account = {'email': args['email']} - # account = AccountService.get_by_email(args['email']) - # if account is None: - # raise ValueError('Email not found') - # new_password = AccountService.generate_password() - # AccountService.update_password(account, new_password) + account = AccountService.get_user_through_email(args["email"]) + if account is None: + if FeatureService.get_system_features().is_allow_register: + token = AccountService.send_reset_password_email(email=args["email"], language=language) + else: + raise NotAllowedRegister() + else: + token = AccountService.send_reset_password_email(account=account, language=language) - # todo: Send email - # MAILCHIMP_API_KEY = dify_config.MAILCHIMP_TRANSACTIONAL_API_KEY - # mailchimp = MailchimpTransactional(MAILCHIMP_API_KEY) + return {"result": "success", "data": token} 
- # message = { - # 'from_email': 'noreply@example.com', - # 'to': [{'email': account['email']}], - # 'subject': 'Reset your Dify password', - # 'html': """ - #

<p>Dear User,</p>
- # <p>The Dify team has generated a new password for you, details as follows:</p>
- # <p>{new_password}</p>
- # <p>Please change your password to log in as soon as possible.</p>
- # <p>Regards,</p>
- # <p>The Dify Team</p>

- # """ - # } - # response = mailchimp.messages.send({ - # 'message': message, - # # required for transactional email - # ' settings': { - # 'sandbox_mode': dify_config.MAILCHIMP_SANDBOX_MODE, - # }, - # }) +class EmailCodeLoginSendEmailApi(Resource): + @setup_required + def post(self): + parser = reqparse.RequestParser() + parser.add_argument("email", type=email, required=True, location="json") + parser.add_argument("language", type=str, required=False, location="json") + args = parser.parse_args() - # Check if MSG was sent - # if response.status_code != 200: - # # handle error - # pass + ip_address = extract_remote_ip(request) + if AccountService.is_email_send_ip_limit(ip_address): + raise EmailSendIpLimitError() - return {"result": "success"} + if args["language"] is not None and args["language"] == "zh-Hans": + language = "zh-Hans" + else: + language = "en-US" + + account = AccountService.get_user_through_email(args["email"]) + if account is None: + if FeatureService.get_system_features().is_allow_register: + token = AccountService.send_email_code_login_email(email=args["email"], language=language) + else: + raise NotAllowedRegister() + else: + token = AccountService.send_email_code_login_email(account=account, language=language) + + return {"result": "success", "data": token} + + +class EmailCodeLoginApi(Resource): + @setup_required + def post(self): + parser = reqparse.RequestParser() + parser.add_argument("email", type=str, required=True, location="json") + parser.add_argument("code", type=str, required=True, location="json") + parser.add_argument("token", type=str, required=True, location="json") + args = parser.parse_args() + + user_email = args["email"] + + token_data = AccountService.get_email_code_login_data(args["token"]) + if token_data is None: + raise InvalidTokenError() + + if token_data["email"] != args["email"]: + raise InvalidEmailError() + + if token_data["code"] != args["code"]: + raise EmailCodeError() + + AccountService.revoke_email_code_login_token(args["token"]) + account = AccountService.get_user_through_email(user_email) + if account: + tenant = TenantService.get_join_tenants(account) + if not tenant: + if not FeatureService.get_system_features().is_allow_create_workspace: + raise NotAllowedCreateWorkspace() + else: + tenant = TenantService.create_tenant(f"{account.name}'s Workspace") + TenantService.create_tenant_member(tenant, account, role="owner") + account.current_tenant = tenant + tenant_was_created.send(tenant) + + if account is None: + try: + account = AccountService.create_account_and_tenant( + email=user_email, name=user_email, interface_language=languages[0] + ) + except WorkSpaceNotAllowedCreateError: + return NotAllowedCreateWorkspace() + token_pair = AccountService.login(account, ip_address=extract_remote_ip(request)) + AccountService.reset_login_error_rate_limit(args["email"]) + return {"result": "success", "data": token_pair.model_dump()} class RefreshTokenApi(Resource): @@ -120,4 +216,7 @@ class RefreshTokenApi(Resource): api.add_resource(LoginApi, "/login") api.add_resource(LogoutApi, "/logout") +api.add_resource(EmailCodeLoginSendEmailApi, "/email-code-login") +api.add_resource(EmailCodeLoginApi, "/email-code-login/validity") +api.add_resource(ResetPasswordSendEmailApi, "/reset-password") api.add_resource(RefreshTokenApi, "/refresh-token") diff --git a/api/controllers/console/auth/oauth.py b/api/controllers/console/auth/oauth.py index c5909b8c10..d27e3353c9 100644 --- a/api/controllers/console/auth/oauth.py +++ 
b/api/controllers/console/auth/oauth.py @@ -5,14 +5,20 @@ from typing import Optional import requests from flask import current_app, redirect, request from flask_restful import Resource +from werkzeug.exceptions import Unauthorized from configs import dify_config from constants.languages import languages +from events.tenant_event import tenant_was_created from extensions.ext_database import db from libs.helper import extract_remote_ip from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo -from models.account import Account, AccountStatus +from models import Account +from models.account import AccountStatus from services.account_service import AccountService, RegisterService, TenantService +from services.errors.account import AccountNotFoundError +from services.errors.workspace import WorkSpaceNotAllowedCreateError, WorkSpaceNotFoundError +from services.feature_service import FeatureService from .. import api @@ -42,6 +48,7 @@ def get_oauth_providers(): class OAuthLogin(Resource): def get(self, provider: str): + invite_token = request.args.get("invite_token") or None OAUTH_PROVIDERS = get_oauth_providers() with current_app.app_context(): oauth_provider = OAUTH_PROVIDERS.get(provider) @@ -49,7 +56,7 @@ class OAuthLogin(Resource): if not oauth_provider: return {"error": "Invalid provider"}, 400 - auth_url = oauth_provider.get_authorization_url() + auth_url = oauth_provider.get_authorization_url(invite_token=invite_token) return redirect(auth_url) @@ -62,6 +69,11 @@ class OAuthCallback(Resource): return {"error": "Invalid provider"}, 400 code = request.args.get("code") + state = request.args.get("state") + invite_token = None + if state: + invite_token = state + try: token = oauth_provider.get_access_token(code) user_info = oauth_provider.get_user_info(token) @@ -69,17 +81,43 @@ class OAuthCallback(Resource): logging.exception(f"An error occurred during the OAuth process with {provider}: {e.response.text}") return {"error": "OAuth process failed"}, 400 - account = _generate_account(provider, user_info) + if invite_token and RegisterService.is_valid_invite_token(invite_token): + invitation = RegisterService._get_invitation_by_token(token=invite_token) + if invitation: + invitation_email = invitation.get("email", None) + if invitation_email != user_info.email: + return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message=Invalid invitation token.") + + return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin/invite-settings?invite_token={invite_token}") + + try: + account = _generate_account(provider, user_info) + except AccountNotFoundError: + return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message=Account not found.") + except (WorkSpaceNotFoundError, WorkSpaceNotAllowedCreateError): + return redirect( + f"{dify_config.CONSOLE_WEB_URL}/signin" + "?message=Workspace not found, please contact system admin to invite you to join in a workspace." 
+ ) + # Check account status - if account.status in {AccountStatus.BANNED.value, AccountStatus.CLOSED.value}: - return {"error": "Account is banned or closed."}, 403 + if account.status == AccountStatus.BANNED.value: + return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message=Account is banned.") if account.status == AccountStatus.PENDING.value: account.status = AccountStatus.ACTIVE.value account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None) db.session.commit() - TenantService.create_owner_tenant_if_not_exist(account) + try: + TenantService.create_owner_tenant_if_not_exist(account) + except Unauthorized: + return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message=Workspace not found.") + except WorkSpaceNotAllowedCreateError: + return redirect( + f"{dify_config.CONSOLE_WEB_URL}/signin" + "?message=Workspace not found, please contact system admin to invite you to join in a workspace." + ) token_pair = AccountService.login( account=account, @@ -104,8 +142,20 @@ def _generate_account(provider: str, user_info: OAuthUserInfo): # Get account by openid or email. account = _get_account_by_openid_or_email(provider, user_info) + if account: + tenant = TenantService.get_join_tenants(account) + if not tenant: + if not FeatureService.get_system_features().is_allow_create_workspace: + raise WorkSpaceNotAllowedCreateError() + else: + tenant = TenantService.create_tenant(f"{account.name}'s Workspace") + TenantService.create_tenant_member(tenant, account, role="owner") + account.current_tenant = tenant + tenant_was_created.send(tenant) + if not account: - # Create account + if not FeatureService.get_system_features().is_allow_register: + raise AccountNotFoundError() account_name = user_info.name or "Dify" account = RegisterService.register( email=user_info.email, name=account_name, password=None, open_id=user_info.id, provider=provider diff --git a/api/controllers/console/datasets/data_source.py b/api/controllers/console/datasets/data_source.py index 0e1acab946..a2c9760782 100644 --- a/api/controllers/console/datasets/data_source.py +++ b/api/controllers/console/datasets/data_source.py @@ -15,8 +15,7 @@ from core.rag.extractor.notion_extractor import NotionExtractor from extensions.ext_database import db from fields.data_source_fields import integrate_list_fields, integrate_notion_info_list_fields from libs.login import login_required -from models.dataset import Document -from models.source import DataSourceOauthBinding +from models import DataSourceOauthBinding, Document from services.dataset_service import DatasetService, DocumentService from tasks.document_indexing_sync_task import document_indexing_sync_task diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index 6583356d23..c8022efb8b 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -24,8 +24,8 @@ from fields.app_fields import related_app_list from fields.dataset_fields import dataset_detail_fields, dataset_query_detail_fields from fields.document_fields import document_status_fields from libs.login import login_required -from models.dataset import Dataset, DatasetPermissionEnum, Document, DocumentSegment -from models.model import ApiToken, UploadFile +from models import ApiToken, Dataset, Document, DocumentSegment, UploadFile +from models.dataset import DatasetPermissionEnum from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService @@ -102,6 +102,13 @@ class 
DatasetListApi(Resource): help="type is required. Name must be between 1 to 40 characters.", type=_validate_name, ) + parser.add_argument( + "description", + type=str, + nullable=True, + required=False, + default="", + ) parser.add_argument( "indexing_technique", type=str, @@ -140,6 +147,7 @@ class DatasetListApi(Resource): dataset = DatasetService.create_empty_dataset( tenant_id=current_user.current_tenant_id, name=args["name"], + description=args["description"], indexing_technique=args["indexing_technique"], account=current_user, permission=DatasetPermissionEnum.ONLY_ME, @@ -619,6 +627,7 @@ class DatasetRetrievalSettingApi(Resource): | VectorType.PGVECTO_RS | VectorType.BAIDU | VectorType.VIKINGDB + | VectorType.UPSTASH ): return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]} case ( @@ -630,6 +639,7 @@ class DatasetRetrievalSettingApi(Resource): | VectorType.ORACLE | VectorType.ELASTICSEARCH | VectorType.PGVECTOR + | VectorType.TIDB_ON_QDRANT ): return { "retrieval_method": [ @@ -657,6 +667,7 @@ class DatasetRetrievalSettingMockApi(Resource): | VectorType.PGVECTO_RS | VectorType.BAIDU | VectorType.VIKINGDB + | VectorType.UPSTASH ): return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]} case ( diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index 829ef11e52..cdabac491e 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -46,8 +46,7 @@ from fields.document_fields import ( document_with_segments_fields, ) from libs.login import login_required -from models.dataset import Dataset, DatasetProcessRule, Document, DocumentSegment -from models.model import UploadFile +from models import Dataset, DatasetProcessRule, Document, DocumentSegment, UploadFile from services.dataset_service import DatasetService, DocumentService from tasks.add_document_to_index_task import add_document_to_index_task from tasks.remove_document_from_index_task import remove_document_from_index_task diff --git a/api/controllers/console/datasets/datasets_segments.py b/api/controllers/console/datasets/datasets_segments.py index 2405649387..08ea414288 100644 --- a/api/controllers/console/datasets/datasets_segments.py +++ b/api/controllers/console/datasets/datasets_segments.py @@ -24,7 +24,7 @@ from extensions.ext_database import db from extensions.ext_redis import redis_client from fields.segment_fields import segment_fields from libs.login import login_required -from models.dataset import DocumentSegment +from models import DocumentSegment from services.dataset_service import DatasetService, DocumentService, SegmentService from tasks.batch_create_segment_to_index_task import batch_create_segment_to_index_task from tasks.disable_segment_from_index_task import disable_segment_from_index_task diff --git a/api/controllers/console/datasets/file.py b/api/controllers/console/datasets/file.py index 846aa70e86..17d2879875 100644 --- a/api/controllers/console/datasets/file.py +++ b/api/controllers/console/datasets/file.py @@ -1,9 +1,12 @@ +import urllib.parse + from flask import request from flask_login import current_user -from flask_restful import Resource, marshal_with +from flask_restful import Resource, marshal_with, reqparse import services from configs import dify_config +from constants import DOCUMENT_EXTENSIONS from controllers.console import api from controllers.console.datasets.error import ( FileTooLargeError, @@ -13,9 +16,10 @@ from 
controllers.console.datasets.error import ( ) from controllers.console.setup import setup_required from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check -from fields.file_fields import file_fields, upload_config_fields +from core.helper import ssrf_proxy +from fields.file_fields import file_fields, remote_file_info_fields, upload_config_fields from libs.login import login_required -from services.file_service import ALLOWED_EXTENSIONS, UNSTRUCTURED_ALLOWED_EXTENSIONS, FileService +from services.file_service import FileService PREVIEW_WORDS_LIMIT = 3000 @@ -26,13 +30,12 @@ class FileApi(Resource): @account_initialization_required @marshal_with(upload_config_fields) def get(self): - file_size_limit = dify_config.UPLOAD_FILE_SIZE_LIMIT - batch_count_limit = dify_config.UPLOAD_FILE_BATCH_LIMIT - image_file_size_limit = dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT return { - "file_size_limit": file_size_limit, - "batch_count_limit": batch_count_limit, - "image_file_size_limit": image_file_size_limit, + "file_size_limit": dify_config.UPLOAD_FILE_SIZE_LIMIT, + "batch_count_limit": dify_config.UPLOAD_FILE_BATCH_LIMIT, + "image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT, + "video_file_size_limit": dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT, + "audio_file_size_limit": dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT, }, 200 @setup_required @@ -44,6 +47,10 @@ class FileApi(Resource): # get file from request file = request.files["file"] + parser = reqparse.RequestParser() + parser.add_argument("source", type=str, required=False, location="args") + source = parser.parse_args().get("source") + # check file if "file" not in request.files: raise NoFileUploadedError() @@ -51,7 +58,7 @@ if len(request.files) > 1: raise TooManyFilesError() try: - upload_file = FileService.upload_file(file, current_user) + upload_file = FileService.upload_file(file=file, user=current_user, source=source) except services.errors.file.FileTooLargeError as file_too_large_error: raise FileTooLargeError(file_too_large_error.description) except services.errors.file.UnsupportedFileTypeError: @@ -75,11 +82,24 @@ class FileSupportTypeApi(Resource): @login_required @account_initialization_required def get(self): - etl_type = dify_config.ETL_TYPE - allowed_extensions = UNSTRUCTURED_ALLOWED_EXTENSIONS if etl_type == "Unstructured" else ALLOWED_EXTENSIONS - return {"allowed_extensions": allowed_extensions} + return {"allowed_extensions": DOCUMENT_EXTENSIONS} + + +class RemoteFileInfoApi(Resource): + @marshal_with(remote_file_info_fields) + def get(self, url): + decoded_url = urllib.parse.unquote(url) + try: + response = ssrf_proxy.head(decoded_url) + return { + "file_type": response.headers.get("Content-Type", "application/octet-stream"), + "file_length": int(response.headers.get("Content-Length", 0)), + } + except Exception as e: + return {"error": str(e)}, 400 api.add_resource(FileApi, "/files/upload") api.add_resource(FilePreviewApi, "/files/<uuid:file_id>/preview") api.add_resource(FileSupportTypeApi, "/files/support-type") +api.add_resource(RemoteFileInfoApi, "/remote-files/<path:url>") diff --git a/api/controllers/console/error.py b/api/controllers/console/error.py index 870e547728..ed6a99a017 100644 --- a/api/controllers/console/error.py +++ b/api/controllers/console/error.py @@ -38,3 +38,27 @@ class AlreadyActivateError(BaseHTTPException): error_code = "already_activate" description = "Auth Token is invalid or account already activated, please check again."
code = 403 + + +class NotAllowedCreateWorkspace(BaseHTTPException): + error_code = "not_allowed_create_workspace" + description = "Workspace not found, please contact system admin to invite you to join in a workspace." + code = 400 + + +class AccountBannedError(BaseHTTPException): + error_code = "account_banned" + description = "Account is banned." + code = 400 + + +class NotAllowedRegister(BaseHTTPException): + error_code = "unauthorized" + description = "Account not found." + code = 400 + + +class EmailSendIpLimitError(BaseHTTPException): + error_code = "email_send_ip_limit" + description = "Too many emails have been sent from this IP address recently. Please try again later." + code = 429 diff --git a/api/controllers/console/explore/installed_app.py b/api/controllers/console/explore/installed_app.py index 408afc33a0..d72715a38c 100644 --- a/api/controllers/console/explore/installed_app.py +++ b/api/controllers/console/explore/installed_app.py @@ -11,7 +11,7 @@ from controllers.console.wraps import account_initialization_required, cloud_edi from extensions.ext_database import db from fields.installed_app_fields import installed_app_list_fields from libs.login import login_required -from models.model import App, InstalledApp, RecommendedApp +from models import App, InstalledApp, RecommendedApp from services.account_service import TenantService diff --git a/api/controllers/console/explore/parameter.py b/api/controllers/console/explore/parameter.py index aab7dd7888..7c7580e3c6 100644 --- a/api/controllers/console/explore/parameter.py +++ b/api/controllers/console/explore/parameter.py @@ -21,7 +21,12 @@ class AppParameterApi(InstalledAppResource): "options": fields.List(fields.String), } - system_parameters_fields = {"image_file_size_limit": fields.String} + system_parameters_fields = { + "image_file_size_limit": fields.Integer, + "video_file_size_limit": fields.Integer, + "audio_file_size_limit": fields.Integer, + "file_size_limit": fields.Integer, + } parameters_fields = { "opening_statement": fields.String, @@ -82,7 +87,12 @@ class AppParameterApi(InstalledAppResource): } }, ), - "system_parameters": {"image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT}, + "system_parameters": { + "image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT, + "video_file_size_limit": dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT, + "audio_file_size_limit": dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT, + "file_size_limit": dify_config.UPLOAD_FILE_SIZE_LIMIT, + }, } diff --git a/api/controllers/console/explore/saved_message.py b/api/controllers/console/explore/saved_message.py index a7ccf737a8..0fc9637479 100644 --- a/api/controllers/console/explore/saved_message.py +++ b/api/controllers/console/explore/saved_message.py @@ -18,7 +18,7 @@ message_fields = { "inputs": fields.Raw, "query": fields.String, "answer": fields.String, - "message_files": fields.List(fields.Nested(message_file_fields), attribute="files"), + "message_files": fields.List(fields.Nested(message_file_fields)), "feedback": fields.Nested(feedback_fields, attribute="user_feedback", allow_null=True), "created_at": TimestampField, } diff --git a/api/controllers/console/explore/wraps.py b/api/controllers/console/explore/wraps.py index 3c9317847b..49ea81a8a0 100644 --- a/api/controllers/console/explore/wraps.py +++ b/api/controllers/console/explore/wraps.py @@ -7,7 +7,7 @@ from werkzeug.exceptions import NotFound from controllers.console.wraps import account_initialization_required from extensions.ext_database import db from libs.login 
import login_required -from models.model import InstalledApp +from models import InstalledApp def installed_app_required(view=None): diff --git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py index dec426128f..97f5625726 100644 --- a/api/controllers/console/workspace/account.py +++ b/api/controllers/console/workspace/account.py @@ -20,7 +20,7 @@ from extensions.ext_database import db from fields.member_fields import account_fields from libs.helper import TimestampField, timezone from libs.login import login_required -from models.account import AccountIntegrate, InvitationCode +from models import AccountIntegrate, InvitationCode from services.account_service import AccountService from services.errors.account import CurrentPasswordIncorrectError as ServiceCurrentPasswordIncorrectError diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py index d2a17b133b..aaa24d501c 100644 --- a/api/controllers/console/workspace/tool_providers.py +++ b/api/controllers/console/workspace/tool_providers.py @@ -360,16 +360,15 @@ class ToolWorkflowProviderCreateApi(Resource): args = reqparser.parse_args() return WorkflowToolManageService.create_workflow_tool( - user_id, - tenant_id, - args["workflow_app_id"], - args["name"], - args["label"], - args["icon"], - args["description"], - args["parameters"], - args["privacy_policy"], - args.get("labels", []), + user_id=user_id, + tenant_id=tenant_id, + workflow_app_id=args["workflow_app_id"], + name=args["name"], + label=args["label"], + icon=args["icon"], + description=args["description"], + parameters=args["parameters"], + privacy_policy=args["privacy_policy"], ) diff --git a/api/controllers/console/workspace/workspace.py b/api/controllers/console/workspace/workspace.py index af3ebc099b..96f866fca2 100644 --- a/api/controllers/console/workspace/workspace.py +++ b/api/controllers/console/workspace/workspace.py @@ -198,7 +198,7 @@ class WebappLogoWorkspaceApi(Resource): raise UnsupportedFileTypeError() try: - upload_file = FileService.upload_file(file, current_user, True) + upload_file = FileService.upload_file(file=file, user=current_user) except services.errors.file.FileTooLargeError as file_too_large_error: raise FileTooLargeError(file_too_large_error.description) diff --git a/api/controllers/files/image_preview.py b/api/controllers/files/image_preview.py index a56c1c332d..6b3ac93cdf 100644 --- a/api/controllers/files/image_preview.py +++ b/api/controllers/files/image_preview.py @@ -1,5 +1,5 @@ from flask import Response, request -from flask_restful import Resource +from flask_restful import Resource, reqparse from werkzeug.exceptions import NotFound import services @@ -10,6 +10,10 @@ from services.file_service import FileService class ImagePreviewApi(Resource): + """ + Deprecated + """ + def get(self, file_id): file_id = str(file_id) @@ -21,13 +25,57 @@ class ImagePreviewApi(Resource): return {"content": "Invalid request."}, 400 try: - generator, mimetype = FileService.get_image_preview(file_id, timestamp, nonce, sign) + generator, mimetype = FileService.get_image_preview( + file_id=file_id, + timestamp=timestamp, + nonce=nonce, + sign=sign, + ) except services.errors.file.UnsupportedFileTypeError: raise UnsupportedFileTypeError() return Response(generator, mimetype=mimetype) +class FilePreviewApi(Resource): + def get(self, file_id): + file_id = str(file_id) + + parser = reqparse.RequestParser() + parser.add_argument("timestamp", type=str, required=True, 
location="args") + parser.add_argument("nonce", type=str, required=True, location="args") + parser.add_argument("sign", type=str, required=True, location="args") + parser.add_argument("as_attachment", type=bool, required=False, default=False, location="args") + + args = parser.parse_args() + + if not args["timestamp"] or not args["nonce"] or not args["sign"]: + return {"content": "Invalid request."}, 400 + + try: + generator, upload_file = FileService.get_file_generator_by_file_id( + file_id=file_id, + timestamp=args["timestamp"], + nonce=args["nonce"], + sign=args["sign"], + ) + except services.errors.file.UnsupportedFileTypeError: + raise UnsupportedFileTypeError() + + response = Response( + generator, + mimetype=upload_file.mime_type, + direct_passthrough=True, + headers={}, + ) + if upload_file.size > 0: + response.headers["Content-Length"] = str(upload_file.size) + if args["as_attachment"]: + response.headers["Content-Disposition"] = f"attachment; filename={upload_file.name}" + + return response + + class WorkspaceWebappLogoApi(Resource): def get(self, workspace_id): workspace_id = str(workspace_id) @@ -49,4 +97,5 @@ class WorkspaceWebappLogoApi(Resource): api.add_resource(ImagePreviewApi, "/files//image-preview") +api.add_resource(FilePreviewApi, "/files//file-preview") api.add_resource(WorkspaceWebappLogoApi, "/files/workspaces//webapp-logo") diff --git a/api/controllers/files/tool_files.py b/api/controllers/files/tool_files.py index 406cd42214..a298701a2f 100644 --- a/api/controllers/files/tool_files.py +++ b/api/controllers/files/tool_files.py @@ -16,6 +16,7 @@ class ToolFilePreviewApi(Resource): parser.add_argument("timestamp", type=str, required=True, location="args") parser.add_argument("nonce", type=str, required=True, location="args") parser.add_argument("sign", type=str, required=True, location="args") + parser.add_argument("as_attachment", type=bool, required=False, default=False, location="args") args = parser.parse_args() @@ -28,18 +29,27 @@ class ToolFilePreviewApi(Resource): raise Forbidden("Invalid request.") try: - result = ToolFileManager.get_file_generator_by_tool_file_id( + stream, tool_file = ToolFileManager.get_file_generator_by_tool_file_id( file_id, ) - if not result: + if not stream or not tool_file: raise NotFound("file is not found") - - generator, mimetype = result except Exception: raise UnsupportedFileTypeError() - return Response(generator, mimetype=mimetype) + response = Response( + stream, + mimetype=tool_file.mimetype, + direct_passthrough=True, + headers={}, + ) + if tool_file.size > 0: + response.headers["Content-Length"] = str(tool_file.size) + if args["as_attachment"]: + response.headers["Content-Disposition"] = f"attachment; filename={tool_file.name}" + + return response api.add_resource(ToolFilePreviewApi, "/files/tools/.") diff --git a/api/controllers/service_api/app/app.py b/api/controllers/service_api/app/app.py index f7c091217b..9a4cdc26cd 100644 --- a/api/controllers/service_api/app/app.py +++ b/api/controllers/service_api/app/app.py @@ -21,7 +21,12 @@ class AppParameterApi(Resource): "options": fields.List(fields.String), } - system_parameters_fields = {"image_file_size_limit": fields.String} + system_parameters_fields = { + "image_file_size_limit": fields.Integer, + "video_file_size_limit": fields.Integer, + "audio_file_size_limit": fields.Integer, + "file_size_limit": fields.Integer, + } parameters_fields = { "opening_statement": fields.String, @@ -81,7 +86,12 @@ class AppParameterApi(Resource): } }, ), - "system_parameters": 
{"image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT}, + "system_parameters": { + "image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT, + "video_file_size_limit": dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT, + "audio_file_size_limit": dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT, + "file_size_limit": dify_config.UPLOAD_FILE_SIZE_LIMIT, + }, } diff --git a/api/controllers/service_api/app/message.py b/api/controllers/service_api/app/message.py index a70ee89b5e..ecff804adc 100644 --- a/api/controllers/service_api/app/message.py +++ b/api/controllers/service_api/app/message.py @@ -48,7 +48,7 @@ class MessageListApi(Resource): "tool_input": fields.String, "created_at": TimestampField, "observation": fields.String, - "message_files": fields.List(fields.String, attribute="files"), + "message_files": fields.List(fields.Nested(message_file_fields)), } message_fields = { @@ -58,7 +58,7 @@ class MessageListApi(Resource): "inputs": fields.Raw, "query": fields.String, "answer": fields.String(attribute="re_sign_file_url_answer"), - "message_files": fields.List(fields.Nested(message_file_fields), attribute="files"), + "message_files": fields.List(fields.Nested(message_file_fields)), "feedback": fields.Nested(feedback_fields, attribute="user_feedback", allow_null=True), "retriever_resources": fields.List(fields.Nested(retriever_resource_fields)), "created_at": TimestampField, diff --git a/api/controllers/service_api/dataset/dataset.py b/api/controllers/service_api/dataset/dataset.py index f076cff6c8..799fccc228 100644 --- a/api/controllers/service_api/dataset/dataset.py +++ b/api/controllers/service_api/dataset/dataset.py @@ -66,6 +66,13 @@ class DatasetListApi(DatasetApiResource): help="type is required. Name must be between 1 to 40 characters.", type=_validate_name, ) + parser.add_argument( + "description", + type=str, + nullable=True, + required=False, + default="", + ) parser.add_argument( "indexing_technique", type=str, @@ -108,6 +115,7 @@ class DatasetListApi(DatasetApiResource): dataset = DatasetService.create_empty_dataset( tenant_id=tenant_id, name=args["name"], + description=args["description"], indexing_technique=args["indexing_technique"], account=current_user, permission=args["permission"], diff --git a/api/controllers/web/app.py b/api/controllers/web/app.py index 20b4e4674c..974d2cff94 100644 --- a/api/controllers/web/app.py +++ b/api/controllers/web/app.py @@ -21,7 +21,12 @@ class AppParameterApi(WebApiResource): "options": fields.List(fields.String), } - system_parameters_fields = {"image_file_size_limit": fields.String} + system_parameters_fields = { + "image_file_size_limit": fields.Integer, + "video_file_size_limit": fields.Integer, + "audio_file_size_limit": fields.Integer, + "file_size_limit": fields.Integer, + } parameters_fields = { "opening_statement": fields.String, @@ -80,7 +85,12 @@ class AppParameterApi(WebApiResource): } }, ), - "system_parameters": {"image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT}, + "system_parameters": { + "image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT, + "video_file_size_limit": dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT, + "audio_file_size_limit": dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT, + "file_size_limit": dify_config.UPLOAD_FILE_SIZE_LIMIT, + }, } diff --git a/api/controllers/web/file.py b/api/controllers/web/file.py index 253b1d511c..6eeaa0e3f0 100644 --- a/api/controllers/web/file.py +++ b/api/controllers/web/file.py @@ -1,11 +1,14 @@ +import urllib.parse + from flask import request -from 
flask_restful import marshal_with +from flask_restful import marshal_with, reqparse import services from controllers.web import api from controllers.web.error import FileTooLargeError, NoFileUploadedError, TooManyFilesError, UnsupportedFileTypeError from controllers.web.wraps import WebApiResource -from fields.file_fields import file_fields +from core.helper import ssrf_proxy +from fields.file_fields import file_fields, remote_file_info_fields from services.file_service import FileService @@ -15,6 +18,10 @@ class FileApi(WebApiResource): # get file from request file = request.files["file"] + parser = reqparse.RequestParser() + parser.add_argument("source", type=str, required=False, location="args") + source = parser.parse_args().get("source") + # check file if "file" not in request.files: raise NoFileUploadedError() @@ -22,7 +29,7 @@ if len(request.files) > 1: raise TooManyFilesError() try: - upload_file = FileService.upload_file(file, end_user) + upload_file = FileService.upload_file(file=file, user=end_user, source=source) except services.errors.file.FileTooLargeError as file_too_large_error: raise FileTooLargeError(file_too_large_error.description) except services.errors.file.UnsupportedFileTypeError: @@ -31,4 +38,19 @@ return upload_file, 201 +class RemoteFileInfoApi(WebApiResource): + @marshal_with(remote_file_info_fields) + def get(self, url): + decoded_url = urllib.parse.unquote(url) + try: + response = ssrf_proxy.head(decoded_url) + return { + "file_type": response.headers.get("Content-Type", "application/octet-stream"), + "file_length": int(response.headers.get("Content-Length", -1)), + } + except Exception as e: + return {"error": str(e)}, 400 + + api.add_resource(FileApi, "/files/upload") +api.add_resource(RemoteFileInfoApi, "/remote-files/<path:url>") diff --git a/api/controllers/web/message.py b/api/controllers/web/message.py index 2d2a5866c8..98891f5d00 100644 --- a/api/controllers/web/message.py +++ b/api/controllers/web/message.py @@ -22,6 +22,7 @@ from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotIni from core.model_runtime.errors.invoke import InvokeError from fields.conversation_fields import message_file_fields from fields.message_fields import agent_thought_fields +from fields.raws import FilesContainedField from libs import helper from libs.helper import TimestampField, uuid_value from models.model import AppMode @@ -58,10 +59,10 @@ class MessageListApi(WebApiResource): "id": fields.String, "conversation_id": fields.String, "parent_message_id": fields.String, - "inputs": fields.Raw, + "inputs": FilesContainedField, "query": fields.String, "answer": fields.String(attribute="re_sign_file_url_answer"), - "message_files": fields.List(fields.Nested(message_file_fields), attribute="files"), + "message_files": fields.List(fields.Nested(message_file_fields)), "feedback": fields.Nested(feedback_fields, attribute="user_feedback", allow_null=True), "retriever_resources": fields.List(fields.Nested(retriever_resource_fields)), "created_at": TimestampField, diff --git a/api/controllers/web/saved_message.py b/api/controllers/web/saved_message.py index 8253f5fc57..b0492e6b6f 100644 --- a/api/controllers/web/saved_message.py +++ b/api/controllers/web/saved_message.py @@ -17,7 +17,7 @@ message_fields = { "inputs": fields.Raw, "query": fields.String, "answer": fields.String, - "message_files": 
fields.List(fields.Nested(message_file_fields)), "feedback": fields.Nested(feedback_fields, attribute="user_feedback", allow_null=True), "created_at": TimestampField, } diff --git a/api/core/agent/base_agent_runner.py b/api/core/agent/base_agent_runner.py index 5295f97bdb..514dcfbd68 100644 --- a/api/core/agent/base_agent_runner.py +++ b/api/core/agent/base_agent_runner.py @@ -16,13 +16,14 @@ from core.app.entities.app_invoke_entities import ( ) from core.callback_handler.agent_tool_callback_handler import DifyAgentCallbackHandler from core.callback_handler.index_tool_callback_handler import DatasetIndexToolCallbackHandler -from core.file.message_file_parser import MessageFileParser +from core.file import file_manager from core.memory.token_buffer_memory import TokenBufferMemory from core.model_manager import ModelInstance -from core.model_runtime.entities.llm_entities import LLMUsage -from core.model_runtime.entities.message_entities import ( +from core.model_runtime.entities import ( AssistantPromptMessage, + LLMUsage, PromptMessage, + PromptMessageContent, PromptMessageTool, SystemPromptMessage, TextPromptMessageContent, @@ -40,9 +41,9 @@ from core.tools.entities.tool_entities import ( from core.tools.tool.dataset_retriever_tool import DatasetRetrieverTool from core.tools.tool.tool import Tool from core.tools.tool_manager import ToolManager -from core.tools.utils.tool_parameter_converter import ToolParameterConverter from extensions.ext_database import db -from models.model import Conversation, Message, MessageAgentThought +from factories import file_factory +from models.model import Conversation, Message, MessageAgentThought, MessageFile from models.tools import ToolConversationVariables logger = logging.getLogger(__name__) @@ -66,23 +67,6 @@ class BaseAgentRunner(AppRunner): db_variables: Optional[ToolConversationVariables] = None, model_instance: ModelInstance = None, ) -> None: - """ - Agent runner - :param tenant_id: tenant id - :param application_generate_entity: application generate entity - :param conversation: conversation - :param app_config: app generate entity - :param model_config: model config - :param config: dataset config - :param queue_manager: queue manager - :param message: message - :param user_id: user id - :param memory: memory - :param prompt_messages: prompt messages - :param variables_pool: variables pool - :param db_variables: db variables - :param model_instance: model instance - """ self.tenant_id = tenant_id self.application_generate_entity = application_generate_entity self.conversation = conversation @@ -180,7 +164,7 @@ class BaseAgentRunner(AppRunner): if parameter.form != ToolParameter.ToolParameterForm.LLM: continue - parameter_type = ToolParameterConverter.get_parameter_type(parameter.type) + parameter_type = parameter.type.as_normal_type() enum = [] if parameter.type == ToolParameter.ToolParameterType.SELECT: enum = [option.value for option in parameter.options] @@ -265,7 +249,7 @@ class BaseAgentRunner(AppRunner): if parameter.form != ToolParameter.ToolParameterForm.LLM: continue - parameter_type = ToolParameterConverter.get_parameter_type(parameter.type) + parameter_type = parameter.type.as_normal_type() enum = [] if parameter.type == ToolParameter.ToolParameterType.SELECT: enum = [option.value for option in parameter.options] @@ -511,26 +495,24 @@ class BaseAgentRunner(AppRunner): return result def organize_agent_user_prompt(self, message: Message) -> UserPromptMessage: - message_file_parser = MessageFileParser( - tenant_id=self.tenant_id, - 
app_id=self.app_config.app_id, - ) - - files = message.message_files + files = db.session.query(MessageFile).filter(MessageFile.message_id == message.id).all() if files: file_extra_config = FileUploadConfigManager.convert(message.app_model_config.to_dict()) if file_extra_config: - file_objs = message_file_parser.transform_message_files(files, file_extra_config) + file_objs = file_factory.build_from_message_files( + message_files=files, tenant_id=self.tenant_id, config=file_extra_config + ) else: file_objs = [] if not file_objs: return UserPromptMessage(content=message.query) else: - prompt_message_contents = [TextPromptMessageContent(data=message.query)] + prompt_message_contents: list[PromptMessageContent] = [] + prompt_message_contents.append(TextPromptMessageContent(data=message.query)) for file_obj in file_objs: - prompt_message_contents.append(file_obj.prompt_message_content) + prompt_message_contents.append(file_manager.to_prompt_message_content(file_obj)) return UserPromptMessage(content=prompt_message_contents) else: diff --git a/api/core/agent/cot_chat_agent_runner.py b/api/core/agent/cot_chat_agent_runner.py index 5e16373fff..6261a9b12c 100644 --- a/api/core/agent/cot_chat_agent_runner.py +++ b/api/core/agent/cot_chat_agent_runner.py @@ -1,9 +1,11 @@ import json from core.agent.cot_agent_runner import CotAgentRunner -from core.model_runtime.entities.message_entities import ( +from core.file import file_manager +from core.model_runtime.entities import ( AssistantPromptMessage, PromptMessage, + PromptMessageContent, SystemPromptMessage, TextPromptMessageContent, UserPromptMessage, @@ -32,9 +34,10 @@ class CotChatAgentRunner(CotAgentRunner): Organize user query """ if self.files: - prompt_message_contents = [TextPromptMessageContent(data=query)] + prompt_message_contents: list[PromptMessageContent] = [] + prompt_message_contents.append(TextPromptMessageContent(data=query)) for file_obj in self.files: - prompt_message_contents.append(file_obj.prompt_message_content) + prompt_message_contents.append(file_manager.to_prompt_message_content(file_obj)) prompt_messages.append(UserPromptMessage(content=prompt_message_contents)) else: diff --git a/api/core/agent/fc_agent_runner.py b/api/core/agent/fc_agent_runner.py index 7b22025582..9083b4e85f 100644 --- a/api/core/agent/fc_agent_runner.py +++ b/api/core/agent/fc_agent_runner.py @@ -7,10 +7,15 @@ from typing import Any, Optional, Union from core.agent.base_agent_runner import BaseAgentRunner from core.app.apps.base_app_queue_manager import PublishFrom from core.app.entities.queue_entities import QueueAgentThoughtEvent, QueueMessageEndEvent, QueueMessageFileEvent -from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage -from core.model_runtime.entities.message_entities import ( +from core.file import file_manager +from core.model_runtime.entities import ( AssistantPromptMessage, + LLMResult, + LLMResultChunk, + LLMResultChunkDelta, + LLMUsage, PromptMessage, + PromptMessageContent, PromptMessageContentType, SystemPromptMessage, TextPromptMessageContent, @@ -390,9 +395,10 @@ class FunctionCallAgentRunner(BaseAgentRunner): Organize user query """ if self.files: - prompt_message_contents = [TextPromptMessageContent(data=query)] + prompt_message_contents: list[PromptMessageContent] = [] + prompt_message_contents.append(TextPromptMessageContent(data=query)) for file_obj in self.files: - prompt_message_contents.append(file_obj.prompt_message_content) + 
prompt_message_contents.append(file_manager.to_prompt_message_content(file_obj)) prompt_messages.append(UserPromptMessage(content=prompt_message_contents)) else: diff --git a/api/core/app/app_config/easy_ui_based_app/variables/manager.py b/api/core/app/app_config/easy_ui_based_app/variables/manager.py index a1bfde3208..2f2445a336 100644 --- a/api/core/app/app_config/easy_ui_based_app/variables/manager.py +++ b/api/core/app/app_config/easy_ui_based_app/variables/manager.py @@ -53,12 +53,11 @@ class BasicVariablesConfigManager: VariableEntity( type=variable_type, variable=variable.get("variable"), - description=variable.get("description"), + description=variable.get("description") or "", label=variable.get("label"), required=variable.get("required", False), max_length=variable.get("max_length"), - options=variable.get("options"), - default=variable.get("default"), + options=variable.get("options") or [], ) ) diff --git a/api/core/app/app_config/entities.py b/api/core/app/app_config/entities.py index 7e5899bafa..6c6e342a07 100644 --- a/api/core/app/app_config/entities.py +++ b/api/core/app/app_config/entities.py @@ -1,11 +1,12 @@ +from collections.abc import Sequence from enum import Enum from typing import Any, Optional -from pydantic import BaseModel +from pydantic import BaseModel, Field, field_validator -from core.file.file_obj import FileExtraConfig +from core.file import FileExtraConfig, FileTransferMethod, FileType from core.model_runtime.entities.message_entities import PromptMessageRole -from models import AppMode +from models.model import AppMode class ModelConfigEntity(BaseModel): @@ -69,7 +70,7 @@ class PromptTemplateEntity(BaseModel): ADVANCED = "advanced" @classmethod - def value_of(cls, value: str) -> "PromptType": + def value_of(cls, value: str): """ Get value of given mode. @@ -93,6 +94,8 @@ class VariableEntityType(str, Enum): PARAGRAPH = "paragraph" NUMBER = "number" EXTERNAL_DATA_TOOL = "external_data_tool" + FILE = "file" + FILE_LIST = "file-list" class VariableEntity(BaseModel): @@ -102,13 +105,24 @@ class VariableEntity(BaseModel): variable: str label: str - description: Optional[str] = None + description: str = "" type: VariableEntityType required: bool = False max_length: Optional[int] = None - options: Optional[list[str]] = None - default: Optional[str] = None - hint: Optional[str] = None + options: Sequence[str] = Field(default_factory=list) + allowed_file_types: Sequence[FileType] = Field(default_factory=list) + allowed_file_extensions: Sequence[str] = Field(default_factory=list) + allowed_file_upload_methods: Sequence[FileTransferMethod] = Field(default_factory=list) + + @field_validator("description", mode="before") + @classmethod + def convert_none_description(cls, v: Any) -> str: + return v or "" + + @field_validator("options", mode="before") + @classmethod + def convert_none_options(cls, v: Any) -> Sequence[str]: + return v or [] class ExternalDataVariableEntity(BaseModel): @@ -136,7 +150,7 @@ class DatasetRetrieveConfigEntity(BaseModel): MULTIPLE = "multiple" @classmethod - def value_of(cls, value: str) -> "RetrieveStrategy": + def value_of(cls, value: str): """ Get value of given mode. 
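For context on the entities.py hunk above: VariableEntity now normalizes null description/options through mode="before" field validators instead of rejecting them, and gains the new file and file-list variable types with per-variable upload constraints. A minimal sketch of the resulting behavior, assuming the Dify api package is importable and that FileType and FileTransferMethod accept the string values shown (the variable name, label, and extension list are illustrative only, not part of this patch):

    from core.app.app_config.entities import VariableEntity, VariableEntityType

    # None values coming from stored app configs are coerced by the
    # mode="before" validators rather than failing validation.
    entity = VariableEntity.model_validate(
        {
            "variable": "resume",  # hypothetical form field name
            "label": "Resume",
            "type": "file",  # new VariableEntityType.FILE
            "required": True,
            "description": None,  # coerced to ""
            "options": None,  # coerced to []
            "allowed_file_types": ["document"],
            "allowed_file_upload_methods": ["local_file", "remote_url"],
            "allowed_file_extensions": [".pdf"],
        }
    )
    assert entity.type == VariableEntityType.FILE
    assert entity.description == ""
    assert list(entity.options) == []

Downstream, _prepare_user_inputs in base_app_generator.py (further below) reads these allowed_* fields to build a FileExtraConfig when converting dict-shaped inputs into File objects.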
diff --git a/api/core/app/app_config/features/file_upload/manager.py b/api/core/app/app_config/features/file_upload/manager.py index 7a275cb532..42beec2535 100644 --- a/api/core/app/app_config/features/file_upload/manager.py +++ b/api/core/app/app_config/features/file_upload/manager.py @@ -1,12 +1,13 @@ from collections.abc import Mapping -from typing import Any, Optional +from typing import Any -from core.file.file_obj import FileExtraConfig +from core.file.models import FileExtraConfig +from models import FileUploadConfig class FileUploadConfigManager: @classmethod - def convert(cls, config: Mapping[str, Any], is_vision: bool = True) -> Optional[FileExtraConfig]: + def convert(cls, config: Mapping[str, Any], is_vision: bool = True): """ Convert model config to model config @@ -15,19 +16,21 @@ class FileUploadConfigManager: """ file_upload_dict = config.get("file_upload") if file_upload_dict: - if file_upload_dict.get("image"): - if "enabled" in file_upload_dict["image"] and file_upload_dict["image"]["enabled"]: - image_config = { - "number_limits": file_upload_dict["image"]["number_limits"], - "transfer_methods": file_upload_dict["image"]["transfer_methods"], + if file_upload_dict.get("enabled"): + transform_methods = file_upload_dict.get("allowed_file_upload_methods") or file_upload_dict.get( + "allowed_upload_methods", [] + ) + data = { + "image_config": { + "number_limits": file_upload_dict["number_limits"], + "transfer_methods": transform_methods, } + } - if is_vision: - image_config["detail"] = file_upload_dict["image"]["detail"] + if is_vision: + data["image_config"]["detail"] = file_upload_dict.get("image", {}).get("detail", "low") - return FileExtraConfig(image_config=image_config) - - return None + return FileExtraConfig.model_validate(data) @classmethod def validate_and_set_defaults(cls, config: dict, is_vision: bool = True) -> tuple[dict, list[str]]: @@ -39,29 +42,7 @@ class FileUploadConfigManager: """ if not config.get("file_upload"): config["file_upload"] = {} - - if not isinstance(config["file_upload"], dict): - raise ValueError("file_upload must be of dict type") - - # check image config - if not config["file_upload"].get("image"): - config["file_upload"]["image"] = {"enabled": False} - - if config["file_upload"]["image"]["enabled"]: - number_limits = config["file_upload"]["image"]["number_limits"] - if number_limits < 1 or number_limits > 6: - raise ValueError("number_limits must be in [1, 6]") - - if is_vision: - detail = config["file_upload"]["image"]["detail"] - if detail not in {"high", "low"}: - raise ValueError("detail must be in ['high', 'low']") - - transfer_methods = config["file_upload"]["image"]["transfer_methods"] - if not isinstance(transfer_methods, list): - raise ValueError("transfer_methods must be of list type") - for method in transfer_methods: - if method not in {"remote_url", "local_file"}: - raise ValueError("transfer_methods must be in ['remote_url', 'local_file']") + else: + FileUploadConfig.model_validate(config["file_upload"]) return config, ["file_upload"] diff --git a/api/core/app/app_config/workflow_ui_based_app/variables/manager.py b/api/core/app/app_config/workflow_ui_based_app/variables/manager.py index 4b117d87f8..2f1da38082 100644 --- a/api/core/app/app_config/workflow_ui_based_app/variables/manager.py +++ b/api/core/app/app_config/workflow_ui_based_app/variables/manager.py @@ -17,6 +17,6 @@ class WorkflowVariablesConfigManager: # variables for variable in user_input_form: - variables.append(VariableEntity(**variable)) + 
variables.append(VariableEntity.model_validate(variable)) return variables diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py index 7fff925f4b..39ab87c914 100644 --- a/api/core/app/apps/advanced_chat/app_generator.py +++ b/api/core/app/apps/advanced_chat/app_generator.py @@ -21,11 +21,12 @@ from core.app.apps.message_based_app_generator import MessageBasedAppGenerator from core.app.apps.message_based_app_queue_manager import MessageBasedAppQueueManager from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom from core.app.entities.task_entities import ChatbotAppBlockingResponse, ChatbotAppStreamResponse -from core.file.message_file_parser import MessageFileParser from core.model_runtime.errors.invoke import InvokeAuthorizationError, InvokeError from core.ops.ops_trace_manager import TraceQueueManager from extensions.ext_database import db +from factories import file_factory from models.account import Account +from models.enums import CreatedByRole from models.model import App, Conversation, EndUser, Message from models.workflow import Workflow @@ -96,10 +97,16 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): # parse files files = args["files"] if args.get("files") else [] - message_file_parser = MessageFileParser(tenant_id=app_model.tenant_id, app_id=app_model.id) file_extra_config = FileUploadConfigManager.convert(workflow.features_dict, is_vision=False) + role = CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER if file_extra_config: - file_objs = message_file_parser.validate_and_transform_files_arg(files, file_extra_config, user) + file_objs = file_factory.build_from_mappings( + mappings=files, + tenant_id=app_model.tenant_id, + user_id=user.id, + role=role, + config=file_extra_config, + ) else: file_objs = [] @@ -107,8 +114,9 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): app_config = AdvancedChatAppConfigManager.get_app_config(app_model=app_model, workflow=workflow) # get tracing instance - user_id = user.id if isinstance(user, Account) else user.session_id - trace_manager = TraceQueueManager(app_model.id, user_id) + trace_manager = TraceQueueManager( + app_id=app_model.id, user_id=user.id if isinstance(user, Account) else user.session_id + ) if invoke_from == InvokeFrom.DEBUGGER: # always enable retriever resource in debugger mode @@ -120,7 +128,9 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): task_id=str(uuid.uuid4()), app_config=app_config, conversation_id=conversation.id if conversation else None, - inputs=conversation.inputs if conversation else self._get_cleaned_inputs(inputs, app_config), + inputs=conversation.inputs + if conversation + else self._prepare_user_inputs(user_inputs=inputs, app_config=app_config, user_id=user.id, role=role), query=query, files=file_objs, parent_message_id=args.get("parent_message_id") if invoke_from != InvokeFrom.SERVICE_API else UUID_NIL, diff --git a/api/core/app/apps/advanced_chat/app_runner.py b/api/core/app/apps/advanced_chat/app_runner.py index 1dcd051d15..65d744eddf 100644 --- a/api/core/app/apps/advanced_chat/app_runner.py +++ b/api/core/app/apps/advanced_chat/app_runner.py @@ -1,31 +1,27 @@ import logging -import os from collections.abc import Mapping from typing import Any, cast from sqlalchemy import select from sqlalchemy.orm import Session +from configs import dify_config from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfig from 
core.app.apps.base_app_queue_manager import AppQueueManager from core.app.apps.workflow_app_runner import WorkflowBasedAppRunner -from core.app.apps.workflow_logging_callback import WorkflowLoggingCallback -from core.app.entities.app_invoke_entities import ( - AdvancedChatAppGenerateEntity, - InvokeFrom, -) +from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom from core.app.entities.queue_entities import ( QueueAnnotationReplyEvent, QueueStopEvent, QueueTextChunkEvent, ) from core.moderation.base import ModerationError -from core.workflow.callbacks.base_workflow_callback import WorkflowCallback -from core.workflow.entities.node_entities import UserFrom +from core.workflow.callbacks import WorkflowCallback, WorkflowLoggingCallback from core.workflow.entities.variable_pool import VariablePool from core.workflow.enums import SystemVariableKey from core.workflow.workflow_entry import WorkflowEntry from extensions.ext_database import db +from models.enums import UserFrom from models.model import App, Conversation, EndUser, Message from models.workflow import ConversationVariable, WorkflowType @@ -44,12 +40,6 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): conversation: Conversation, message: Message, ) -> None: - """ - :param application_generate_entity: application generate entity - :param queue_manager: application queue manager - :param conversation: conversation - :param message: message - """ super().__init__(queue_manager) self.application_generate_entity = application_generate_entity @@ -57,10 +47,6 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): self.message = message def run(self) -> None: - """ - Run application - :return: - """ app_config = self.application_generate_entity.app_config app_config = cast(AdvancedChatAppConfig, app_config) @@ -81,7 +67,7 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): user_id = self.application_generate_entity.user_id workflow_callbacks: list[WorkflowCallback] = [] - if bool(os.environ.get("DEBUG", "False").lower() == "true"): + if dify_config.DEBUG: workflow_callbacks.append(WorkflowLoggingCallback()) if self.application_generate_entity.single_iteration_run: @@ -201,15 +187,6 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): query: str, message_id: str, ) -> bool: - """ - Handle input moderation - :param app_record: app record - :param app_generate_entity: application generate entity - :param inputs: inputs - :param query: query - :param message_id: message id - :return: - """ try: # process sensitive_word_avoidance _, inputs, query = self.moderation_for_inputs( @@ -229,14 +206,6 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): def handle_annotation_reply( self, app_record: App, message: Message, query: str, app_generate_entity: AdvancedChatAppGenerateEntity ) -> bool: - """ - Handle annotation reply - :param app_record: app record - :param message: message - :param query: query - :param app_generate_entity: application generate entity - """ - # annotation reply annotation_reply = self.query_app_annotations_to_reply( app_record=app_record, message=message, @@ -258,8 +227,6 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): def _complete_with_stream_output(self, text: str, stopped_by: QueueStopEvent.StopBy) -> None: """ Direct output - :param text: text - :return: """ self._publish_event(QueueTextChunkEvent(text=text)) diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py index 
fd63c7787f..e4cb3f8527 100644 --- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py +++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py @@ -1,7 +1,7 @@ import json import logging import time -from collections.abc import Generator +from collections.abc import Generator, Mapping from typing import Any, Optional, Union from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME @@ -9,6 +9,7 @@ from core.app.apps.advanced_chat.app_generator_tts_publisher import AppGenerator from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom from core.app.entities.app_invoke_entities import ( AdvancedChatAppGenerateEntity, + InvokeFrom, ) from core.app.entities.queue_entities import ( QueueAdvancedChatMessageEndEvent, @@ -50,10 +51,12 @@ from core.model_runtime.utils.encoders import jsonable_encoder from core.ops.ops_trace_manager import TraceQueueManager from core.workflow.enums import SystemVariableKey from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState +from core.workflow.nodes import NodeType from events.message_event import message_was_created from extensions.ext_database import db +from models import Conversation, EndUser, Message, MessageFile from models.account import Account -from models.model import Conversation, EndUser, Message +from models.enums import CreatedByRole from models.workflow import ( Workflow, WorkflowNodeExecution, @@ -120,6 +123,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc self._wip_workflow_node_executions = {} self._conversation_name_generate_thread = None + self._recorded_files: list[Mapping[str, Any]] = [] def process(self): """ @@ -298,6 +302,10 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc elif isinstance(event, QueueNodeSucceededEvent): workflow_node_execution = self._handle_workflow_node_execution_success(event) + # Record files if it's an answer node or end node + if event.node_type in [NodeType.ANSWER, NodeType.END]: + self._recorded_files.extend(self._fetch_files_from_node_outputs(event.outputs or {})) + response = self._workflow_node_finish_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, @@ -364,7 +372,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc start_at=graph_runtime_state.start_at, total_tokens=graph_runtime_state.total_tokens, total_steps=graph_runtime_state.node_run_steps, - outputs=json.dumps(event.outputs) if event.outputs else None, + outputs=event.outputs, conversation_id=self._conversation.id, trace_manager=trace_manager, ) @@ -490,10 +498,6 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc self._conversation_name_generate_thread.join() def _save_message(self, graph_runtime_state: Optional[GraphRuntimeState] = None) -> None: - """ - Save message. 
- :return: - """ self._refetch_message() self._message.answer = self._task_state.answer @@ -501,6 +505,22 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc self._message.message_metadata = ( json.dumps(jsonable_encoder(self._task_state.metadata)) if self._task_state.metadata else None ) + message_files = [ + MessageFile( + message_id=self._message.id, + type=file["type"], + transfer_method=file["transfer_method"], + url=file["remote_url"], + belongs_to="assistant", + upload_file_id=file["related_id"], + created_by_role=CreatedByRole.ACCOUNT + if self._message.invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER} + else CreatedByRole.END_USER, + created_by=self._message.from_account_id or self._message.from_end_user_id or "", + ) + for file in self._recorded_files + ] + db.session.add_all(message_files) if graph_runtime_state and graph_runtime_state.llm_usage: usage = graph_runtime_state.llm_usage @@ -540,7 +560,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc del extras["metadata"]["annotation_reply"] return MessageEndStreamResponse( - task_id=self._application_generate_entity.task_id, id=self._message.id, **extras + task_id=self._application_generate_entity.task_id, id=self._message.id, files=self._recorded_files, **extras ) def _handle_output_moderation_chunk(self, text: str) -> bool: diff --git a/api/core/app/apps/agent_chat/app_generator.py b/api/core/app/apps/agent_chat/app_generator.py index 81c3c765dc..de12f5a441 100644 --- a/api/core/app/apps/agent_chat/app_generator.py +++ b/api/core/app/apps/agent_chat/app_generator.py @@ -18,12 +18,12 @@ from core.app.apps.base_app_queue_manager import AppQueueManager, GenerateTaskSt from core.app.apps.message_based_app_generator import MessageBasedAppGenerator from core.app.apps.message_based_app_queue_manager import MessageBasedAppQueueManager from core.app.entities.app_invoke_entities import AgentChatAppGenerateEntity, InvokeFrom -from core.file.message_file_parser import MessageFileParser from core.model_runtime.errors.invoke import InvokeAuthorizationError, InvokeError from core.ops.ops_trace_manager import TraceQueueManager from extensions.ext_database import db -from models.account import Account -from models.model import App, EndUser +from factories import file_factory +from models import Account, App, EndUser +from models.enums import CreatedByRole logger = logging.getLogger(__name__) @@ -50,7 +50,12 @@ class AgentChatAppGenerator(MessageBasedAppGenerator): ) -> dict: ... def generate( - self, app_model: App, user: Union[Account, EndUser], args: Any, invoke_from: InvokeFrom, stream: bool = True + self, + app_model: App, + user: Union[Account, EndUser], + args: Any, + invoke_from: InvokeFrom, + stream: bool = True, ) -> Union[dict, Generator[dict, None, None]]: """ Generate App response. 
@@ -98,12 +103,19 @@ class AgentChatAppGenerator(MessageBasedAppGenerator): # always enable retriever resource in debugger mode override_model_config_dict["retriever_resource"] = {"enabled": True} + role = CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER + # parse files - files = args["files"] if args.get("files") else [] - message_file_parser = MessageFileParser(tenant_id=app_model.tenant_id, app_id=app_model.id) + files = args.get("files") or [] file_extra_config = FileUploadConfigManager.convert(override_model_config_dict or app_model_config.to_dict()) if file_extra_config: - file_objs = message_file_parser.validate_and_transform_files_arg(files, file_extra_config, user) + file_objs = file_factory.build_from_mappings( + mappings=files, + tenant_id=app_model.tenant_id, + user_id=user.id, + role=role, + config=file_extra_config, + ) else: file_objs = [] @@ -116,8 +128,7 @@ class AgentChatAppGenerator(MessageBasedAppGenerator): ) # get tracing instance - user_id = user.id if isinstance(user, Account) else user.session_id - trace_manager = TraceQueueManager(app_model.id, user_id) + trace_manager = TraceQueueManager(app_model.id, user.id if isinstance(user, Account) else user.session_id) # init application generate entity application_generate_entity = AgentChatAppGenerateEntity( @@ -125,7 +136,9 @@ class AgentChatAppGenerator(MessageBasedAppGenerator): app_config=app_config, model_conf=ModelConfigConverter.convert(app_config), conversation_id=conversation.id if conversation else None, - inputs=conversation.inputs if conversation else self._get_cleaned_inputs(inputs, app_config), + inputs=conversation.inputs + if conversation + else self._prepare_user_inputs(user_inputs=inputs, app_config=app_config, user_id=user.id, role=role), query=query, files=file_objs, parent_message_id=args.get("parent_message_id") if invoke_from != InvokeFrom.SERVICE_API else UUID_NIL, diff --git a/api/core/app/apps/base_app_generator.py b/api/core/app/apps/base_app_generator.py index 15be7000fc..2707ada6cb 100644 --- a/api/core/app/apps/base_app_generator.py +++ b/api/core/app/apps/base_app_generator.py @@ -1,35 +1,92 @@ from collections.abc import Mapping -from typing import Any, Optional +from typing import TYPE_CHECKING, Any, Optional -from core.app.app_config.entities import AppConfig, VariableEntity, VariableEntityType +from core.app.app_config.entities import VariableEntityType +from core.file import File, FileExtraConfig +from factories import file_factory + +if TYPE_CHECKING: + from core.app.app_config.entities import AppConfig, VariableEntity + from models.enums import CreatedByRole class BaseAppGenerator: - def _get_cleaned_inputs(self, user_inputs: Optional[Mapping[str, Any]], app_config: AppConfig) -> Mapping[str, Any]: + def _prepare_user_inputs( + self, + *, + user_inputs: Optional[Mapping[str, Any]], + app_config: "AppConfig", + user_id: str, + role: "CreatedByRole", + ) -> Mapping[str, Any]: user_inputs = user_inputs or {} # Filter input variables from form configuration, handle required fields, default values, and option values variables = app_config.variables - filtered_inputs = {var.variable: self._validate_input(inputs=user_inputs, var=var) for var in variables} - filtered_inputs = {k: self._sanitize_value(v) for k, v in filtered_inputs.items()} - return filtered_inputs + user_inputs = {var.variable: self._validate_input(inputs=user_inputs, var=var) for var in variables} + user_inputs = {k: self._sanitize_value(v) for k, v in user_inputs.items()} + # Convert files in 
diff --git a/api/core/app/apps/base_app_generator.py b/api/core/app/apps/base_app_generator.py
index 15be7000fc..2707ada6cb 100644
--- a/api/core/app/apps/base_app_generator.py
+++ b/api/core/app/apps/base_app_generator.py
@@ -1,35 +1,92 @@
 from collections.abc import Mapping
-from typing import Any, Optional
+from typing import TYPE_CHECKING, Any, Optional

-from core.app.app_config.entities import AppConfig, VariableEntity, VariableEntityType
+from core.app.app_config.entities import VariableEntityType
+from core.file import File, FileExtraConfig
+from factories import file_factory
+
+if TYPE_CHECKING:
+    from core.app.app_config.entities import AppConfig, VariableEntity
+    from models.enums import CreatedByRole


 class BaseAppGenerator:
-    def _get_cleaned_inputs(self, user_inputs: Optional[Mapping[str, Any]], app_config: AppConfig) -> Mapping[str, Any]:
+    def _prepare_user_inputs(
+        self,
+        *,
+        user_inputs: Optional[Mapping[str, Any]],
+        app_config: "AppConfig",
+        user_id: str,
+        role: "CreatedByRole",
+    ) -> Mapping[str, Any]:
         user_inputs = user_inputs or {}
         # Filter input variables from form configuration, handle required fields, default values, and option values
         variables = app_config.variables
-        filtered_inputs = {var.variable: self._validate_input(inputs=user_inputs, var=var) for var in variables}
-        filtered_inputs = {k: self._sanitize_value(v) for k, v in filtered_inputs.items()}
-        return filtered_inputs
+        user_inputs = {var.variable: self._validate_input(inputs=user_inputs, var=var) for var in variables}
+        user_inputs = {k: self._sanitize_value(v) for k, v in user_inputs.items()}
+        # Convert files in inputs to File
+        entity_dictionary = {item.variable: item for item in app_config.variables}
+        # Convert single file to File
+        files_inputs = {
+            k: file_factory.build_from_mapping(
+                mapping=v,
+                tenant_id=app_config.tenant_id,
+                user_id=user_id,
+                role=role,
+                config=FileExtraConfig(
+                    allowed_file_types=entity_dictionary[k].allowed_file_types,
+                    allowed_extensions=entity_dictionary[k].allowed_file_extensions,
+                    allowed_upload_methods=entity_dictionary[k].allowed_file_upload_methods,
+                ),
+            )
+            for k, v in user_inputs.items()
+            if isinstance(v, dict) and entity_dictionary[k].type == VariableEntityType.FILE
+        }
+        # Convert list of files to File
+        file_list_inputs = {
+            k: file_factory.build_from_mappings(
+                mappings=v,
+                tenant_id=app_config.tenant_id,
+                user_id=user_id,
+                role=role,
+                config=FileExtraConfig(
+                    allowed_file_types=entity_dictionary[k].allowed_file_types,
+                    allowed_extensions=entity_dictionary[k].allowed_file_extensions,
+                    allowed_upload_methods=entity_dictionary[k].allowed_file_upload_methods,
+                ),
+            )
+            for k, v in user_inputs.items()
+            if isinstance(v, list)
+            # Skip lists that contain non-dict items
+            and all(isinstance(item, dict) for item in v)
+            and entity_dictionary[k].type == VariableEntityType.FILE_LIST
+        }
+        # Merge all inputs
+        user_inputs = {**user_inputs, **files_inputs, **file_list_inputs}

-    def _validate_input(self, *, inputs: Mapping[str, Any], var: VariableEntity):
-        user_input_value = inputs.get(var.variable)
-        if var.required and not user_input_value:
-            raise ValueError(f"{var.variable} is required in input form")
-        if not var.required and not user_input_value:
-            # TODO: should we return None here if the default value is None?
-            return var.default or ""
-        if (
-            var.type
-            in {
-                VariableEntityType.TEXT_INPUT,
-                VariableEntityType.SELECT,
-                VariableEntityType.PARAGRAPH,
-            }
-            and user_input_value
-            and not isinstance(user_input_value, str)
+        # Check that all file inputs have been converted to File
+        if any(filter(lambda v: isinstance(v, dict), user_inputs.values())):
+            raise ValueError("Invalid input type")
+        if any(
+            isinstance(item, dict) for v in user_inputs.values() if isinstance(v, list) for item in v
         ):
+            raise ValueError("Invalid input type")
+
+        return user_inputs
+
+    def _validate_input(self, *, inputs: Mapping[str, Any], var: "VariableEntity"):
+        user_input_value = inputs.get(var.variable)
+        if not user_input_value:
+            if var.required:
+                raise ValueError(f"{var.variable} is required in input form")
+            else:
+                return None
+
+        if var.type in {
+            VariableEntityType.TEXT_INPUT,
+            VariableEntityType.SELECT,
+            VariableEntityType.PARAGRAPH,
+        } and not isinstance(user_input_value, str):
             raise ValueError(f"(type '{var.type}') {var.variable} in input form must be a string")
         if var.type == VariableEntityType.NUMBER and isinstance(user_input_value, str):
             # may raise ValueError if user_input_value is not a valid number
@@ -41,12 +98,24 @@ class BaseAppGenerator:
             except ValueError:
                 raise ValueError(f"{var.variable} in input form must be a valid number")
         if var.type == VariableEntityType.SELECT:
-            options = var.options or []
+            options = var.options
             if user_input_value not in options:
                 raise ValueError(f"{var.variable} in input form must be one of the following: {options}")
         elif var.type in {VariableEntityType.TEXT_INPUT, VariableEntityType.PARAGRAPH}:
-            if var.max_length and user_input_value and len(user_input_value) > var.max_length:
+            if var.max_length and len(user_input_value) > var.max_length:
                 raise ValueError(f"{var.variable} in input form must be less than {var.max_length} characters")
+        elif var.type == VariableEntityType.FILE:
+            if not isinstance(user_input_value, dict) and not isinstance(user_input_value, File):
+                raise ValueError(f"{var.variable} in input form must be a file")
+        elif var.type == VariableEntityType.FILE_LIST:
+            if not (
+                isinstance(user_input_value, list)
+                and (
+                    all(isinstance(item, dict) for item in user_input_value)
+                    or all(isinstance(item, File) for item in user_input_value)
+                )
+            ):
+                raise ValueError(f"{var.variable} in input form must be a list of files")

         return user_input_value
diff --git a/api/core/app/apps/base_app_runner.py b/api/core/app/apps/base_app_runner.py
index 203aca3384..609fd03f22 100644
--- a/api/core/app/apps/base_app_runner.py
+++ b/api/core/app/apps/base_app_runner.py
@@ -27,7 +27,7 @@ from core.prompt.simple_prompt_transform import ModelMode, SimplePromptTransform
 from models.model import App, AppMode, Message, MessageAnnotation

 if TYPE_CHECKING:
-    from core.file.file_obj import FileVar
+    from core.file.models import File


 class AppRunner:
@@ -37,7 +37,7 @@ class AppRunner:
         model_config: ModelConfigWithCredentialsEntity,
         prompt_template_entity: PromptTemplateEntity,
         inputs: dict[str, str],
-        files: list["FileVar"],
+        files: list["File"],
         query: Optional[str] = None,
     ) -> int:
         """
@@ -137,7 +137,7 @@ class AppRunner:
         model_config: ModelConfigWithCredentialsEntity,
         prompt_template_entity: PromptTemplateEntity,
         inputs: dict[str, str],
-        files: list["FileVar"],
+        files: list["File"],
         query: Optional[str] = None,
         context: Optional[str] = None,
         memory: Optional[TokenBufferMemory] = None,
diff --git a/api/core/app/apps/chat/app_generator.py b/api/core/app/apps/chat/app_generator.py
index 49b56ecc67..5c074f5306 100644
--- a/api/core/app/apps/chat/app_generator.py
+++ b/api/core/app/apps/chat/app_generator.py
@@ -18,11 +18,12 @@ from core.app.apps.chat.generate_response_converter import ChatAppGenerateRespon
 from core.app.apps.message_based_app_generator import MessageBasedAppGenerator
 from core.app.apps.message_based_app_queue_manager import MessageBasedAppQueueManager
 from core.app.entities.app_invoke_entities import ChatAppGenerateEntity, InvokeFrom
-from core.file.message_file_parser import MessageFileParser
 from core.model_runtime.errors.invoke import InvokeAuthorizationError, InvokeError
 from core.ops.ops_trace_manager import TraceQueueManager
 from extensions.ext_database import db
+from factories import file_factory
 from models.account import Account
+from models.enums import CreatedByRole
 from models.model import App, EndUser

 logger = logging.getLogger(__name__)
@@ -100,12 +101,19 @@ class ChatAppGenerator(MessageBasedAppGenerator):
                # always enable retriever resource in debugger mode
                override_model_config_dict["retriever_resource"] = {"enabled": True}

        role = CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER

        # parse files
        files = args["files"] if args.get("files") else []
-        message_file_parser = MessageFileParser(tenant_id=app_model.tenant_id, app_id=app_model.id)
        file_extra_config = FileUploadConfigManager.convert(override_model_config_dict or app_model_config.to_dict())
        if file_extra_config:
-            file_objs = message_file_parser.validate_and_transform_files_arg(files, file_extra_config, user)
+            file_objs = file_factory.build_from_mappings(
+                mappings=files,
+                tenant_id=app_model.tenant_id,
+                user_id=user.id,
+                role=role,
+                config=file_extra_config,
+            )
        else:
            file_objs = []
@@ -118,7 +126,7 @@ class ChatAppGenerator(MessageBasedAppGenerator):
        )

        # get tracing instance
-        trace_manager = TraceQueueManager(app_model.id)
+        trace_manager = TraceQueueManager(app_id=app_model.id)

        # init application generate entity
        application_generate_entity = ChatAppGenerateEntity(
@@ -126,15 +134,17 @@ class ChatAppGenerator(MessageBasedAppGenerator):
            app_config=app_config,
            model_conf=ModelConfigConverter.convert(app_config),
            conversation_id=conversation.id if conversation else None,
-            inputs=conversation.inputs if conversation else self._get_cleaned_inputs(inputs, app_config),
+            inputs=conversation.inputs
+            if conversation
+            else self._prepare_user_inputs(user_inputs=inputs, app_config=app_config, user_id=user.id, role=role),
            query=query,
            files=file_objs,
            parent_message_id=args.get("parent_message_id") if invoke_from != InvokeFrom.SERVICE_API else UUID_NIL,
            user_id=user.id,
-            stream=stream,
            invoke_from=invoke_from,
            extras=extras,
            trace_manager=trace_manager,
+            stream=stream,
        )

        # init generate records
diff --git a/api/core/app/apps/completion/app_generator.py b/api/core/app/apps/completion/app_generator.py
index 7fce296f2b..46450d39c0 100644
--- a/api/core/app/apps/completion/app_generator.py
+++ b/api/core/app/apps/completion/app_generator.py
@@ -17,12 +17,12 @@ from core.app.apps.completion.generate_response_converter import CompletionAppGe
 from core.app.apps.message_based_app_generator import MessageBasedAppGenerator
 from core.app.apps.message_based_app_queue_manager import MessageBasedAppQueueManager
 from core.app.entities.app_invoke_entities import CompletionAppGenerateEntity, InvokeFrom
-from core.file.message_file_parser import MessageFileParser
 from core.model_runtime.errors.invoke import InvokeAuthorizationError, InvokeError
 from core.ops.ops_trace_manager import TraceQueueManager
 from extensions.ext_database import db
-from models.account import Account
-from models.model import App, EndUser, Message
+from factories import file_factory
+from models import Account, App, EndUser, Message
+from models.enums import CreatedByRole
 from services.errors.app import MoreLikeThisDisabledError
 from services.errors.message import MessageNotExistsError

@@ -88,12 +88,19 @@ class CompletionAppGenerator(MessageBasedAppGenerator):
            tenant_id=app_model.tenant_id, config=args.get("model_config")
        )

        role = CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER

        # parse files
        files = args["files"] if args.get("files") else []
-        message_file_parser = MessageFileParser(tenant_id=app_model.tenant_id, app_id=app_model.id)
        file_extra_config = FileUploadConfigManager.convert(override_model_config_dict or app_model_config.to_dict())
        if file_extra_config:
-            file_objs = message_file_parser.validate_and_transform_files_arg(files, file_extra_config, user)
+            file_objs = file_factory.build_from_mappings(
+                mappings=files,
+                tenant_id=app_model.tenant_id,
+                user_id=user.id,
+                role=role,
+                config=file_extra_config,
+            )
        else:
            file_objs = []
@@ -103,6 +110,7 @@ class CompletionAppGenerator(MessageBasedAppGenerator):
        )

        # get tracing instance
-        trace_manager = TraceQueueManager(app_model.id)
+        user_id = user.id if isinstance(user, Account) else user.session_id
+        trace_manager = TraceQueueManager(app_model.id, user_id)

        # init application generate entity
@@ -110,7 +118,7 @@ class CompletionAppGenerator(MessageBasedAppGenerator):
            task_id=str(uuid.uuid4()),
            app_config=app_config,
            model_conf=ModelConfigConverter.convert(app_config),
-            inputs=self._get_cleaned_inputs(inputs, app_config),
+            inputs=self._prepare_user_inputs(user_inputs=inputs, app_config=app_config, user_id=user.id, role=role),
            query=query,
            files=file_objs,
            user_id=user.id,
@@ -251,10 +259,16 @@ class CompletionAppGenerator(MessageBasedAppGenerator):
            override_model_config_dict["model"] = model_dict

        # parse files
-        message_file_parser = MessageFileParser(tenant_id=app_model.tenant_id, app_id=app_model.id)
-        file_extra_config = FileUploadConfigManager.convert(override_model_config_dict or app_model_config.to_dict())
+        role = CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER
+        file_extra_config = FileUploadConfigManager.convert(override_model_config_dict)
        if file_extra_config:
-            file_objs = message_file_parser.validate_and_transform_files_arg(message.files, file_extra_config, user)
+            file_objs = file_factory.build_from_mappings(
+                mappings=message.message_files,
+                tenant_id=app_model.tenant_id,
+                user_id=user.id,
+                role=role,
+                config=file_extra_config,
+            )
        else:
            file_objs = []
diff --git a/api/core/app/apps/message_based_app_generator.py b/api/core/app/apps/message_based_app_generator.py
index 65b759acf5..bae64368e3 100644
--- a/api/core/app/apps/message_based_app_generator.py
+++ b/api/core/app/apps/message_based_app_generator.py
@@ -26,7 +26,8 @@ from core.app.entities.task_entities import (
 from core.app.task_pipeline.easy_ui_based_generate_task_pipeline import EasyUIBasedGenerateTaskPipeline
 from core.prompt.utils.prompt_template_parser import PromptTemplateParser
 from extensions.ext_database import db
-from models.account import Account
+from models import Account
+from models.enums import CreatedByRole
 from models.model import App, AppMode, AppModelConfig, Conversation, EndUser, Message, MessageFile
 from services.errors.app_model_config import AppModelConfigBrokenError
 from services.errors.conversation import ConversationCompletedError, ConversationNotExistsError
@@ -235,13 +236,13 @@ class MessageBasedAppGenerator(BaseAppGenerator):
         for file in application_generate_entity.files:
             message_file = MessageFile(
                 message_id=message.id,
-                type=file.type.value,
-                transfer_method=file.transfer_method.value,
+                type=file.type,
+                transfer_method=file.transfer_method,
                 belongs_to="user",
-                url=file.url,
+                url=file.remote_url,
                 upload_file_id=file.related_id,
-                created_by_role=("account" if account_id else "end_user"),
-                created_by=account_id or end_user_id,
+                created_by_role=(CreatedByRole.ACCOUNT if account_id else CreatedByRole.END_USER),
+                created_by=account_id or end_user_id or "",
             )
             db.session.add(message_file)
         db.session.commit()
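
The CreatedByRole.ACCOUNT-or-END_USER expression now recurs in every generator; a hypothetical helper (not part of this changeset) makes the encoded rule explicit:

# Hypothetical refactoring sketch of the repeated role-selection pattern above.
from typing import Union

from models import Account, EndUser
from models.enums import CreatedByRole

def created_by_role_of(user: Union[Account, EndUser]) -> CreatedByRole:
    # Console and API users are accounts; everyone else is an end user.
    return CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER
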
diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py
index bd0ab53278..a865c8a68b 100644
--- a/api/core/app/apps/workflow/app_generator.py
+++ b/api/core/app/apps/workflow/app_generator.py
@@ -3,7 +3,7 @@ import logging
 import os
 import threading
 import uuid
-from collections.abc import Generator
+from collections.abc import Generator, Mapping, Sequence
 from typing import Any, Literal, Optional, Union, overload

 from flask import Flask, current_app
@@ -20,13 +20,12 @@ from core.app.apps.workflow.generate_response_converter import WorkflowAppGenera
 from core.app.apps.workflow.generate_task_pipeline import WorkflowAppGenerateTaskPipeline
 from core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity
 from core.app.entities.task_entities import WorkflowAppBlockingResponse, WorkflowAppStreamResponse
-from core.file.message_file_parser import MessageFileParser
 from core.model_runtime.errors.invoke import InvokeAuthorizationError, InvokeError
 from core.ops.ops_trace_manager import TraceQueueManager
 from extensions.ext_database import db
-from models.account import Account
-from models.model import App, EndUser
-from models.workflow import Workflow
+from factories import file_factory
+from models import Account, App, EndUser, Workflow
+from models.enums import CreatedByRole

 logger = logging.getLogger(__name__)

@@ -63,49 +62,46 @@ class WorkflowAppGenerator(BaseAppGenerator):
        app_model: App,
        workflow: Workflow,
        user: Union[Account, EndUser],
-        args: dict,
+        args: Mapping[str, Any],
        invoke_from: InvokeFrom,
        stream: bool = True,
        call_depth: int = 0,
        workflow_thread_pool_id: Optional[str] = None,
    ):
-        """
-        Generate App response.
+        files: Sequence[Mapping[str, Any]] = args.get("files") or []

-        :param app_model: App
-        :param workflow: Workflow
-        :param user: account or end user
-        :param args: request args
-        :param invoke_from: invoke from source
-        :param stream: is stream
-        :param call_depth: call depth
-        :param workflow_thread_pool_id: workflow thread pool id
-        """
-        inputs = args["inputs"]
+        role = CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER

        # parse files
-        files = args["files"] if args.get("files") else []
-        message_file_parser = MessageFileParser(tenant_id=app_model.tenant_id, app_id=app_model.id)
        file_extra_config = FileUploadConfigManager.convert(workflow.features_dict, is_vision=False)
-        if file_extra_config:
-            file_objs = message_file_parser.validate_and_transform_files_arg(files, file_extra_config, user)
-        else:
-            file_objs = []
+        system_files = file_factory.build_from_mappings(
+            mappings=files,
+            tenant_id=app_model.tenant_id,
+            user_id=user.id,
+            role=role,
+            config=file_extra_config,
+        )

        # convert to app config
-        app_config = WorkflowAppConfigManager.get_app_config(app_model=app_model, workflow=workflow)
+        app_config = WorkflowAppConfigManager.get_app_config(
+            app_model=app_model,
+            workflow=workflow,
+        )

        # get tracing instance
-        user_id = user.id if isinstance(user, Account) else user.session_id
-        trace_manager = TraceQueueManager(app_model.id, user_id)
+        trace_manager = TraceQueueManager(
+            app_id=app_model.id,
+            user_id=user.id if isinstance(user, Account) else user.session_id,
+        )

+        inputs: Mapping[str, Any] = args["inputs"]
        workflow_run_id = str(uuid.uuid4())
        # init application generate entity
        application_generate_entity = WorkflowAppGenerateEntity(
            task_id=str(uuid.uuid4()),
            app_config=app_config,
-            inputs=self._get_cleaned_inputs(inputs, app_config),
-            files=file_objs,
+            inputs=self._prepare_user_inputs(user_inputs=inputs, app_config=app_config, user_id=user.id, role=role),
+            files=system_files,
            user_id=user.id,
            stream=stream,
            invoke_from=invoke_from,
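
A sketch of the args mapping this entry point now consumes; the exact file-mapping keys are assumptions drawn from the validation rules removed from MessageFileParser later in this changeset:

# Illustrative request args for WorkflowAppGenerator.generate(); key names
# for the file mapping are assumptions, not confirmed by this diff.
args = {
    "inputs": {"query": "Summarize this document"},
    "files": [
        {
            "type": "document",
            "transfer_method": "local_file",
            "upload_file_id": "a-previously-uploaded-file-id",  # hypothetical
        }
    ],
}
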
diff --git a/api/core/app/apps/workflow/app_runner.py b/api/core/app/apps/workflow/app_runner.py
index 378a4bb8bc..faefcb0ed5 100644
--- a/api/core/app/apps/workflow/app_runner.py
+++ b/api/core/app/apps/workflow/app_runner.py
@@ -1,21 +1,20 @@
 import logging
-import os
 from typing import Optional, cast

+from configs import dify_config
 from core.app.apps.base_app_queue_manager import AppQueueManager
 from core.app.apps.workflow.app_config_manager import WorkflowAppConfig
 from core.app.apps.workflow_app_runner import WorkflowBasedAppRunner
-from core.app.apps.workflow_logging_callback import WorkflowLoggingCallback
 from core.app.entities.app_invoke_entities import (
     InvokeFrom,
     WorkflowAppGenerateEntity,
 )
-from core.workflow.callbacks.base_workflow_callback import WorkflowCallback
-from core.workflow.entities.node_entities import UserFrom
+from core.workflow.callbacks import WorkflowCallback, WorkflowLoggingCallback
 from core.workflow.entities.variable_pool import VariablePool
 from core.workflow.enums import SystemVariableKey
 from core.workflow.workflow_entry import WorkflowEntry
 from extensions.ext_database import db
+from models.enums import UserFrom
 from models.model import App, EndUser
 from models.workflow import WorkflowType

@@ -71,7 +70,7 @@ class WorkflowAppRunner(WorkflowBasedAppRunner):
             db.session.close()

         workflow_callbacks: list[WorkflowCallback] = []
-        if bool(os.environ.get("DEBUG", "False").lower() == "true"):
+        if dify_config.DEBUG:
             workflow_callbacks.append(WorkflowLoggingCallback())

         # if only single iteration run is requested
diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py
index 7c53556e43..419a5da806 100644
--- a/api/core/app/apps/workflow/generate_task_pipeline.py
+++ b/api/core/app/apps/workflow/generate_task_pipeline.py
@@ -1,4 +1,3 @@
-import json
 import logging
 import time
 from collections.abc import Generator
@@ -334,9 +333,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa
                     start_at=graph_runtime_state.start_at,
                     total_tokens=graph_runtime_state.total_tokens,
                     total_steps=graph_runtime_state.node_run_steps,
-                    outputs=json.dumps(event.outputs)
-                    if isinstance(event, QueueWorkflowSucceededEvent) and event.outputs
-                    else None,
+                    outputs=event.outputs,
                     conversation_id=None,
                     trace_manager=trace_manager,
                 )
diff --git a/api/core/app/apps/workflow_app_runner.py b/api/core/app/apps/workflow_app_runner.py
index ce266116a7..ca23bbdd47 100644
--- a/api/core/app/apps/workflow_app_runner.py
+++ b/api/core/app/apps/workflow_app_runner.py
@@ -20,7 +20,6 @@ from core.app.entities.queue_entities import (
     QueueWorkflowStartedEvent,
     QueueWorkflowSucceededEvent,
 )
-from core.workflow.entities.node_entities import NodeType
 from core.workflow.entities.variable_pool import VariablePool
 from core.workflow.graph_engine.entities.event import (
     GraphEngineEvent,
@@ -41,9 +40,9 @@ from core.workflow.graph_engine.entities.event import (
     ParallelBranchRunSucceededEvent,
 )
 from core.workflow.graph_engine.entities.graph import Graph
-from core.workflow.nodes.base_node import BaseNode
-from core.workflow.nodes.iteration.entities import IterationNodeData
-from core.workflow.nodes.node_mapping import node_classes
+from core.workflow.nodes import NodeType
+from core.workflow.nodes.iteration import IterationNodeData
+from core.workflow.nodes.node_mapping import node_type_classes_mapping
 from core.workflow.workflow_entry import WorkflowEntry
 from extensions.ext_database import db
 from models.model import App
@@ -137,9 +136,8 @@ class WorkflowBasedAppRunner(AppRunner):
             raise ValueError("iteration node id not found in workflow graph")

         # Get node class
-        node_type = NodeType.value_of(iteration_node_config.get("data", {}).get("type"))
-        node_cls = node_classes.get(node_type)
-        node_cls = cast(type[BaseNode], node_cls)
+        node_type = NodeType(iteration_node_config.get("data", {}).get("type"))
+        node_cls = node_type_classes_mapping[node_type]

         # init variable pool
         variable_pool = VariablePool(
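
The hand-rolled value_of classmethod gives way to the standard Enum constructor; a toy enum (members hypothetical, not Dify's actual NodeType) shows the equivalence, including the fail-fast behavior on unknown values:

from enum import Enum

class NodeType(str, Enum):
    LLM = "llm"
    ITERATION = "iteration"

# Enum construction by value replaces the custom value_of() lookup:
assert NodeType("llm") is NodeType.LLM
# Unknown values still fail fast, now with a plain ValueError; likewise,
# indexing node_type_classes_mapping raises KeyError instead of the old
# .get() silently returning None.
try:
    NodeType("unknown")
except ValueError as e:
    print(e)  # 'unknown' is not a valid NodeType
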
diff --git a/api/core/app/apps/workflow_logging_callback.py b/api/core/app/apps/workflow_logging_callback.py
deleted file mode 100644
index 60683b0f21..0000000000
--- a/api/core/app/apps/workflow_logging_callback.py
+++ /dev/null
@@ -1,220 +0,0 @@
-from typing import Optional
-
-from core.model_runtime.utils.encoders import jsonable_encoder
-from core.workflow.callbacks.base_workflow_callback import WorkflowCallback
-from core.workflow.graph_engine.entities.event import (
-    GraphEngineEvent,
-    GraphRunFailedEvent,
-    GraphRunStartedEvent,
-    GraphRunSucceededEvent,
-    IterationRunFailedEvent,
-    IterationRunNextEvent,
-    IterationRunStartedEvent,
-    IterationRunSucceededEvent,
-    NodeRunFailedEvent,
-    NodeRunStartedEvent,
-    NodeRunStreamChunkEvent,
-    NodeRunSucceededEvent,
-    ParallelBranchRunFailedEvent,
-    ParallelBranchRunStartedEvent,
-    ParallelBranchRunSucceededEvent,
-)
-
-_TEXT_COLOR_MAPPING = {
-    "blue": "36;1",
-    "yellow": "33;1",
-    "pink": "38;5;200",
-    "green": "32;1",
-    "red": "31;1",
-}
-
-
-class WorkflowLoggingCallback(WorkflowCallback):
-    def __init__(self) -> None:
-        self.current_node_id = None
-
-    def on_event(self, event: GraphEngineEvent) -> None:
-        if isinstance(event, GraphRunStartedEvent):
-            self.print_text("\n[GraphRunStartedEvent]", color="pink")
-        elif isinstance(event, GraphRunSucceededEvent):
-            self.print_text("\n[GraphRunSucceededEvent]", color="green")
-        elif isinstance(event, GraphRunFailedEvent):
-            self.print_text(f"\n[GraphRunFailedEvent] reason: {event.error}", color="red")
-        elif isinstance(event, NodeRunStartedEvent):
-            self.on_workflow_node_execute_started(event=event)
-        elif isinstance(event, NodeRunSucceededEvent):
-            self.on_workflow_node_execute_succeeded(event=event)
-        elif isinstance(event, NodeRunFailedEvent):
-            self.on_workflow_node_execute_failed(event=event)
-        elif isinstance(event, NodeRunStreamChunkEvent):
-            self.on_node_text_chunk(event=event)
-        elif isinstance(event, ParallelBranchRunStartedEvent):
-            self.on_workflow_parallel_started(event=event)
-        elif isinstance(event, ParallelBranchRunSucceededEvent | ParallelBranchRunFailedEvent):
-            self.on_workflow_parallel_completed(event=event)
-        elif isinstance(event, IterationRunStartedEvent):
-            self.on_workflow_iteration_started(event=event)
-        elif isinstance(event, IterationRunNextEvent):
-            self.on_workflow_iteration_next(event=event)
-        elif isinstance(event, IterationRunSucceededEvent | IterationRunFailedEvent):
-            self.on_workflow_iteration_completed(event=event)
-        else:
-            self.print_text(f"\n[{event.__class__.__name__}]", color="blue")
-
-    def on_workflow_node_execute_started(self, event: NodeRunStartedEvent) -> None:
-        """
-        Workflow node execute started
-        """
-        self.print_text("\n[NodeRunStartedEvent]", color="yellow")
-        self.print_text(f"Node ID: {event.node_id}", color="yellow")
-        self.print_text(f"Node Title: {event.node_data.title}", color="yellow")
-        self.print_text(f"Type: {event.node_type.value}", color="yellow")
-
-    def on_workflow_node_execute_succeeded(self, event: NodeRunSucceededEvent) -> None:
-        """
-        Workflow node execute succeeded
-        """
-        route_node_state = event.route_node_state
-
-        self.print_text("\n[NodeRunSucceededEvent]", color="green")
-        self.print_text(f"Node ID: {event.node_id}", color="green")
-        self.print_text(f"Node Title: {event.node_data.title}", color="green")
-        self.print_text(f"Type: {event.node_type.value}", color="green")
-
-        if route_node_state.node_run_result:
-            node_run_result = route_node_state.node_run_result
-            self.print_text(
-                f"Inputs: {jsonable_encoder(node_run_result.inputs) if node_run_result.inputs else ''}",
-                color="green",
-            )
-            self.print_text(
-                f"Process Data: "
-                f"{jsonable_encoder(node_run_result.process_data) if node_run_result.process_data else ''}",
-                color="green",
-            )
-            self.print_text(
-                f"Outputs: {jsonable_encoder(node_run_result.outputs) if node_run_result.outputs else ''}",
-                color="green",
-            )
-            self.print_text(
-                f"Metadata: {jsonable_encoder(node_run_result.metadata) if node_run_result.metadata else ''}",
-                color="green",
-            )
-
-    def on_workflow_node_execute_failed(self, event: NodeRunFailedEvent) -> None:
-        """
-        Workflow node execute failed
-        """
-        route_node_state = event.route_node_state
-
-        self.print_text("\n[NodeRunFailedEvent]", color="red")
-        self.print_text(f"Node ID: {event.node_id}", color="red")
-        self.print_text(f"Node Title: {event.node_data.title}", color="red")
-        self.print_text(f"Type: {event.node_type.value}", color="red")
-
-        if route_node_state.node_run_result:
-            node_run_result = route_node_state.node_run_result
-            self.print_text(f"Error: {node_run_result.error}", color="red")
-            self.print_text(
-                f"Inputs: {jsonable_encoder(node_run_result.inputs) if node_run_result.inputs else ''}",
-                color="red",
-            )
-            self.print_text(
-                f"Process Data: "
-                f"{jsonable_encoder(node_run_result.process_data) if node_run_result.process_data else ''}",
-                color="red",
-            )
-            self.print_text(
-                f"Outputs: {jsonable_encoder(node_run_result.outputs) if node_run_result.outputs else ''}",
-                color="red",
-            )
-
-    def on_node_text_chunk(self, event: NodeRunStreamChunkEvent) -> None:
-        """
-        Publish text chunk
-        """
-        route_node_state = event.route_node_state
-        if not self.current_node_id or self.current_node_id != route_node_state.node_id:
-            self.current_node_id = route_node_state.node_id
-            self.print_text("\n[NodeRunStreamChunkEvent]")
-            self.print_text(f"Node ID: {route_node_state.node_id}")
-
-            node_run_result = route_node_state.node_run_result
-            if node_run_result:
-                self.print_text(
-                    f"Metadata: {jsonable_encoder(node_run_result.metadata) if node_run_result.metadata else ''}"
-                )
-
-        self.print_text(event.chunk_content, color="pink", end="")
-
-    def on_workflow_parallel_started(self, event: ParallelBranchRunStartedEvent) -> None:
-        """
-        Publish parallel started
-        """
-        self.print_text("\n[ParallelBranchRunStartedEvent]", color="blue")
-        self.print_text(f"Parallel ID: {event.parallel_id}", color="blue")
-        self.print_text(f"Branch ID: {event.parallel_start_node_id}", color="blue")
-        if event.in_iteration_id:
-            self.print_text(f"Iteration ID: {event.in_iteration_id}", color="blue")
-
-    def on_workflow_parallel_completed(
-        self, event: ParallelBranchRunSucceededEvent | ParallelBranchRunFailedEvent
-    ) -> None:
-        """
-        Publish parallel completed
-        """
-        if isinstance(event, ParallelBranchRunSucceededEvent):
-            color = "blue"
-        elif isinstance(event, ParallelBranchRunFailedEvent):
-            color = "red"
-
-        self.print_text(
-            "\n[ParallelBranchRunSucceededEvent]"
-            if isinstance(event, ParallelBranchRunSucceededEvent)
-            else "\n[ParallelBranchRunFailedEvent]",
-            color=color,
-        )
-        self.print_text(f"Parallel ID: {event.parallel_id}", color=color)
-        self.print_text(f"Branch ID: {event.parallel_start_node_id}", color=color)
-        if event.in_iteration_id:
-            self.print_text(f"Iteration ID: {event.in_iteration_id}", color=color)
-
-        if isinstance(event, ParallelBranchRunFailedEvent):
-            self.print_text(f"Error: {event.error}", color=color)
-
-    def on_workflow_iteration_started(self, event: IterationRunStartedEvent) -> None:
-        """
-        Publish iteration started
-        """
-        self.print_text("\n[IterationRunStartedEvent]", color="blue")
-        self.print_text(f"Iteration Node ID: {event.iteration_id}", color="blue")
-
-    def on_workflow_iteration_next(self, event: IterationRunNextEvent) -> None:
-        """
-        Publish iteration next
-        """
-        self.print_text("\n[IterationRunNextEvent]", color="blue")
-        self.print_text(f"Iteration Node ID: {event.iteration_id}", color="blue")
-        self.print_text(f"Iteration Index: {event.index}", color="blue")
-
-    def on_workflow_iteration_completed(self, event: IterationRunSucceededEvent | IterationRunFailedEvent) -> None:
-        """
-        Publish iteration completed
-        """
-        self.print_text(
-            "\n[IterationRunSucceededEvent]"
-            if isinstance(event, IterationRunSucceededEvent)
-            else "\n[IterationRunFailedEvent]",
-            color="blue",
-        )
-        self.print_text(f"Node ID: {event.iteration_id}", color="blue")
-
-    def print_text(self, text: str, color: Optional[str] = None, end: str = "\n") -> None:
-        """Print text with highlighting and no end characters."""
-        text_to_print = self._get_colored_text(text, color) if color else text
-        print(f"{text_to_print}", end=end)
-
-    def _get_colored_text(self, text: str, color: str) -> str:
-        """Get colored text."""
-        color_str = _TEXT_COLOR_MAPPING[color]
-        return f"\u001b[{color_str}m\033[1;3m{text}\u001b[0m"
diff --git a/api/core/app/entities/app_invoke_entities.py b/api/core/app/entities/app_invoke_entities.py
index 98685513a3..f2eba29323 100644
--- a/api/core/app/entities/app_invoke_entities.py
+++ b/api/core/app/entities/app_invoke_entities.py
@@ -1,4 +1,4 @@
-from collections.abc import Mapping
+from collections.abc import Mapping, Sequence
 from enum import Enum
 from typing import Any, Optional

@@ -7,7 +7,7 @@ from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validat
 from constants import UUID_NIL
 from core.app.app_config.entities import AppConfig, EasyUIBasedAppConfig, WorkflowUIBasedAppConfig
 from core.entities.provider_configuration import ProviderModelBundle
-from core.file.file_obj import FileVar
+from core.file.models import File
 from core.model_runtime.entities.model_entities import AIModelEntity
 from core.ops.ops_trace_manager import TraceQueueManager

@@ -23,7 +23,7 @@ class InvokeFrom(Enum):
     DEBUGGER = "debugger"

     @classmethod
-    def value_of(cls, value: str) -> "InvokeFrom":
+    def value_of(cls, value: str):
         """
         Get value of given mode.
@@ -82,7 +82,7 @@ class AppGenerateEntity(BaseModel):
     app_config: AppConfig

     inputs: Mapping[str, Any]
-    files: list[FileVar] = []
+    files: Sequence[File]
     user_id: str

     # extras
diff --git a/api/core/app/entities/queue_entities.py b/api/core/app/entities/queue_entities.py
index 4577e28535..bc43baf8a5 100644
--- a/api/core/app/entities/queue_entities.py
+++ b/api/core/app/entities/queue_entities.py
@@ -5,9 +5,10 @@ from typing import Any, Optional
 from pydantic import BaseModel, field_validator

 from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk
-from core.workflow.entities.base_node_data_entities import BaseNodeData
-from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeType
+from core.workflow.entities.node_entities import NodeRunMetadataKey
 from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
+from core.workflow.nodes import NodeType
+from core.workflow.nodes.base import BaseNodeData


 class QueueEvent(str, Enum):
diff --git a/api/core/app/entities/task_entities.py b/api/core/app/entities/task_entities.py
index 49e5f55ebc..4b5f4716ed 100644
--- a/api/core/app/entities/task_entities.py
+++ b/api/core/app/entities/task_entities.py
@@ -1,3 +1,4 @@
+from collections.abc import Mapping, Sequence
 from enum import Enum
 from typing import Any, Optional

@@ -119,6 +120,7 @@ class MessageEndStreamResponse(StreamResponse):
     event: StreamEvent = StreamEvent.MESSAGE_END
     id: str
     metadata: dict = {}
+    files: Optional[Sequence[Mapping[str, Any]]] = None


 class MessageFileStreamResponse(StreamResponse):
@@ -211,7 +213,7 @@ class WorkflowFinishStreamResponse(StreamResponse):
         created_by: Optional[dict] = None
         created_at: int
         finished_at: int
-        files: Optional[list[dict]] = []
+        files: Optional[Sequence[Mapping[str, Any]]] = []

     event: StreamEvent = StreamEvent.WORKFLOW_FINISHED
     workflow_run_id: str
@@ -296,7 +298,7 @@ class NodeFinishStreamResponse(StreamResponse):
         execution_metadata: Optional[dict] = None
         created_at: int
         finished_at: int
-        files: Optional[list[dict]] = []
+        files: Optional[Sequence[Mapping[str, Any]]] = []

         parallel_id: Optional[str] = None
         parallel_start_node_id: Optional[str] = None
         parent_parallel_id: Optional[str] = None
diff --git a/api/core/app/segments/__init__.py b/api/core/app/segments/__init__.py
deleted file mode 100644
index 652ef243b4..0000000000
--- a/api/core/app/segments/__init__.py
+++ /dev/null
@@ -1,49 +0,0 @@
-from .segment_group import SegmentGroup
-from .segments import (
-    ArrayAnySegment,
-    ArraySegment,
-    FloatSegment,
-    IntegerSegment,
-    NoneSegment,
-    ObjectSegment,
-    Segment,
-    StringSegment,
-)
-from .types import SegmentType
-from .variables import (
-    ArrayAnyVariable,
-    ArrayNumberVariable,
-    ArrayObjectVariable,
-    ArrayStringVariable,
-    FloatVariable,
-    IntegerVariable,
-    NoneVariable,
-    ObjectVariable,
-    SecretVariable,
-    StringVariable,
-    Variable,
-)
-
-__all__ = [
-    "IntegerVariable",
-    "FloatVariable",
-    "ObjectVariable",
-    "SecretVariable",
-    "StringVariable",
-    "ArrayAnyVariable",
-    "Variable",
-    "SegmentType",
-    "SegmentGroup",
-    "Segment",
-    "NoneSegment",
-    "NoneVariable",
-    "IntegerSegment",
-    "FloatSegment",
-    "ObjectSegment",
-    "ArrayAnySegment",
-    "StringSegment",
-    "ArrayStringVariable",
-    "ArrayNumberVariable",
-    "ArrayObjectVariable",
-    "ArraySegment",
-]
diff --git a/api/core/app/segments/exc.py b/api/core/app/segments/exc.py
deleted file mode 100644
index 5cf67c3bac..0000000000
--- a/api/core/app/segments/exc.py
+++ /dev/null
@@ -1,2 +0,0 @@
-class VariableError(ValueError):
-    pass
diff --git a/api/core/app/segments/factory.py b/api/core/app/segments/factory.py
deleted file mode 100644
index 40a69ed4eb..0000000000
--- a/api/core/app/segments/factory.py
+++ /dev/null
@@ -1,76 +0,0 @@
-from collections.abc import Mapping
-from typing import Any
-
-from configs import dify_config
-
-from .exc import VariableError
-from .segments import (
-    ArrayAnySegment,
-    FloatSegment,
-    IntegerSegment,
-    NoneSegment,
-    ObjectSegment,
-    Segment,
-    StringSegment,
-)
-from .types import SegmentType
-from .variables import (
-    ArrayNumberVariable,
-    ArrayObjectVariable,
-    ArrayStringVariable,
-    FloatVariable,
-    IntegerVariable,
-    ObjectVariable,
-    SecretVariable,
-    StringVariable,
-    Variable,
-)
-
-
-def build_variable_from_mapping(mapping: Mapping[str, Any], /) -> Variable:
-    if (value_type := mapping.get("value_type")) is None:
-        raise VariableError("missing value type")
-    if not mapping.get("name"):
-        raise VariableError("missing name")
-    if (value := mapping.get("value")) is None:
-        raise VariableError("missing value")
-    match value_type:
-        case SegmentType.STRING:
-            result = StringVariable.model_validate(mapping)
-        case SegmentType.SECRET:
-            result = SecretVariable.model_validate(mapping)
-        case SegmentType.NUMBER if isinstance(value, int):
-            result = IntegerVariable.model_validate(mapping)
-        case SegmentType.NUMBER if isinstance(value, float):
-            result = FloatVariable.model_validate(mapping)
-        case SegmentType.NUMBER if not isinstance(value, float | int):
-            raise VariableError(f"invalid number value {value}")
-        case SegmentType.OBJECT if isinstance(value, dict):
-            result = ObjectVariable.model_validate(mapping)
-        case SegmentType.ARRAY_STRING if isinstance(value, list):
-            result = ArrayStringVariable.model_validate(mapping)
-        case SegmentType.ARRAY_NUMBER if isinstance(value, list):
-            result = ArrayNumberVariable.model_validate(mapping)
-        case SegmentType.ARRAY_OBJECT if isinstance(value, list):
-            result = ArrayObjectVariable.model_validate(mapping)
-        case _:
-            raise VariableError(f"not supported value type {value_type}")
-    if result.size > dify_config.MAX_VARIABLE_SIZE:
-        raise VariableError(f"variable size {result.size} exceeds limit {dify_config.MAX_VARIABLE_SIZE}")
-    return result
-
-
-def build_segment(value: Any, /) -> Segment:
-    if value is None:
-        return NoneSegment()
-    if isinstance(value, str):
-        return StringSegment(value=value)
-    if isinstance(value, int):
-        return IntegerSegment(value=value)
-    if isinstance(value, float):
-        return FloatSegment(value=value)
-    if isinstance(value, dict):
-        return ObjectSegment(value=value)
-    if isinstance(value, list):
-        return ArrayAnySegment(value=value)
-    raise ValueError(f"not supported value {value}")
diff --git a/api/core/app/segments/parser.py b/api/core/app/segments/parser.py
deleted file mode 100644
index 3c4d7046f4..0000000000
--- a/api/core/app/segments/parser.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import re
-
-from core.workflow.entities.variable_pool import VariablePool
-
-from . import SegmentGroup, factory
-
-VARIABLE_PATTERN = re.compile(r"\{\{#([a-zA-Z0-9_]{1,50}(?:\.[a-zA-Z_][a-zA-Z0-9_]{0,29}){1,10})#\}\}")
-
-
-def convert_template(*, template: str, variable_pool: VariablePool):
-    parts = re.split(VARIABLE_PATTERN, template)
-    segments = []
-    for part in filter(lambda x: x, parts):
-        if "." in part and (value := variable_pool.get(part.split("."))):
-            segments.append(value)
-        else:
-            segments.append(factory.build_segment(part))
-    return SegmentGroup(value=segments)
diff --git a/api/core/app/segments/segment_group.py b/api/core/app/segments/segment_group.py
deleted file mode 100644
index b363255b2c..0000000000
--- a/api/core/app/segments/segment_group.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from .segments import Segment
-from .types import SegmentType
-
-
-class SegmentGroup(Segment):
-    value_type: SegmentType = SegmentType.GROUP
-    value: list[Segment]
-
-    @property
-    def text(self):
-        return "".join([segment.text for segment in self.value])
-
-    @property
-    def log(self):
-        return "".join([segment.log for segment in self.value])
-
-    @property
-    def markdown(self):
-        return "".join([segment.markdown for segment in self.value])
-
-    def to_object(self):
-        return [segment.to_object() for segment in self.value]
diff --git a/api/core/app/segments/segments.py b/api/core/app/segments/segments.py
deleted file mode 100644
index b26b3c8291..0000000000
--- a/api/core/app/segments/segments.py
+++ /dev/null
@@ -1,126 +0,0 @@
-import json
-import sys
-from collections.abc import Mapping, Sequence
-from typing import Any
-
-from pydantic import BaseModel, ConfigDict, field_validator
-
-from .types import SegmentType
-
-
-class Segment(BaseModel):
-    model_config = ConfigDict(frozen=True)
-
-    value_type: SegmentType
-    value: Any
-
-    @field_validator("value_type")
-    @classmethod
-    def validate_value_type(cls, value):
-        """
-        This validator checks if the provided value is equal to the default value of the 'value_type' field.
-        If the value is different, a ValueError is raised.
-        """
-        if value != cls.model_fields["value_type"].default:
-            raise ValueError("Cannot modify 'value_type'")
-        return value
-
-    @property
-    def text(self) -> str:
-        return str(self.value)
-
-    @property
-    def log(self) -> str:
-        return str(self.value)
-
-    @property
-    def markdown(self) -> str:
-        return str(self.value)
-
-    @property
-    def size(self) -> int:
-        return sys.getsizeof(self.value)
-
-    def to_object(self) -> Any:
-        return self.value
-
-
-class NoneSegment(Segment):
-    value_type: SegmentType = SegmentType.NONE
-    value: None = None
-
-    @property
-    def text(self) -> str:
-        return "null"
-
-    @property
-    def log(self) -> str:
-        return "null"
-
-    @property
-    def markdown(self) -> str:
-        return "null"
-
-
-class StringSegment(Segment):
-    value_type: SegmentType = SegmentType.STRING
-    value: str
-
-
-class FloatSegment(Segment):
-    value_type: SegmentType = SegmentType.NUMBER
-    value: float
-
-
-class IntegerSegment(Segment):
-    value_type: SegmentType = SegmentType.NUMBER
-    value: int
-
-
-class ObjectSegment(Segment):
-    value_type: SegmentType = SegmentType.OBJECT
-    value: Mapping[str, Any]
-
-    @property
-    def text(self) -> str:
-        return json.dumps(self.model_dump()["value"], ensure_ascii=False)
-
-    @property
-    def log(self) -> str:
-        return json.dumps(self.model_dump()["value"], ensure_ascii=False, indent=2)
-
-    @property
-    def markdown(self) -> str:
-        return json.dumps(self.model_dump()["value"], ensure_ascii=False, indent=2)
-
-
-class ArraySegment(Segment):
-    @property
-    def markdown(self) -> str:
-        items = []
-        for item in self.value:
-            if hasattr(item, "to_markdown"):
-                items.append(item.to_markdown())
-            else:
-                items.append(str(item))
-        return "\n".join(items)
-
-
-class ArrayAnySegment(ArraySegment):
-    value_type: SegmentType = SegmentType.ARRAY_ANY
-    value: Sequence[Any]
-
-
-class ArrayStringSegment(ArraySegment):
-    value_type: SegmentType = SegmentType.ARRAY_STRING
-    value: Sequence[str]
-
-
-class ArrayNumberSegment(ArraySegment):
-    value_type: SegmentType = SegmentType.ARRAY_NUMBER
-    value: Sequence[float | int]
-
-
-class ArrayObjectSegment(ArraySegment):
-    value_type: SegmentType = SegmentType.ARRAY_OBJECT
-    value: Sequence[Mapping[str, Any]]
diff --git a/api/core/app/segments/types.py b/api/core/app/segments/types.py
deleted file mode 100644
index 9cf0856df5..0000000000
--- a/api/core/app/segments/types.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from enum import Enum
-
-
-class SegmentType(str, Enum):
-    NONE = "none"
-    NUMBER = "number"
-    STRING = "string"
-    SECRET = "secret"
-    ARRAY_ANY = "array[any]"
-    ARRAY_STRING = "array[string]"
-    ARRAY_NUMBER = "array[number]"
-    ARRAY_OBJECT = "array[object]"
-    OBJECT = "object"
-
-    GROUP = "group"
diff --git a/api/core/app/segments/variables.py b/api/core/app/segments/variables.py
deleted file mode 100644
index f0e403ab8d..0000000000
--- a/api/core/app/segments/variables.py
+++ /dev/null
@@ -1,75 +0,0 @@
-from pydantic import Field
-
-from core.helper import encrypter
-
-from .segments import (
-    ArrayAnySegment,
-    ArrayNumberSegment,
-    ArrayObjectSegment,
-    ArrayStringSegment,
-    FloatSegment,
-    IntegerSegment,
-    NoneSegment,
-    ObjectSegment,
-    Segment,
-    StringSegment,
-)
-from .types import SegmentType
-
-
-class Variable(Segment):
-    """
-    A variable is a segment that has a name.
-    """
-
-    id: str = Field(
-        default="",
-        description="Unique identity for variable. It's only used by environment variables now.",
-    )
-    name: str
-    description: str = Field(default="", description="Description of the variable.")
-
-
-class StringVariable(StringSegment, Variable):
-    pass
-
-
-class FloatVariable(FloatSegment, Variable):
-    pass
-
-
-class IntegerVariable(IntegerSegment, Variable):
-    pass
-
-
-class ObjectVariable(ObjectSegment, Variable):
-    pass
-
-
-class ArrayAnyVariable(ArrayAnySegment, Variable):
-    pass
-
-
-class ArrayStringVariable(ArrayStringSegment, Variable):
-    pass
-
-
-class ArrayNumberVariable(ArrayNumberSegment, Variable):
-    pass
-
-
-class ArrayObjectVariable(ArrayObjectSegment, Variable):
-    pass
-
-
-class SecretVariable(StringVariable):
-    value_type: SegmentType = SegmentType.SECRET
-
-    @property
-    def log(self) -> str:
-        return encrypter.obfuscated_token(self.value)
-
-
-class NoneVariable(NoneSegment, Variable):
-    value_type: SegmentType = SegmentType.NONE
-    value: None = None
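
The deleted factory mapped plain Python values onto typed segments; a minimal standalone sketch of that dispatch (mirroring the deleted build_segment, with string type tags standing in for the removed Pydantic segment classes):

from typing import Any

def segment_type_of(value: Any) -> str:
    # Mirrors the isinstance cascade of the deleted build_segment().
    if value is None:
        return "none"
    if isinstance(value, str):
        return "string"
    if isinstance(value, (int, float)):
        return "number"
    if isinstance(value, dict):
        return "object"
    if isinstance(value, list):
        return "array[any]"
    raise ValueError(f"not supported value {value}")

assert segment_type_of("hello") == "string"
assert segment_type_of([1, 2]) == "array[any]"
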
diff --git a/api/core/app/task_pipeline/based_generate_task_pipeline.py b/api/core/app/task_pipeline/based_generate_task_pipeline.py
index a43be5fdf2..51d610e2cb 100644
--- a/api/core/app/task_pipeline/based_generate_task_pipeline.py
+++ b/api/core/app/task_pipeline/based_generate_task_pipeline.py
@@ -53,7 +53,7 @@ class BasedGenerateTaskPipeline:
             self._output_moderation_handler = self._init_output_moderation()
         self._stream = stream

-    def _handle_error(self, event: QueueErrorEvent, message: Optional[Message] = None) -> Exception:
+    def _handle_error(self, event: QueueErrorEvent, message: Optional[Message] = None):
         """
         Handle error event.
         :param event: event
@@ -100,7 +100,7 @@ class BasedGenerateTaskPipeline:

         return message

-    def _error_to_stream_response(self, e: Exception) -> ErrorStreamResponse:
+    def _error_to_stream_response(self, e: Exception):
         """
         Error to stream response.
         :param e: exception
diff --git a/api/core/app/task_pipeline/workflow_cycle_manage.py b/api/core/app/task_pipeline/workflow_cycle_manage.py
index b8f5ac2603..2abee5bef5 100644
--- a/api/core/app/task_pipeline/workflow_cycle_manage.py
+++ b/api/core/app/task_pipeline/workflow_cycle_manage.py
@@ -1,8 +1,11 @@
 import json
 import time
+from collections.abc import Mapping, Sequence
 from datetime import datetime, timezone
 from typing import Any, Optional, Union, cast

+from sqlalchemy.orm import Session
+
 from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom, WorkflowAppGenerateEntity
 from core.app.entities.queue_entities import (
     QueueIterationCompletedEvent,
@@ -27,27 +30,26 @@ from core.app.entities.task_entities import (
     WorkflowStartStreamResponse,
     WorkflowTaskState,
 )
-from core.file.file_obj import FileVar
+from core.file import FILE_MODEL_IDENTITY, File
 from core.model_runtime.utils.encoders import jsonable_encoder
 from core.ops.entities.trace_entity import TraceTaskName
 from core.ops.ops_trace_manager import TraceQueueManager, TraceTask
 from core.tools.tool_manager import ToolManager
-from core.workflow.entities.node_entities import NodeType
 from core.workflow.enums import SystemVariableKey
+from core.workflow.nodes import NodeType
 from core.workflow.nodes.tool.entities import ToolNodeData
 from core.workflow.workflow_entry import WorkflowEntry
 from extensions.ext_database import db
 from models.account import Account
+from models.enums import CreatedByRole, WorkflowRunTriggeredFrom
 from models.model import EndUser
 from models.workflow import (
-    CreatedByRole,
     Workflow,
     WorkflowNodeExecution,
     WorkflowNodeExecutionStatus,
     WorkflowNodeExecutionTriggeredFrom,
     WorkflowRun,
     WorkflowRunStatus,
-    WorkflowRunTriggeredFrom,
 )

@@ -117,7 +119,7 @@ class WorkflowCycleManage:
         start_at: float,
         total_tokens: int,
         total_steps: int,
-        outputs: Optional[str] = None,
+        outputs: Mapping[str, Any] | None = None,
         conversation_id: Optional[str] = None,
         trace_manager: Optional[TraceQueueManager] = None,
     ) -> WorkflowRun:
@@ -133,8 +135,10 @@ class WorkflowCycleManage:
         """
         workflow_run = self._refetch_workflow_run(workflow_run.id)

+        outputs = WorkflowEntry.handle_special_values(outputs)
+
         workflow_run.status = WorkflowRunStatus.SUCCEEDED.value
-        workflow_run.outputs = outputs
+        workflow_run.outputs = json.dumps(outputs or {})
         workflow_run.elapsed_time = time.perf_counter() - start_at
         workflow_run.total_tokens = total_tokens
         workflow_run.total_steps = total_steps
@@ -230,30 +234,30 @@ class WorkflowCycleManage:
         self, workflow_run: WorkflowRun, event: QueueNodeStartedEvent
     ) -> WorkflowNodeExecution:
         # init workflow node execution
-        workflow_node_execution = WorkflowNodeExecution()
-        workflow_node_execution.tenant_id = workflow_run.tenant_id
-        workflow_node_execution.app_id = workflow_run.app_id
-        workflow_node_execution.workflow_id = workflow_run.workflow_id
-        workflow_node_execution.triggered_from = WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value
-        workflow_node_execution.workflow_run_id = workflow_run.id
-        workflow_node_execution.predecessor_node_id = event.predecessor_node_id
-        workflow_node_execution.index = event.node_run_index
-        workflow_node_execution.node_execution_id = event.node_execution_id
-        workflow_node_execution.node_id = event.node_id
-        workflow_node_execution.node_type = event.node_type.value
-        workflow_node_execution.title = event.node_data.title
-        workflow_node_execution.status = WorkflowNodeExecutionStatus.RUNNING.value
-        workflow_node_execution.created_by_role = workflow_run.created_by_role
-        workflow_node_execution.created_by = workflow_run.created_by
-        workflow_node_execution.created_at = datetime.now(timezone.utc).replace(tzinfo=None)
-        db.session.add(workflow_node_execution)
-        db.session.commit()
-        db.session.refresh(workflow_node_execution)
-        db.session.close()
+        with Session(db.engine, expire_on_commit=False) as session:
+            workflow_node_execution = WorkflowNodeExecution()
+            workflow_node_execution.tenant_id = workflow_run.tenant_id
+            workflow_node_execution.app_id = workflow_run.app_id
+            workflow_node_execution.workflow_id = workflow_run.workflow_id
+            workflow_node_execution.triggered_from = WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value
+            workflow_node_execution.workflow_run_id = workflow_run.id
+            workflow_node_execution.predecessor_node_id = event.predecessor_node_id
+            workflow_node_execution.index = event.node_run_index
+            workflow_node_execution.node_execution_id = event.node_execution_id
+            workflow_node_execution.node_id = event.node_id
+            workflow_node_execution.node_type = event.node_type.value
+            workflow_node_execution.title = event.node_data.title
+            workflow_node_execution.status = WorkflowNodeExecutionStatus.RUNNING.value
+            workflow_node_execution.created_by_role = workflow_run.created_by_role
+            workflow_node_execution.created_by = workflow_run.created_by
+            workflow_node_execution.created_at = datetime.now(timezone.utc).replace(tzinfo=None)
+
+            session.add(workflow_node_execution)
+            session.commit()
+            session.refresh(workflow_node_execution)

         self._wip_workflow_node_executions[workflow_node_execution.node_execution_id] = workflow_node_execution
-
         return workflow_node_execution

     def _handle_workflow_node_execution_success(self, event: QueueNodeSucceededEvent) -> WorkflowNodeExecution:
@@ -265,6 +269,7 @@ class WorkflowCycleManage:
         workflow_node_execution = self._refetch_workflow_node_execution(event.node_execution_id)

         inputs = WorkflowEntry.handle_special_values(event.inputs)
+        process_data = WorkflowEntry.handle_special_values(event.process_data)
         outputs = WorkflowEntry.handle_special_values(event.outputs)
         execution_metadata = (
             json.dumps(jsonable_encoder(event.execution_metadata)) if event.execution_metadata else None
@@ -276,7 +281,7 @@ class WorkflowCycleManage:
             {
                 WorkflowNodeExecution.status: WorkflowNodeExecutionStatus.SUCCEEDED.value,
                 WorkflowNodeExecution.inputs: json.dumps(inputs) if inputs else None,
-                WorkflowNodeExecution.process_data: json.dumps(event.process_data) if event.process_data else None,
+                WorkflowNodeExecution.process_data: json.dumps(process_data) if process_data else None,
                 WorkflowNodeExecution.outputs: json.dumps(outputs) if outputs else None,
                 WorkflowNodeExecution.execution_metadata: execution_metadata,
                 WorkflowNodeExecution.finished_at: finished_at,
@@ -286,10 +291,11 @@ class WorkflowCycleManage:

         db.session.commit()
         db.session.close()

         workflow_node_execution.status = WorkflowNodeExecutionStatus.SUCCEEDED.value
         workflow_node_execution.inputs = json.dumps(inputs) if inputs else None
-        workflow_node_execution.process_data = json.dumps(event.process_data) if event.process_data else None
+        workflow_node_execution.process_data = json.dumps(process_data) if process_data else None
         workflow_node_execution.outputs = json.dumps(outputs) if outputs else None
         workflow_node_execution.execution_metadata = execution_metadata
         workflow_node_execution.finished_at = finished_at
@@ -308,6 +314,7 @@ class WorkflowCycleManage:
         workflow_node_execution = self._refetch_workflow_node_execution(event.node_execution_id)

         inputs = WorkflowEntry.handle_special_values(event.inputs)
+        process_data = WorkflowEntry.handle_special_values(event.process_data)
         outputs = WorkflowEntry.handle_special_values(event.outputs)
         finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
         elapsed_time = (finished_at - event.start_at).total_seconds()
@@ -317,7 +324,7 @@ class WorkflowCycleManage:
                 WorkflowNodeExecution.status: WorkflowNodeExecutionStatus.FAILED.value,
                 WorkflowNodeExecution.error: event.error,
                 WorkflowNodeExecution.inputs: json.dumps(inputs) if inputs else None,
-                WorkflowNodeExecution.process_data: json.dumps(event.process_data) if event.process_data else None,
+                WorkflowNodeExecution.process_data: json.dumps(process_data) if process_data else None,
                 WorkflowNodeExecution.outputs: json.dumps(outputs) if outputs else None,
                 WorkflowNodeExecution.finished_at: finished_at,
                 WorkflowNodeExecution.elapsed_time: elapsed_time,
@@ -326,11 +333,12 @@ class WorkflowCycleManage:

         db.session.commit()
         db.session.close()

         workflow_node_execution.status = WorkflowNodeExecutionStatus.FAILED.value
         workflow_node_execution.error = event.error
         workflow_node_execution.inputs = json.dumps(inputs) if inputs else None
-        workflow_node_execution.process_data = json.dumps(event.process_data) if event.process_data else None
+        workflow_node_execution.process_data = json.dumps(process_data) if process_data else None
         workflow_node_execution.outputs = json.dumps(outputs) if outputs else None
         workflow_node_execution.finished_at = finished_at
         workflow_node_execution.elapsed_time = elapsed_time
@@ -637,7 +645,7 @@ class WorkflowCycleManage:
             ),
         )

-    def _fetch_files_from_node_outputs(self, outputs_dict: dict) -> list[dict]:
+    def _fetch_files_from_node_outputs(self, outputs_dict: dict) -> Sequence[Mapping[str, Any]]:
         """
         Fetch files from node outputs
         :param outputs_dict: node outputs dict
@@ -646,15 +654,15 @@ class WorkflowCycleManage:
         if not outputs_dict:
             return []

-        files = []
-        for output_var, output_value in outputs_dict.items():
-            file_vars = self._fetch_files_from_variable_value(output_value)
-            if file_vars:
-                files.extend(file_vars)
+        files = [self._fetch_files_from_variable_value(output_value) for output_value in outputs_dict.values()]
+        # Remove None
+        files = [file for file in files if file]
+        # Flatten list
+        files = [file for sublist in files for file in sublist]

         return files

-    def _fetch_files_from_variable_value(self, value: Union[dict, list]) -> list[dict]:
+    def _fetch_files_from_variable_value(self, value: Union[dict, list]) -> Sequence[Mapping[str, Any]]:
         """
         Fetch files from variable value
         :param value: variable value
@@ -666,17 +674,17 @@ class WorkflowCycleManage:
         files = []
         if isinstance(value, list):
             for item in value:
-                file_var = self._get_file_var_from_value(item)
-                if file_var:
-                    files.append(file_var)
+                file = self._get_file_var_from_value(item)
+                if file:
+                    files.append(file)
         elif isinstance(value, dict):
-            file_var = self._get_file_var_from_value(value)
-            if file_var:
-                files.append(file_var)
+            file = self._get_file_var_from_value(value)
+            if file:
+                files.append(file)

         return files

-    def _get_file_var_from_value(self, value: Union[dict, list]) -> Optional[dict]:
+    def _get_file_var_from_value(self, value: Union[dict, list]) -> Mapping[str, Any] | None:
         """
         Get file var from value
         :param value: variable value
@@ -685,14 +693,11 @@ class WorkflowCycleManage:
         if not value:
             return None

-        if isinstance(value, dict):
-            if "__variant" in value and value["__variant"] == FileVar.__name__:
-                return value
-        elif isinstance(value, FileVar):
+        if isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY:
+            return value
+        elif isinstance(value, File):
             return value.to_dict()

-        return None
-
     def _refetch_workflow_run(self, workflow_run_id: str) -> WorkflowRun:
         """
         Refetch workflow run
diff --git a/api/core/entities/message_entities.py b/api/core/entities/message_entities.py
deleted file mode 100644
index 10bc9f6ed7..0000000000
--- a/api/core/entities/message_entities.py
+++ /dev/null
@@ -1,29 +0,0 @@
-import enum
-from typing import Any
-
-from pydantic import BaseModel
-
-
-class PromptMessageFileType(enum.Enum):
-    IMAGE = "image"
-
-    @staticmethod
-    def value_of(value):
-        for member in PromptMessageFileType:
-            if member.value == value:
-                return member
-        raise ValueError(f"No matching enum found for value '{value}'")
-
-
-class PromptMessageFile(BaseModel):
-    type: PromptMessageFileType
-    data: Any = None
-
-
-class ImagePromptMessageFile(PromptMessageFile):
-    class DETAIL(enum.Enum):
-        LOW = "low"
-        HIGH = "high"
-
-    type: PromptMessageFileType = PromptMessageFileType.IMAGE
-    detail: DETAIL = DETAIL.LOW
diff --git a/api/core/file/__init__.py b/api/core/file/__init__.py
index e69de29bb2..bdaf8793fa 100644
--- a/api/core/file/__init__.py
+++ b/api/core/file/__init__.py
@@ -0,0 +1,19 @@
+from .constants import FILE_MODEL_IDENTITY
+from .enums import ArrayFileAttribute, FileAttribute, FileBelongsTo, FileTransferMethod, FileType
+from .models import (
+    File,
+    FileExtraConfig,
+    ImageConfig,
+)
+
+__all__ = [
+    "FileType",
+    "FileExtraConfig",
+    "FileTransferMethod",
+    "FileBelongsTo",
+    "File",
+    "ImageConfig",
+    "FileAttribute",
+    "ArrayFileAttribute",
+    "FILE_MODEL_IDENTITY",
+]
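
Serialized files are now recognized by a sentinel key rather than by Pydantic class name; a hedged sketch of that detection contract (the literal constant value is an assumption, since only the FILE_MODEL_IDENTITY import is visible here):

# Sketch of the new file-detection contract; the constant's value below is
# hypothetical, only its name and import are confirmed by this diff.
FILE_MODEL_IDENTITY = "__dify__file__"

def looks_like_serialized_file(value: object) -> bool:
    # File.to_dict() is expected to embed the identity marker, so files that
    # round-trip through JSON survive without an isinstance check.
    return isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY

assert looks_like_serialized_file({"dify_model_identity": FILE_MODEL_IDENTITY, "type": "image"})
assert not looks_like_serialized_file({"type": "image"})
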
- """ - - image_config: Optional[dict[str, Any]] = None - - -class FileType(enum.Enum): - IMAGE = "image" - - @staticmethod - def value_of(value): - for member in FileType: - if member.value == value: - return member - raise ValueError(f"No matching enum found for value '{value}'") - - -class FileTransferMethod(enum.Enum): - REMOTE_URL = "remote_url" - LOCAL_FILE = "local_file" - TOOL_FILE = "tool_file" - - @staticmethod - def value_of(value): - for member in FileTransferMethod: - if member.value == value: - return member - raise ValueError(f"No matching enum found for value '{value}'") - - -class FileBelongsTo(enum.Enum): - USER = "user" - ASSISTANT = "assistant" - - @staticmethod - def value_of(value): - for member in FileBelongsTo: - if member.value == value: - return member - raise ValueError(f"No matching enum found for value '{value}'") - - -class FileVar(BaseModel): - id: Optional[str] = None # message file id - tenant_id: str - type: FileType - transfer_method: FileTransferMethod - url: Optional[str] = None # remote url - related_id: Optional[str] = None - extra_config: Optional[FileExtraConfig] = None - filename: Optional[str] = None - extension: Optional[str] = None - mime_type: Optional[str] = None - - def to_dict(self) -> dict: - return { - "__variant": self.__class__.__name__, - "tenant_id": self.tenant_id, - "type": self.type.value, - "transfer_method": self.transfer_method.value, - "url": self.preview_url, - "remote_url": self.url, - "related_id": self.related_id, - "filename": self.filename, - "extension": self.extension, - "mime_type": self.mime_type, - } - - def to_markdown(self) -> str: - """ - Convert file to markdown - :return: - """ - preview_url = self.preview_url - if self.type == FileType.IMAGE: - text = f'![{self.filename or ""}]({preview_url})' - else: - text = f"[{self.filename or preview_url}]({preview_url})" - - return text - - @property - def data(self) -> Optional[str]: - """ - Get image data, file signed url or base64 data - depending on config MULTIMODAL_SEND_IMAGE_FORMAT - :return: - """ - return self._get_data() - - @property - def preview_url(self) -> Optional[str]: - """ - Get signed preview url - :return: - """ - return self._get_data(force_url=True) - - @property - def prompt_message_content(self) -> ImagePromptMessageContent: - if self.type == FileType.IMAGE: - image_config = self.extra_config.image_config - - return ImagePromptMessageContent( - data=self.data, - detail=ImagePromptMessageContent.DETAIL.HIGH - if image_config.get("detail") == "high" - else ImagePromptMessageContent.DETAIL.LOW, - ) - - def _get_data(self, force_url: bool = False) -> Optional[str]: - from models.model import UploadFile - - if self.type == FileType.IMAGE: - if self.transfer_method == FileTransferMethod.REMOTE_URL: - return self.url - elif self.transfer_method == FileTransferMethod.LOCAL_FILE: - upload_file = ( - db.session.query(UploadFile) - .filter(UploadFile.id == self.related_id, UploadFile.tenant_id == self.tenant_id) - .first() - ) - - return UploadFileParser.get_image_data(upload_file=upload_file, force_url=force_url) - elif self.transfer_method == FileTransferMethod.TOOL_FILE: - extension = self.extension - # add sign url - return ToolFileParser.get_tool_file_manager().sign_file( - tool_file_id=self.related_id, extension=extension - ) - - return None diff --git a/api/core/file/message_file_parser.py b/api/core/file/message_file_parser.py deleted file mode 100644 index 641686bd7c..0000000000 --- a/api/core/file/message_file_parser.py +++ /dev/null @@ -1,243 +0,0 
@@ -import re -from collections.abc import Mapping, Sequence -from typing import Any, Union -from urllib.parse import parse_qs, urlparse - -import requests - -from core.file.file_obj import FileBelongsTo, FileExtraConfig, FileTransferMethod, FileType, FileVar -from extensions.ext_database import db -from models.account import Account -from models.model import EndUser, MessageFile, UploadFile -from services.file_service import IMAGE_EXTENSIONS - - -class MessageFileParser: - def __init__(self, tenant_id: str, app_id: str) -> None: - self.tenant_id = tenant_id - self.app_id = app_id - - def validate_and_transform_files_arg( - self, files: Sequence[Mapping[str, Any]], file_extra_config: FileExtraConfig, user: Union[Account, EndUser] - ) -> list[FileVar]: - """ - validate and transform files arg - - :param files: - :param file_extra_config: - :param user: - :return: - """ - for file in files: - if not isinstance(file, dict): - raise ValueError("Invalid file format, must be dict") - if not file.get("type"): - raise ValueError("Missing file type") - FileType.value_of(file.get("type")) - if not file.get("transfer_method"): - raise ValueError("Missing file transfer method") - FileTransferMethod.value_of(file.get("transfer_method")) - if file.get("transfer_method") == FileTransferMethod.REMOTE_URL.value: - if not file.get("url"): - raise ValueError("Missing file url") - if not file.get("url").startswith("http"): - raise ValueError("Invalid file url") - if file.get("transfer_method") == FileTransferMethod.LOCAL_FILE.value and not file.get("upload_file_id"): - raise ValueError("Missing file upload_file_id") - if file.get("transform_method") == FileTransferMethod.TOOL_FILE.value and not file.get("tool_file_id"): - raise ValueError("Missing file tool_file_id") - - # transform files to file objs - type_file_objs = self._to_file_objs(files, file_extra_config) - - # validate files - new_files = [] - for file_type, file_objs in type_file_objs.items(): - if file_type == FileType.IMAGE: - # parse and validate files - image_config = file_extra_config.image_config - - # check if image file feature is enabled - if not image_config: - continue - - # Validate number of files - if len(files) > image_config["number_limits"]: - raise ValueError(f"Number of image files exceeds the maximum limit {image_config['number_limits']}") - - for file_obj in file_objs: - # Validate transfer method - if file_obj.transfer_method.value not in image_config["transfer_methods"]: - raise ValueError(f"Invalid transfer method: {file_obj.transfer_method.value}") - - # Validate file type - if file_obj.type != FileType.IMAGE: - raise ValueError(f"Invalid file type: {file_obj.type}") - - if file_obj.transfer_method == FileTransferMethod.REMOTE_URL: - # check remote url valid and is image - result, error = self._check_image_remote_url(file_obj.url) - if result is False: - raise ValueError(error) - elif file_obj.transfer_method == FileTransferMethod.LOCAL_FILE: - # get upload file from upload_file_id - upload_file = ( - db.session.query(UploadFile) - .filter( - UploadFile.id == file_obj.related_id, - UploadFile.tenant_id == self.tenant_id, - UploadFile.created_by == user.id, - UploadFile.created_by_role == ("account" if isinstance(user, Account) else "end_user"), - UploadFile.extension.in_(IMAGE_EXTENSIONS), - ) - .first() - ) - - # check upload file is belong to tenant and user - if not upload_file: - raise ValueError("Invalid upload file") - - new_files.append(file_obj) - - # return all file objs - return new_files - - def 
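One detail worth flagging in the validator being deleted here: it reads `file.get("transform_method")` where `transfer_method` was presumably intended, so the tool-file check could never fire. A sketch of the intended guards, written as a hypothetical standalone helper rather than the project's actual API:

```python
from typing import Any

def validate_file_arg(file: dict[str, Any]) -> None:
    """Hypothetical standalone version of the intended per-file checks."""
    transfer_method = file.get("transfer_method")  # note: *transfer*, not *transform*
    if transfer_method == "remote_url" and not str(file.get("url", "")).startswith("http"):
        raise ValueError("Invalid file url")
    if transfer_method == "local_file" and not file.get("upload_file_id"):
        raise ValueError("Missing file upload_file_id")
    if transfer_method == "tool_file" and not file.get("tool_file_id"):
        raise ValueError("Missing file tool_file_id")

validate_file_arg({"transfer_method": "tool_file", "tool_file_id": "tf-1"})
```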
transform_message_files(self, files: list[MessageFile], file_extra_config: FileExtraConfig): - """ - transform message files - - :param files: - :param file_extra_config: - :return: - """ - # transform files to file objs - type_file_objs = self._to_file_objs(files, file_extra_config) - - # return all file objs - return [file_obj for file_objs in type_file_objs.values() for file_obj in file_objs] - - def _to_file_objs( - self, files: list[Union[dict, MessageFile]], file_extra_config: FileExtraConfig - ) -> dict[FileType, list[FileVar]]: - """ - transform files to file objs - - :param files: - :param file_extra_config: - :return: - """ - type_file_objs: dict[FileType, list[FileVar]] = { - # Currently only support image - FileType.IMAGE: [] - } - - if not files: - return type_file_objs - - # group by file type and convert file args or message files to FileObj - for file in files: - if isinstance(file, MessageFile): - if file.belongs_to == FileBelongsTo.ASSISTANT.value: - continue - - file_obj = self._to_file_obj(file, file_extra_config) - if file_obj.type not in type_file_objs: - continue - - type_file_objs[file_obj.type].append(file_obj) - - return type_file_objs - - def _to_file_obj(self, file: Union[dict, MessageFile], file_extra_config: FileExtraConfig): - """ - transform file to file obj - - :param file: - :return: - """ - if isinstance(file, dict): - transfer_method = FileTransferMethod.value_of(file.get("transfer_method")) - if transfer_method != FileTransferMethod.TOOL_FILE: - return FileVar( - tenant_id=self.tenant_id, - type=FileType.value_of(file.get("type")), - transfer_method=transfer_method, - url=file.get("url") if transfer_method == FileTransferMethod.REMOTE_URL else None, - related_id=file.get("upload_file_id") if transfer_method == FileTransferMethod.LOCAL_FILE else None, - extra_config=file_extra_config, - ) - return FileVar( - tenant_id=self.tenant_id, - type=FileType.value_of(file.get("type")), - transfer_method=transfer_method, - url=None, - related_id=file.get("tool_file_id"), - extra_config=file_extra_config, - ) - else: - return FileVar( - id=file.id, - tenant_id=self.tenant_id, - type=FileType.value_of(file.type), - transfer_method=FileTransferMethod.value_of(file.transfer_method), - url=file.url, - related_id=file.upload_file_id or None, - extra_config=file_extra_config, - ) - - def _check_image_remote_url(self, url): - try: - headers = { - "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko)" - " Chrome/91.0.4472.124 Safari/537.36" - } - - def is_s3_presigned_url(url): - try: - parsed_url = urlparse(url) - if "amazonaws.com" not in parsed_url.netloc: - return False - query_params = parse_qs(parsed_url.query) - - def check_presign_v2(query_params): - required_params = ["Signature", "Expires"] - for param in required_params: - if param not in query_params: - return False - if not query_params["Expires"][0].isdigit(): - return False - signature = query_params["Signature"][0] - if not re.match(r"^[A-Za-z0-9+/]+={0,2}$", signature): - return False - - return True - - def check_presign_v4(query_params): - required_params = ["X-Amz-Signature", "X-Amz-Expires"] - for param in required_params: - if param not in query_params: - return False - if not query_params["X-Amz-Expires"][0].isdigit(): - return False - signature = query_params["X-Amz-Signature"][0] - if not re.match(r"^[A-Za-z0-9+/]+={0,2}$", signature): - return False - - return True - - return check_presign_v4(query_params) or check_presign_v2(query_params) - except 
Exception: - return False - - if is_s3_presigned_url(url): - response = requests.get(url, headers=headers, allow_redirects=True) - if response.status_code in {200, 304}: - return True, "" - - response = requests.head(url, headers=headers, allow_redirects=True) - if response.status_code in {200, 304}: - return True, "" - else: - return False, "URL does not exist." - except requests.RequestException as e: - return False, f"Error checking URL: {e}" diff --git a/api/core/file/tool_file_parser.py b/api/core/file/tool_file_parser.py index 1efaf5529d..a17b7be367 100644 --- a/api/core/file/tool_file_parser.py +++ b/api/core/file/tool_file_parser.py @@ -1,4 +1,9 @@ -tool_file_manager = {"manager": None} +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from core.tools.tool_file_manager import ToolFileManager + +tool_file_manager: dict[str, Any] = {"manager": None} class ToolFileParser: diff --git a/api/core/file/upload_file_parser.py b/api/core/file/upload_file_parser.py deleted file mode 100644 index a8c1fd4d02..0000000000 --- a/api/core/file/upload_file_parser.py +++ /dev/null @@ -1,79 +0,0 @@ -import base64 -import hashlib -import hmac -import logging -import os -import time -from typing import Optional - -from configs import dify_config -from extensions.ext_storage import storage - -IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "webp", "gif", "svg"] -IMAGE_EXTENSIONS.extend([ext.upper() for ext in IMAGE_EXTENSIONS]) - - -class UploadFileParser: - @classmethod - def get_image_data(cls, upload_file, force_url: bool = False) -> Optional[str]: - if not upload_file: - return None - - if upload_file.extension not in IMAGE_EXTENSIONS: - return None - - if dify_config.MULTIMODAL_SEND_IMAGE_FORMAT == "url" or force_url: - return cls.get_signed_temp_image_url(upload_file.id) - else: - # get image file base64 - try: - data = storage.load(upload_file.key) - except FileNotFoundError: - logging.error(f"File not found: {upload_file.key}") - return None - - encoded_string = base64.b64encode(data).decode("utf-8") - return f"data:{upload_file.mime_type};base64,{encoded_string}" - - @classmethod - def get_signed_temp_image_url(cls, upload_file_id) -> str: - """ - get signed url from upload file - - :param upload_file: UploadFile object - :return: - """ - base_url = dify_config.FILES_URL - image_preview_url = f"{base_url}/files/{upload_file_id}/image-preview" - - timestamp = str(int(time.time())) - nonce = os.urandom(16).hex() - data_to_sign = f"image-preview|{upload_file_id}|{timestamp}|{nonce}" - secret_key = dify_config.SECRET_KEY.encode() - sign = hmac.new(secret_key, data_to_sign.encode(), hashlib.sha256).digest() - encoded_sign = base64.urlsafe_b64encode(sign).decode() - - return f"{image_preview_url}?timestamp={timestamp}&nonce={nonce}&sign={encoded_sign}" - - @classmethod - def verify_image_file_signature(cls, upload_file_id: str, timestamp: str, nonce: str, sign: str) -> bool: - """ - verify signature - - :param upload_file_id: file id - :param timestamp: timestamp - :param nonce: nonce - :param sign: signature - :return: - """ - data_to_sign = f"image-preview|{upload_file_id}|{timestamp}|{nonce}" - secret_key = dify_config.SECRET_KEY.encode() - recalculated_sign = hmac.new(secret_key, data_to_sign.encode(), hashlib.sha256).digest() - recalculated_encoded_sign = base64.urlsafe_b64encode(recalculated_sign).decode() - - # verify signature - if sign != recalculated_encoded_sign: - return False - - current_time = int(time.time()) - return current_time - int(timestamp) <= 
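The `verify_image_file_signature` method removed in this hunk compares signatures with `!=`; `hmac.compare_digest` is the constant-time alternative usually preferred for MAC checks. A self-contained sketch of the same sign/verify scheme (the `SECRET_KEY` value is a stand-in for `dify_config.SECRET_KEY`):

```python
import base64
import hashlib
import hmac
import os
import time

SECRET_KEY = b"example-secret"  # stand-in for dify_config.SECRET_KEY

def sign(upload_file_id: str, timestamp: str, nonce: str) -> str:
    data = f"image-preview|{upload_file_id}|{timestamp}|{nonce}"
    digest = hmac.new(SECRET_KEY, data.encode(), hashlib.sha256).digest()
    return base64.urlsafe_b64encode(digest).decode()

def verify(upload_file_id: str, timestamp: str, nonce: str, candidate: str, timeout: int = 300) -> bool:
    expected = sign(upload_file_id, timestamp, nonce)
    # compare_digest runs in constant time, unlike `!=`
    if not hmac.compare_digest(candidate, expected):
        return False
    return int(time.time()) - int(timestamp) <= timeout

ts, nonce = str(int(time.time())), os.urandom(16).hex()
assert verify("file-123", ts, nonce, sign("file-123", ts, nonce))
```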
dify_config.FILES_ACCESS_TIMEOUT diff --git a/api/core/helper/ssrf_proxy.py b/api/core/helper/ssrf_proxy.py index 4e6d58904e..6793e41978 100644 --- a/api/core/helper/ssrf_proxy.py +++ b/api/core/helper/ssrf_proxy.py @@ -13,8 +13,11 @@ SSRF_PROXY_HTTP_URL = os.getenv("SSRF_PROXY_HTTP_URL", "") SSRF_PROXY_HTTPS_URL = os.getenv("SSRF_PROXY_HTTPS_URL", "") SSRF_DEFAULT_MAX_RETRIES = int(os.getenv("SSRF_DEFAULT_MAX_RETRIES", "3")) -proxies = ( - {"http://": SSRF_PROXY_HTTP_URL, "https://": SSRF_PROXY_HTTPS_URL} +proxy_mounts = ( + { + "http://": httpx.HTTPTransport(proxy=SSRF_PROXY_HTTP_URL), + "https://": httpx.HTTPTransport(proxy=SSRF_PROXY_HTTPS_URL), + } if SSRF_PROXY_HTTP_URL and SSRF_PROXY_HTTPS_URL else None ) @@ -33,11 +36,14 @@ def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs): while retries <= max_retries: try: if SSRF_PROXY_ALL_URL: - response = httpx.request(method=method, url=url, proxy=SSRF_PROXY_ALL_URL, **kwargs) - elif proxies: - response = httpx.request(method=method, url=url, proxies=proxies, **kwargs) + with httpx.Client(proxy=SSRF_PROXY_ALL_URL) as client: + response = client.request(method=method, url=url, **kwargs) + elif proxy_mounts: + with httpx.Client(mounts=proxy_mounts) as client: + response = client.request(method=method, url=url, **kwargs) else: - response = httpx.request(method=method, url=url, **kwargs) + with httpx.Client() as client: + response = client.request(method=method, url=url, **kwargs) if response.status_code not in STATUS_FORCELIST: return response diff --git a/api/core/hosting_configuration.py b/api/core/hosting_configuration.py index eeeccc2349..b47ba67f2f 100644 --- a/api/core/hosting_configuration.py +++ b/api/core/hosting_configuration.py @@ -1,8 +1,9 @@ from typing import Optional -from flask import Config, Flask +from flask import Flask from pydantic import BaseModel +from configs import dify_config from core.entities.provider_entities import QuotaUnit, RestrictModel from core.model_runtime.entities.model_entities import ModelType from models.provider import ProviderQuotaType @@ -44,32 +45,30 @@ class HostingConfiguration: moderation_config: HostedModerationConfig = None def init_app(self, app: Flask) -> None: - config = app.config - - if config.get("EDITION") != "CLOUD": + if dify_config.EDITION != "CLOUD": return - self.provider_map["azure_openai"] = self.init_azure_openai(config) - self.provider_map["openai"] = self.init_openai(config) - self.provider_map["anthropic"] = self.init_anthropic(config) - self.provider_map["minimax"] = self.init_minimax(config) - self.provider_map["spark"] = self.init_spark(config) - self.provider_map["zhipuai"] = self.init_zhipuai(config) + self.provider_map["azure_openai"] = self.init_azure_openai() + self.provider_map["openai"] = self.init_openai() + self.provider_map["anthropic"] = self.init_anthropic() + self.provider_map["minimax"] = self.init_minimax() + self.provider_map["spark"] = self.init_spark() + self.provider_map["zhipuai"] = self.init_zhipuai() - self.moderation_config = self.init_moderation_config(config) + self.moderation_config = self.init_moderation_config() @staticmethod - def init_azure_openai(app_config: Config) -> HostingProvider: + def init_azure_openai() -> HostingProvider: quota_unit = QuotaUnit.TIMES - if app_config.get("HOSTED_AZURE_OPENAI_ENABLED"): + if dify_config.HOSTED_AZURE_OPENAI_ENABLED: credentials = { - "openai_api_key": app_config.get("HOSTED_AZURE_OPENAI_API_KEY"), - "openai_api_base": app_config.get("HOSTED_AZURE_OPENAI_API_BASE"), + 
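For context on the `ssrf_proxy` change: recent httpx releases deprecate the `proxies=` argument in favour of per-scheme `mounts`, each wrapping an `httpx.HTTPTransport(proxy=...)`, which is exactly the shape the patch adopts. A minimal sketch (the proxy URL is hypothetical):

```python
import httpx

PROXY_URL = "http://proxy.internal:3128"  # hypothetical proxy endpoint

proxy_mounts = {
    "http://": httpx.HTTPTransport(proxy=PROXY_URL),
    "https://": httpx.HTTPTransport(proxy=PROXY_URL),
}

# Requests are routed through the transport mounted for their URL scheme.
with httpx.Client(mounts=proxy_mounts, timeout=10.0) as client:
    response = client.request("GET", "https://example.com/health")
    print(response.status_code)
```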
"openai_api_key": dify_config.HOSTED_AZURE_OPENAI_API_KEY, + "openai_api_base": dify_config.HOSTED_AZURE_OPENAI_API_BASE, "base_model_name": "gpt-35-turbo", } quotas = [] - hosted_quota_limit = int(app_config.get("HOSTED_AZURE_OPENAI_QUOTA_LIMIT", "1000")) + hosted_quota_limit = dify_config.HOSTED_AZURE_OPENAI_QUOTA_LIMIT trial_quota = TrialHostingQuota( quota_limit=hosted_quota_limit, restrict_models=[ @@ -122,31 +121,31 @@ class HostingConfiguration: quota_unit=quota_unit, ) - def init_openai(self, app_config: Config) -> HostingProvider: + def init_openai(self) -> HostingProvider: quota_unit = QuotaUnit.CREDITS quotas = [] - if app_config.get("HOSTED_OPENAI_TRIAL_ENABLED"): - hosted_quota_limit = int(app_config.get("HOSTED_OPENAI_QUOTA_LIMIT", "200")) - trial_models = self.parse_restrict_models_from_env(app_config, "HOSTED_OPENAI_TRIAL_MODELS") + if dify_config.HOSTED_OPENAI_TRIAL_ENABLED: + hosted_quota_limit = dify_config.HOSTED_OPENAI_QUOTA_LIMIT + trial_models = self.parse_restrict_models_from_env("HOSTED_OPENAI_TRIAL_MODELS") trial_quota = TrialHostingQuota(quota_limit=hosted_quota_limit, restrict_models=trial_models) quotas.append(trial_quota) - if app_config.get("HOSTED_OPENAI_PAID_ENABLED"): - paid_models = self.parse_restrict_models_from_env(app_config, "HOSTED_OPENAI_PAID_MODELS") + if dify_config.HOSTED_OPENAI_PAID_ENABLED: + paid_models = self.parse_restrict_models_from_env("HOSTED_OPENAI_PAID_MODELS") paid_quota = PaidHostingQuota(restrict_models=paid_models) quotas.append(paid_quota) if len(quotas) > 0: credentials = { - "openai_api_key": app_config.get("HOSTED_OPENAI_API_KEY"), + "openai_api_key": dify_config.HOSTED_OPENAI_API_KEY, } - if app_config.get("HOSTED_OPENAI_API_BASE"): - credentials["openai_api_base"] = app_config.get("HOSTED_OPENAI_API_BASE") + if dify_config.HOSTED_OPENAI_API_BASE: + credentials["openai_api_base"] = dify_config.HOSTED_OPENAI_API_BASE - if app_config.get("HOSTED_OPENAI_API_ORGANIZATION"): - credentials["openai_organization"] = app_config.get("HOSTED_OPENAI_API_ORGANIZATION") + if dify_config.HOSTED_OPENAI_API_ORGANIZATION: + credentials["openai_organization"] = dify_config.HOSTED_OPENAI_API_ORGANIZATION return HostingProvider(enabled=True, credentials=credentials, quota_unit=quota_unit, quotas=quotas) @@ -156,26 +155,26 @@ class HostingConfiguration: ) @staticmethod - def init_anthropic(app_config: Config) -> HostingProvider: + def init_anthropic() -> HostingProvider: quota_unit = QuotaUnit.TOKENS quotas = [] - if app_config.get("HOSTED_ANTHROPIC_TRIAL_ENABLED"): - hosted_quota_limit = int(app_config.get("HOSTED_ANTHROPIC_QUOTA_LIMIT", "0")) + if dify_config.HOSTED_ANTHROPIC_TRIAL_ENABLED: + hosted_quota_limit = dify_config.HOSTED_ANTHROPIC_QUOTA_LIMIT trial_quota = TrialHostingQuota(quota_limit=hosted_quota_limit) quotas.append(trial_quota) - if app_config.get("HOSTED_ANTHROPIC_PAID_ENABLED"): + if dify_config.HOSTED_ANTHROPIC_PAID_ENABLED: paid_quota = PaidHostingQuota() quotas.append(paid_quota) if len(quotas) > 0: credentials = { - "anthropic_api_key": app_config.get("HOSTED_ANTHROPIC_API_KEY"), + "anthropic_api_key": dify_config.HOSTED_ANTHROPIC_API_KEY, } - if app_config.get("HOSTED_ANTHROPIC_API_BASE"): - credentials["anthropic_api_url"] = app_config.get("HOSTED_ANTHROPIC_API_BASE") + if dify_config.HOSTED_ANTHROPIC_API_BASE: + credentials["anthropic_api_url"] = dify_config.HOSTED_ANTHROPIC_API_BASE return HostingProvider(enabled=True, credentials=credentials, quota_unit=quota_unit, quotas=quotas) @@ -185,9 +184,9 @@ class 
HostingConfiguration: ) @staticmethod - def init_minimax(app_config: Config) -> HostingProvider: + def init_minimax() -> HostingProvider: quota_unit = QuotaUnit.TOKENS - if app_config.get("HOSTED_MINIMAX_ENABLED"): + if dify_config.HOSTED_MINIMAX_ENABLED: quotas = [FreeHostingQuota()] return HostingProvider( @@ -203,9 +202,9 @@ class HostingConfiguration: ) @staticmethod - def init_spark(app_config: Config) -> HostingProvider: + def init_spark() -> HostingProvider: quota_unit = QuotaUnit.TOKENS - if app_config.get("HOSTED_SPARK_ENABLED"): + if dify_config.HOSTED_SPARK_ENABLED: quotas = [FreeHostingQuota()] return HostingProvider( @@ -221,9 +220,9 @@ class HostingConfiguration: ) @staticmethod - def init_zhipuai(app_config: Config) -> HostingProvider: + def init_zhipuai() -> HostingProvider: quota_unit = QuotaUnit.TOKENS - if app_config.get("HOSTED_ZHIPUAI_ENABLED"): + if dify_config.HOSTED_ZHIPUAI_ENABLED: quotas = [FreeHostingQuota()] return HostingProvider( @@ -239,17 +238,15 @@ class HostingConfiguration: ) @staticmethod - def init_moderation_config(app_config: Config) -> HostedModerationConfig: - if app_config.get("HOSTED_MODERATION_ENABLED") and app_config.get("HOSTED_MODERATION_PROVIDERS"): - return HostedModerationConfig( - enabled=True, providers=app_config.get("HOSTED_MODERATION_PROVIDERS").split(",") - ) + def init_moderation_config() -> HostedModerationConfig: + if dify_config.HOSTED_MODERATION_ENABLED and dify_config.HOSTED_MODERATION_PROVIDERS: + return HostedModerationConfig(enabled=True, providers=dify_config.HOSTED_MODERATION_PROVIDERS.split(",")) return HostedModerationConfig(enabled=False) @staticmethod - def parse_restrict_models_from_env(app_config: Config, env_var: str) -> list[RestrictModel]: - models_str = app_config.get(env_var) + def parse_restrict_models_from_env(env_var: str) -> list[RestrictModel]: + models_str = dify_config.model_dump().get(env_var) models_list = models_str.split(",") if models_str else [] return [ RestrictModel(model=model_name.strip(), model_type=ModelType.LLM) diff --git a/api/core/llm_generator/llm_generator.py b/api/core/llm_generator/llm_generator.py index 39bd6fee69..9cf9ed75c0 100644 --- a/api/core/llm_generator/llm_generator.py +++ b/api/core/llm_generator/llm_generator.py @@ -8,6 +8,8 @@ from core.llm_generator.output_parser.suggested_questions_after_answer import Su from core.llm_generator.prompts import ( CONVERSATION_TITLE_PROMPT, GENERATOR_QA_PROMPT, + JAVASCRIPT_CODE_GENERATOR_PROMPT_TEMPLATE, + PYTHON_CODE_GENERATOR_PROMPT_TEMPLATE, WORKFLOW_RULE_CONFIG_PROMPT_GENERATE_TEMPLATE, ) from core.model_manager import ModelManager @@ -239,6 +241,54 @@ class LLMGenerator: return rule_config + @classmethod + def generate_code( + cls, + tenant_id: str, + instruction: str, + model_config: dict, + code_language: str = "javascript", + max_tokens: int = 1000, + ) -> dict: + if code_language == "python": + prompt_template = PromptTemplateParser(PYTHON_CODE_GENERATOR_PROMPT_TEMPLATE) + else: + prompt_template = PromptTemplateParser(JAVASCRIPT_CODE_GENERATOR_PROMPT_TEMPLATE) + + prompt = prompt_template.format( + inputs={ + "INSTRUCTION": instruction, + "CODE_LANGUAGE": code_language, + }, + remove_template_variables=False, + ) + + model_manager = ModelManager() + model_instance = model_manager.get_model_instance( + tenant_id=tenant_id, + model_type=ModelType.LLM, + provider=model_config.get("provider") if model_config else None, + model=model_config.get("name") if model_config else None, + ) + + prompt_messages = 
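`parse_restrict_models_from_env` now resolves the variable through `dify_config.model_dump().get(env_var)`; in pydantic v2, `model_dump()` returns the settings as a plain dict, which allows lookup by field name at runtime. A sketch with a stand-in config model (`ExampleConfig` is hypothetical):

```python
from pydantic import BaseModel

class ExampleConfig(BaseModel):  # hypothetical stand-in for dify_config
    HOSTED_OPENAI_TRIAL_MODELS: str = "gpt-4,gpt-3.5-turbo"

config = ExampleConfig()

def parse_restrict_models(env_var: str) -> list[str]:
    # model_dump() exposes the settings as a plain dict, so a field can be
    # fetched by its name without getattr()/hasattr() gymnastics.
    models_str = config.model_dump().get(env_var)
    return [m.strip() for m in models_str.split(",")] if models_str else []

print(parse_restrict_models("HOSTED_OPENAI_TRIAL_MODELS"))  # ['gpt-4', 'gpt-3.5-turbo']
```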
[UserPromptMessage(content=prompt)] + model_parameters = {"max_tokens": max_tokens, "temperature": 0.01} + + try: + response = model_instance.invoke_llm( + prompt_messages=prompt_messages, model_parameters=model_parameters, stream=False + ) + + generated_code = response.message.content + return {"code": generated_code, "language": code_language, "error": ""} + + except InvokeError as e: + error = str(e) + return {"code": "", "language": code_language, "error": f"Failed to generate code. Error: {error}"} + except Exception as e: + logging.exception(e) + return {"code": "", "language": code_language, "error": f"An unexpected error occurred: {str(e)}"} + @classmethod def generate_qa_document(cls, tenant_id: str, query, document_language: str): prompt = GENERATOR_QA_PROMPT.format(language=document_language) diff --git a/api/core/llm_generator/prompts.py b/api/core/llm_generator/prompts.py index e5b6784516..7c0f247052 100644 --- a/api/core/llm_generator/prompts.py +++ b/api/core/llm_generator/prompts.py @@ -61,6 +61,73 @@ User Input: yo, 你今天咋样? User Input: """ # noqa: E501 +PYTHON_CODE_GENERATOR_PROMPT_TEMPLATE = ( + "You are an expert programmer. Generate code based on the following instructions:\n\n" + "Instructions: {{INSTRUCTION}}\n\n" + "Write the code in {{CODE_LANGUAGE}}.\n\n" + "Please ensure that you meet the following requirements:\n" + "1. Define a function named 'main'.\n" + "2. The 'main' function must return a dictionary (dict).\n" + "3. You may modify the arguments of the 'main' function, but include appropriate type hints.\n" + "4. The returned dictionary should contain at least one key-value pair.\n\n" + "5. You may ONLY use the following libraries in your code: \n" + "- json\n" + "- datetime\n" + "- math\n" + "- random\n" + "- re\n" + "- string\n" + "- sys\n" + "- time\n" + "- traceback\n" + "- uuid\n" + "- os\n" + "- base64\n" + "- hashlib\n" + "- hmac\n" + "- binascii\n" + "- collections\n" + "- functools\n" + "- operator\n" + "- itertools\n\n" + "Example:\n" + "def main(arg1: str, arg2: int) -> dict:\n" + " return {\n" + ' "result": arg1 * arg2,\n' + " }\n\n" + "IMPORTANT:\n" + "- Provide ONLY the code without any additional explanations, comments, or markdown formatting.\n" + "- DO NOT use markdown code blocks (``` or ``` python). Return the raw code directly.\n" + "- The code should start immediately after this instruction, without any preceding newlines or spaces.\n" + "- The code should be complete, functional, and follow best practices for {{CODE_LANGUAGE}}.\n\n" + "- Always use the format return {'result': ...} for the output.\n\n" + "Generated Code:\n" +) +JAVASCRIPT_CODE_GENERATOR_PROMPT_TEMPLATE = ( + "You are an expert programmer. Generate code based on the following instructions:\n\n" + "Instructions: {{INSTRUCTION}}\n\n" + "Write the code in {{CODE_LANGUAGE}}.\n\n" + "Please ensure that you meet the following requirements:\n" + "1. Define a function named 'main'.\n" + "2. The 'main' function must return an object.\n" + "3. You may modify the arguments of the 'main' function, but include appropriate JSDoc annotations.\n" + "4. The returned object should contain at least one key-value pair.\n\n" + "5. The returned object should always be in the format: {result: ...}\n\n" + "Example:\n" + "function main(arg1, arg2) {\n" + " return {\n" + " result: arg1 * arg2\n" + " };\n" + "}\n\n" + "IMPORTANT:\n" + "- Provide ONLY the code without any additional explanations, comments, or markdown formatting.\n" + "- DO NOT use markdown code blocks (``` or ``` javascript). 
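The new code generator formats its prompt with `{{INSTRUCTION}}` and `{{CODE_LANGUAGE}}` placeholders. Assuming `PromptTemplateParser` performs simple `{{NAME}}` substitution (its escaping rules are not reproduced here), a toy renderer looks like this:

```python
import re

def render(template: str, inputs: dict[str, str]) -> str:
    # Toy stand-in for PromptTemplateParser: substitute {{NAME}} placeholders,
    # leaving unknown placeholders untouched.
    return re.sub(r"\{\{(\w+)\}\}", lambda m: inputs.get(m.group(1), m.group(0)), template)

print(render(
    "Instructions: {{INSTRUCTION}}\nWrite the code in {{CODE_LANGUAGE}}.",
    {"INSTRUCTION": "reverse a string", "CODE_LANGUAGE": "python"},
))
```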
Return the raw code directly.\n" + "- The code should start immediately after this instruction, without any preceding newlines or spaces.\n" + "- The code should be complete, functional, and follow best practices for {{CODE_LANGUAGE}}.\n\n" + "Generated Code:\n" +) + + SUGGESTED_QUESTIONS_AFTER_ANSWER_INSTRUCTION_PROMPT = ( "Please help me predict the three most likely questions that human would ask, " "and keeping each question under 20 characters.\n" diff --git a/api/core/memory/token_buffer_memory.py b/api/core/memory/token_buffer_memory.py index bc94912c1e..d92c36a2df 100644 --- a/api/core/memory/token_buffer_memory.py +++ b/api/core/memory/token_buffer_memory.py @@ -1,18 +1,21 @@ from typing import Optional from core.app.app_config.features.file_upload.manager import FileUploadConfigManager -from core.file.message_file_parser import MessageFileParser +from core.file import file_manager +from core.file.models import FileType from core.model_manager import ModelInstance -from core.model_runtime.entities.message_entities import ( +from core.model_runtime.entities import ( AssistantPromptMessage, ImagePromptMessageContent, PromptMessage, + PromptMessageContent, PromptMessageRole, TextPromptMessageContent, UserPromptMessage, ) from core.prompt.utils.extract_thread_messages import extract_thread_messages from extensions.ext_database import db +from factories import file_factory from models.model import AppMode, Conversation, Message, MessageFile from models.workflow import WorkflowRun @@ -65,13 +68,12 @@ class TokenBufferMemory: messages = list(reversed(thread_messages)) - message_file_parser = MessageFileParser(tenant_id=app_record.tenant_id, app_id=app_record.id) prompt_messages = [] for message in messages: files = db.session.query(MessageFile).filter(MessageFile.message_id == message.id).all() if files: file_extra_config = None - if self.conversation.mode not in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}: + if self.conversation.mode not in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}: file_extra_config = FileUploadConfigManager.convert(self.conversation.model_config) else: if message.workflow_run_id: @@ -84,17 +86,22 @@ class TokenBufferMemory: workflow_run.workflow.features_dict, is_vision=False ) - if file_extra_config: - file_objs = message_file_parser.transform_message_files(files, file_extra_config) + if file_extra_config and app_record: + file_objs = file_factory.build_from_message_files( + message_files=files, tenant_id=app_record.tenant_id, config=file_extra_config + ) else: file_objs = [] if not file_objs: prompt_messages.append(UserPromptMessage(content=message.query)) else: - prompt_message_contents = [TextPromptMessageContent(data=message.query)] + prompt_message_contents: list[PromptMessageContent] = [] + prompt_message_contents.append(TextPromptMessageContent(data=message.query)) for file_obj in file_objs: - prompt_message_contents.append(file_obj.prompt_message_content) + if file_obj.type in {FileType.IMAGE, FileType.AUDIO}: + prompt_message = file_manager.to_prompt_message_content(file_obj) + prompt_message_contents.append(prompt_message) prompt_messages.append(UserPromptMessage(content=prompt_message_contents)) else: diff --git a/api/core/model_manager.py b/api/core/model_manager.py index e394233d2c..e21449ec24 100644 --- a/api/core/model_manager.py +++ b/api/core/model_manager.py @@ -1,7 +1,7 @@ import logging import os -from collections.abc import Callable, Generator, Sequence -from typing import IO, Optional, Union, cast +from collections.abc import Callable, 
Generator, Iterable, Sequence +from typing import IO, Any, Optional, Union, cast from core.entities.embedding_type import EmbeddingInputType from core.entities.provider_configuration import ProviderConfiguration, ProviderModelBundle @@ -274,7 +274,7 @@ class ModelInstance: user=user, ) - def invoke_tts(self, content_text: str, tenant_id: str, voice: str, user: Optional[str] = None) -> str: + def invoke_tts(self, content_text: str, tenant_id: str, voice: str, user: Optional[str] = None) -> Iterable[bytes]: """ Invoke large language tts model @@ -298,7 +298,7 @@ class ModelInstance: voice=voice, ) - def _round_robin_invoke(self, function: Callable, *args, **kwargs): + def _round_robin_invoke(self, function: Callable[..., Any], *args, **kwargs): """ Round-robin invoke :param function: function to invoke diff --git a/api/core/model_runtime/docs/en_US/customizable_model_scale_out.md b/api/core/model_runtime/docs/en_US/customizable_model_scale_out.md index f5b806ade6..f050919d81 100644 --- a/api/core/model_runtime/docs/en_US/customizable_model_scale_out.md +++ b/api/core/model_runtime/docs/en_US/customizable_model_scale_out.md @@ -218,7 +218,7 @@ For instance, Xinference supports `max_tokens`, `temperature`, and `top_p` param However, some vendors may support different parameters for different models. For example, the `OpenLLM` vendor supports `top_k`, but not all models provided by this vendor support `top_k`. Let's say model A supports `top_k` but model B does not. In such cases, we need to dynamically generate the model parameter schema, as illustrated below: ```python - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/docs/zh_Hans/customizable_model_scale_out.md b/api/core/model_runtime/docs/zh_Hans/customizable_model_scale_out.md index 7b3a8edba3..240f65802b 100644 --- a/api/core/model_runtime/docs/zh_Hans/customizable_model_scale_out.md +++ b/api/core/model_runtime/docs/zh_Hans/customizable_model_scale_out.md @@ -205,7 +205,7 @@ provider_credential_schema: 但是有的供应商根据不同的模型支持不同的参数,如供应商`OpenLLM`支持`top_k`,但是并不是这个供应商提供的所有模型都支持`top_k`,我们这里举例A模型支持`top_k`,B模型不支持`top_k`,那么我们需要在这里动态生成模型参数的Schema,如下所示: ```python - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/entities/__init__.py b/api/core/model_runtime/entities/__init__.py index e69de29bb2..b3eb4d4dfe 100644 --- a/api/core/model_runtime/entities/__init__.py +++ b/api/core/model_runtime/entities/__init__.py @@ -0,0 +1,38 @@ +from .llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage +from .message_entities import ( + AssistantPromptMessage, + AudioPromptMessageContent, + ImagePromptMessageContent, + PromptMessage, + PromptMessageContent, + PromptMessageContentType, + PromptMessageRole, + PromptMessageTool, + SystemPromptMessage, + TextPromptMessageContent, + ToolPromptMessage, + UserPromptMessage, +) +from .model_entities import ModelPropertyKey + +__all__ = [ + "ImagePromptMessageContent", + "PromptMessage", + "PromptMessageRole", + "LLMUsage", + "ModelPropertyKey", + "AssistantPromptMessage", + "PromptMessage", + "PromptMessageContent", + "PromptMessageRole", + 
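On the recurring `AIModelEntity | None` to `Optional[AIModelEntity]` edits throughout this patch: PEP 604 unions in annotations are evaluated at function-definition time, so `X | None` raises `TypeError` on Python 3.9 unless `from __future__ import annotations` is in effect, while `Optional` works on every supported version. Illustratively, with a stand-in class:

```python
from typing import Optional

class AIModelEntity:  # stand-in for the real entity class
    pass

# On Python 3.9 the equivalent "-> AIModelEntity | None" annotation raises
# TypeError at definition time unless `from __future__ import annotations` is used.
def get_customizable_model_schema(model: str, credentials: dict) -> Optional[AIModelEntity]:
    return None
```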
"SystemPromptMessage", + "TextPromptMessageContent", + "UserPromptMessage", + "PromptMessageTool", + "ToolPromptMessage", + "PromptMessageContentType", + "LLMResult", + "LLMResultChunk", + "LLMResultChunkDelta", + "AudioPromptMessageContent", +] diff --git a/api/core/model_runtime/entities/llm_entities.py b/api/core/model_runtime/entities/llm_entities.py index 52b590f66a..88531d8ae0 100644 --- a/api/core/model_runtime/entities/llm_entities.py +++ b/api/core/model_runtime/entities/llm_entities.py @@ -105,6 +105,7 @@ class LLMResult(BaseModel): Model class for llm result. """ + id: Optional[str] = None model: str prompt_messages: list[PromptMessage] message: AssistantPromptMessage diff --git a/api/core/model_runtime/entities/message_entities.py b/api/core/model_runtime/entities/message_entities.py index e51bb18deb..cda1639661 100644 --- a/api/core/model_runtime/entities/message_entities.py +++ b/api/core/model_runtime/entities/message_entities.py @@ -2,7 +2,7 @@ from abc import ABC from enum import Enum from typing import Optional -from pydantic import BaseModel, field_validator +from pydantic import BaseModel, Field, field_validator class PromptMessageRole(Enum): @@ -55,6 +55,7 @@ class PromptMessageContentType(Enum): TEXT = "text" IMAGE = "image" + AUDIO = "audio" class PromptMessageContent(BaseModel): @@ -74,12 +75,18 @@ class TextPromptMessageContent(PromptMessageContent): type: PromptMessageContentType = PromptMessageContentType.TEXT +class AudioPromptMessageContent(PromptMessageContent): + type: PromptMessageContentType = PromptMessageContentType.AUDIO + data: str = Field(..., description="Base64 encoded audio data") + format: str = Field(..., description="Audio format") + + class ImagePromptMessageContent(PromptMessageContent): """ Model class for image prompt message content. 
""" - class DETAIL(Enum): + class DETAIL(str, Enum): LOW = "low" HIGH = "high" diff --git a/api/core/model_runtime/model_providers/__base/large_language_model.py b/api/core/model_runtime/model_providers/__base/large_language_model.py index 0027411a6e..5b6f96129b 100644 --- a/api/core/model_runtime/model_providers/__base/large_language_model.py +++ b/api/core/model_runtime/model_providers/__base/large_language_model.py @@ -1,5 +1,4 @@ import logging -import os import re import time from abc import abstractmethod @@ -8,6 +7,7 @@ from typing import Optional, Union from pydantic import ConfigDict +from configs import dify_config from core.model_runtime.callbacks.base_callback import Callback from core.model_runtime.callbacks.logging_callback import LoggingCallback from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage @@ -77,7 +77,7 @@ class LargeLanguageModel(AIModel): callbacks = callbacks or [] - if bool(os.environ.get("DEBUG", "False").lower() == "true"): + if dify_config.DEBUG: callbacks.append(LoggingCallback()) # trigger before invoke callbacks @@ -107,7 +107,16 @@ class LargeLanguageModel(AIModel): callbacks=callbacks, ) else: - result = self._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) + result = self._invoke( + model=model, + credentials=credentials, + prompt_messages=prompt_messages, + model_parameters=model_parameters, + tools=tools, + stop=stop, + stream=stream, + user=user, + ) except Exception as e: self._trigger_invoke_error_callbacks( model=model, diff --git a/api/core/model_runtime/model_providers/__base/tts_model.py b/api/core/model_runtime/model_providers/__base/tts_model.py index 862ec29daf..b394ea4e9d 100644 --- a/api/core/model_runtime/model_providers/__base/tts_model.py +++ b/api/core/model_runtime/model_providers/__base/tts_model.py @@ -1,6 +1,7 @@ import logging import re from abc import abstractmethod +from collections.abc import Iterable from typing import Any, Optional from pydantic import ConfigDict @@ -22,8 +23,14 @@ class TTSModel(AIModel): model_config = ConfigDict(protected_namespaces=()) def invoke( - self, model: str, tenant_id: str, credentials: dict, content_text: str, voice: str, user: Optional[str] = None - ): + self, + model: str, + tenant_id: str, + credentials: dict, + content_text: str, + voice: str, + user: Optional[str] = None, + ) -> Iterable[bytes]: """ Invoke large language model @@ -50,8 +57,14 @@ class TTSModel(AIModel): @abstractmethod def _invoke( - self, model: str, tenant_id: str, credentials: dict, content_text: str, voice: str, user: Optional[str] = None - ): + self, + model: str, + tenant_id: str, + credentials: dict, + content_text: str, + voice: str, + user: Optional[str] = None, + ) -> Iterable[bytes]: """ Invoke large language model @@ -68,25 +81,25 @@ class TTSModel(AIModel): def get_tts_model_voices(self, model: str, credentials: dict, language: Optional[str] = None) -> list: """ - Get voice for given tts model voices + Retrieves the list of voices supported by a given text-to-speech (TTS) model. - :param language: tts language - :param model: model name - :param credentials: model credentials - :return: voices lists + :param language: The language for which the voices are requested. + :param model: The name of the TTS model. + :param credentials: The credentials required to access the TTS model. + :return: A list of voices supported by the TTS model. 
""" model_schema = self.get_model_schema(model, credentials) - if model_schema and ModelPropertyKey.VOICES in model_schema.model_properties: - voices = model_schema.model_properties[ModelPropertyKey.VOICES] - if language: - return [ - {"name": d["name"], "value": d["mode"]} - for d in voices - if language and language in d.get("language") - ] - else: - return [{"name": d["name"], "value": d["mode"]} for d in voices] + if not model_schema or ModelPropertyKey.VOICES not in model_schema.model_properties: + raise ValueError("this model does not support voice") + + voices = model_schema.model_properties[ModelPropertyKey.VOICES] + if language: + return [ + {"name": d["name"], "value": d["mode"]} for d in voices if language and language in d.get("language") + ] + else: + return [{"name": d["name"], "value": d["mode"]} for d in voices] def _get_model_default_voice(self, model: str, credentials: dict) -> Any: """ @@ -111,8 +124,10 @@ class TTSModel(AIModel): """ model_schema = self.get_model_schema(model, credentials) - if model_schema and ModelPropertyKey.AUDIO_TYPE in model_schema.model_properties: - return model_schema.model_properties[ModelPropertyKey.AUDIO_TYPE] + if not model_schema or ModelPropertyKey.AUDIO_TYPE not in model_schema.model_properties: + raise ValueError("this model does not support audio type") + + return model_schema.model_properties[ModelPropertyKey.AUDIO_TYPE] def _get_model_word_limit(self, model: str, credentials: dict) -> int: """ @@ -121,8 +136,10 @@ class TTSModel(AIModel): """ model_schema = self.get_model_schema(model, credentials) - if model_schema and ModelPropertyKey.WORD_LIMIT in model_schema.model_properties: - return model_schema.model_properties[ModelPropertyKey.WORD_LIMIT] + if not model_schema or ModelPropertyKey.WORD_LIMIT not in model_schema.model_properties: + raise ValueError("this model does not support word limit") + + return model_schema.model_properties[ModelPropertyKey.WORD_LIMIT] def _get_model_workers_limit(self, model: str, credentials: dict) -> int: """ @@ -131,8 +148,10 @@ class TTSModel(AIModel): """ model_schema = self.get_model_schema(model, credentials) - if model_schema and ModelPropertyKey.MAX_WORKERS in model_schema.model_properties: - return model_schema.model_properties[ModelPropertyKey.MAX_WORKERS] + if not model_schema or ModelPropertyKey.MAX_WORKERS not in model_schema.model_properties: + raise ValueError("this model does not support max workers") + + return model_schema.model_properties[ModelPropertyKey.MAX_WORKERS] @staticmethod def _split_text_into_sentences(org_text, max_length=2000, pattern=r"[。.!?]"): diff --git a/api/core/model_runtime/model_providers/anthropic/llm/_position.yaml b/api/core/model_runtime/model_providers/anthropic/llm/_position.yaml index 8394c4276a..aca9456313 100644 --- a/api/core/model_runtime/model_providers/anthropic/llm/_position.yaml +++ b/api/core/model_runtime/model_providers/anthropic/llm/_position.yaml @@ -1,3 +1,4 @@ +- claude-3-5-sonnet-20241022 - claude-3-5-sonnet-20240620 - claude-3-haiku-20240307 - claude-3-opus-20240229 diff --git a/api/core/model_runtime/model_providers/azure_ai_studio/llm/llm.py b/api/core/model_runtime/model_providers/azure_ai_studio/llm/llm.py index 516ef8b295..53030bad83 100644 --- a/api/core/model_runtime/model_providers/azure_ai_studio/llm/llm.py +++ b/api/core/model_runtime/model_providers/azure_ai_studio/llm/llm.py @@ -294,7 +294,7 @@ class AzureAIStudioLargeLanguageModel(LargeLanguageModel): ], } - def get_customizable_model_schema(self, model: str, credentials: 
dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ Used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py b/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py index 6ed7ab277c..b6dfb20b66 100644 --- a/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py +++ b/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py @@ -148,7 +148,7 @@ class AzureRerankModel(RerankModel): InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError, json.JSONDecodeError], } - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/azure_openai/azure_openai.yaml b/api/core/model_runtime/model_providers/azure_openai/azure_openai.yaml index 093f57c51e..1ef5e83abc 100644 --- a/api/core/model_runtime/model_providers/azure_openai/azure_openai.yaml +++ b/api/core/model_runtime/model_providers/azure_openai/azure_openai.yaml @@ -53,6 +53,9 @@ model_credential_schema: type: select required: true options: + - label: + en_US: 2024-10-01-preview + value: 2024-10-01-preview - label: en_US: 2024-09-01-preview value: 2024-09-01-preview diff --git a/api/core/model_runtime/model_providers/azure_openai/llm/llm.py b/api/core/model_runtime/model_providers/azure_openai/llm/llm.py index 4f46fb81f8..1cd4823e13 100644 --- a/api/core/model_runtime/model_providers/azure_openai/llm/llm.py +++ b/api/core/model_runtime/model_providers/azure_openai/llm/llm.py @@ -45,9 +45,7 @@ class AzureOpenAILargeLanguageModel(_CommonAzureOpenAI, LargeLanguageModel): stream: bool = True, user: Optional[str] = None, ) -> Union[LLMResult, Generator]: - base_model_name = credentials.get("base_model_name") - if not base_model_name: - raise ValueError("Base Model Name is required") + base_model_name = self._get_base_model_name(credentials) ai_model_entity = self._get_ai_model_entity(base_model_name=base_model_name, model=model) if ai_model_entity and ai_model_entity.entity.model_properties.get(ModelPropertyKey.MODE) == LLMMode.CHAT.value: @@ -81,9 +79,7 @@ class AzureOpenAILargeLanguageModel(_CommonAzureOpenAI, LargeLanguageModel): prompt_messages: list[PromptMessage], tools: Optional[list[PromptMessageTool]] = None, ) -> int: - base_model_name = credentials.get("base_model_name") - if not base_model_name: - raise ValueError("Base Model Name is required") + base_model_name = self._get_base_model_name(credentials) model_entity = self._get_ai_model_entity(base_model_name=base_model_name, model=model) if not model_entity: raise ValueError(f"Base Model Name {base_model_name} is invalid") @@ -108,9 +104,7 @@ class AzureOpenAILargeLanguageModel(_CommonAzureOpenAI, LargeLanguageModel): if "base_model_name" not in credentials: raise CredentialsValidateFailedError("Base Model Name is required") - base_model_name = credentials.get("base_model_name") - if not base_model_name: - raise CredentialsValidateFailedError("Base Model Name is required") + base_model_name = self._get_base_model_name(credentials) ai_model_entity = self._get_ai_model_entity(base_model_name=base_model_name, model=model) if not ai_model_entity: @@ -149,9 +143,7 @@ class AzureOpenAILargeLanguageModel(_CommonAzureOpenAI, LargeLanguageModel): 
raise CredentialsValidateFailedError(str(ex)) def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: - base_model_name = credentials.get("base_model_name") - if not base_model_name: - raise ValueError("Base Model Name is required") + base_model_name = self._get_base_model_name(credentials) ai_model_entity = self._get_ai_model_entity(base_model_name=base_model_name, model=model) return ai_model_entity.entity if ai_model_entity else None @@ -308,11 +300,6 @@ class AzureOpenAILargeLanguageModel(_CommonAzureOpenAI, LargeLanguageModel): if tools: extra_model_kwargs["tools"] = [helper.dump_model(PromptMessageFunction(function=tool)) for tool in tools] - # extra_model_kwargs['functions'] = [{ - # "name": tool.name, - # "description": tool.description, - # "parameters": tool.parameters - # } for tool in tools] if stop: extra_model_kwargs["stop"] = stop @@ -769,3 +756,9 @@ class AzureOpenAILargeLanguageModel(_CommonAzureOpenAI, LargeLanguageModel): ai_model_entity_copy.entity.label.en_US = model ai_model_entity_copy.entity.label.zh_Hans = model return ai_model_entity_copy + + def _get_base_model_name(self, credentials: dict) -> str: + base_model_name = credentials.get("base_model_name") + if not base_model_name: + raise ValueError("Base Model Name is required") + return base_model_name diff --git a/api/core/model_runtime/model_providers/groq/groq.yaml b/api/core/model_runtime/model_providers/groq/groq.yaml index db17cc8bdd..d6534e1bf1 100644 --- a/api/core/model_runtime/model_providers/groq/groq.yaml +++ b/api/core/model_runtime/model_providers/groq/groq.yaml @@ -18,6 +18,7 @@ help: en_US: https://console.groq.com/ supported_model_types: - llm + - speech2text configurate_methods: - predefined-model provider_credential_schema: diff --git a/api/core/model_runtime/model_providers/huggingface_tei/rerank/rerank.py b/api/core/model_runtime/model_providers/huggingface_tei/rerank/rerank.py index 74a1dfc3ff..0bb9a9c8b5 100644 --- a/api/core/model_runtime/model_providers/huggingface_tei/rerank/rerank.py +++ b/api/core/model_runtime/model_providers/huggingface_tei/rerank/rerank.py @@ -118,7 +118,7 @@ class HuggingfaceTeiRerankModel(RerankModel): InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError], } - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/huggingface_tei/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/huggingface_tei/text_embedding/text_embedding.py index 6b43934538..a0917630a9 100644 --- a/api/core/model_runtime/model_providers/huggingface_tei/text_embedding/text_embedding.py +++ b/api/core/model_runtime/model_providers/huggingface_tei/text_embedding/text_embedding.py @@ -189,7 +189,7 @@ class HuggingfaceTeiTextEmbeddingModel(TextEmbeddingModel): return usage - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/localai/llm/llm.py b/api/core/model_runtime/model_providers/localai/llm/llm.py index e7295355f6..756c5571d4 100644 --- a/api/core/model_runtime/model_providers/localai/llm/llm.py +++ 
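The four copy-pasted "Base Model Name is required" checks in the Azure OpenAI model class collapse into the new `_get_base_model_name` helper. The guard pattern in isolation (`require_credential` is a hypothetical name):

```python
def require_credential(credentials: dict, key: str) -> str:
    # Shared guard replacing four identical inline checks.
    value = credentials.get(key)
    if not value:
        raise ValueError(f"{key} is required")
    return value

print(require_credential({"base_model_name": "gpt-35-turbo"}, "base_model_name"))
```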
b/api/core/model_runtime/model_providers/localai/llm/llm.py @@ -1,5 +1,5 @@ from collections.abc import Generator -from typing import cast +from typing import Optional, cast from httpx import Timeout from openai import ( @@ -212,7 +212,7 @@ class LocalAILanguageModel(LargeLanguageModel): except Exception as ex: raise CredentialsValidateFailedError(f"Invalid credentials {str(ex)}") - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: completion_model = None if credentials["completion_type"] == "chat_completion": completion_model = LLMMode.CHAT.value diff --git a/api/core/model_runtime/model_providers/localai/speech2text/speech2text.py b/api/core/model_runtime/model_providers/localai/speech2text/speech2text.py index 4b9d0f5bfe..260a35d9d4 100644 --- a/api/core/model_runtime/model_providers/localai/speech2text/speech2text.py +++ b/api/core/model_runtime/model_providers/localai/speech2text/speech2text.py @@ -73,7 +73,7 @@ class LocalAISpeech2text(Speech2TextModel): InvokeBadRequestError: [InvokeBadRequestError], } - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/localai/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/localai/text_embedding/text_embedding.py index b4dfc1a4de..0111c3362a 100644 --- a/api/core/model_runtime/model_providers/localai/text_embedding/text_embedding.py +++ b/api/core/model_runtime/model_providers/localai/text_embedding/text_embedding.py @@ -115,7 +115,7 @@ class LocalAITextEmbeddingModel(TextEmbeddingModel): num_tokens += self._get_num_tokens_by_gpt2(text) return num_tokens - def _get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def _get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ Get customizable model schema diff --git a/api/core/model_runtime/model_providers/moonshot/llm/llm.py b/api/core/model_runtime/model_providers/moonshot/llm/llm.py index 3ea46c2967..5c955c86d3 100644 --- a/api/core/model_runtime/model_providers/moonshot/llm/llm.py +++ b/api/core/model_runtime/model_providers/moonshot/llm/llm.py @@ -44,13 +44,16 @@ class MoonshotLargeLanguageModel(OAIAPICompatLargeLanguageModel): self._add_custom_parameters(credentials) self._add_function_call(model, credentials) user = user[:32] if user else None + # {"response_format": "json_object"} need convert to {"response_format": {"type": "json_object"}} + if "response_format" in model_parameters: + model_parameters["response_format"] = {"type": model_parameters.get("response_format")} return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) def validate_credentials(self, model: str, credentials: dict) -> None: self._add_custom_parameters(credentials) super().validate_credentials(model, credentials) - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: return AIModelEntity( model=model, label=I18nObject(en_US=model, zh_Hans=model), diff --git a/api/core/model_runtime/model_providers/openai/llm/_position.yaml 
b/api/core/model_runtime/model_providers/openai/llm/_position.yaml index 7501bc1164..b7c25ecb16 100644 --- a/api/core/model_runtime/model_providers/openai/llm/_position.yaml +++ b/api/core/model_runtime/model_providers/openai/llm/_position.yaml @@ -1,3 +1,4 @@ +- gpt-4o-audio-preview - gpt-4 - gpt-4o - gpt-4o-2024-05-13 diff --git a/api/core/model_runtime/model_providers/openai/llm/llm.py b/api/core/model_runtime/model_providers/openai/llm/llm.py index 1ac3837ad3..922e5e1314 100644 --- a/api/core/model_runtime/model_providers/openai/llm/llm.py +++ b/api/core/model_runtime/model_providers/openai/llm/llm.py @@ -1,7 +1,7 @@ import json import logging from collections.abc import Generator -from typing import Optional, Union, cast +from typing import Any, Optional, Union, cast import tiktoken from openai import OpenAI, Stream @@ -11,9 +11,9 @@ from openai.types.chat.chat_completion_chunk import ChoiceDeltaFunctionCall, Cho from openai.types.chat.chat_completion_message import FunctionCall from core.model_runtime.callbacks.base_callback import Callback -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta -from core.model_runtime.entities.message_entities import ( +from core.model_runtime.entities import ( AssistantPromptMessage, + AudioPromptMessageContent, ImagePromptMessageContent, PromptMessage, PromptMessageContentType, @@ -23,6 +23,7 @@ from core.model_runtime.entities.message_entities import ( ToolPromptMessage, UserPromptMessage, ) +from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, I18nObject, ModelType, PriceConfig from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel @@ -613,6 +614,7 @@ class OpenAILargeLanguageModel(_CommonOpenAI, LargeLanguageModel): # clear illegal prompt messages prompt_messages = self._clear_illegal_prompt_messages(model, prompt_messages) + # o1 compatibility block_as_stream = False if model.startswith("o1"): if stream: @@ -626,8 +628,9 @@ class OpenAILargeLanguageModel(_CommonOpenAI, LargeLanguageModel): del extra_model_kwargs["stop"] # chat model + messages: Any = [self._convert_prompt_message_to_dict(m) for m in prompt_messages] response = client.chat.completions.create( - messages=[self._convert_prompt_message_to_dict(m) for m in prompt_messages], + messages=messages, model=model, stream=stream, **model_parameters, @@ -946,23 +949,29 @@ class OpenAILargeLanguageModel(_CommonOpenAI, LargeLanguageModel): Convert PromptMessage to dict for OpenAI API """ if isinstance(message, UserPromptMessage): - message = cast(UserPromptMessage, message) if isinstance(message.content, str): message_dict = {"role": "user", "content": message.content} - else: + elif isinstance(message.content, list): sub_messages = [] for message_content in message.content: - if message_content.type == PromptMessageContentType.TEXT: - message_content = cast(TextPromptMessageContent, message_content) + if isinstance(message_content, TextPromptMessageContent): sub_message_dict = {"type": "text", "text": message_content.data} sub_messages.append(sub_message_dict) - elif message_content.type == PromptMessageContentType.IMAGE: - message_content = cast(ImagePromptMessageContent, message_content) + elif isinstance(message_content, ImagePromptMessageContent): sub_message_dict = { "type": 
"image_url", "image_url": {"url": message_content.data, "detail": message_content.detail.value}, } sub_messages.append(sub_message_dict) + elif isinstance(message_content, AudioPromptMessageContent): + sub_message_dict = { + "type": "input_audio", + "input_audio": { + "data": message_content.data, + "format": message_content.format, + }, + } + sub_messages.append(sub_message_dict) message_dict = {"role": "user", "content": sub_messages} elif isinstance(message, AssistantPromptMessage): diff --git a/api/core/model_runtime/model_providers/openai/speech2text/speech2text.py b/api/core/model_runtime/model_providers/openai/speech2text/speech2text.py index 0d54d2ea9a..ecf455c6fd 100644 --- a/api/core/model_runtime/model_providers/openai/speech2text/speech2text.py +++ b/api/core/model_runtime/model_providers/openai/speech2text/speech2text.py @@ -61,7 +61,7 @@ class OpenAISpeech2TextModel(_CommonOpenAI, Speech2TextModel): return response.text - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py b/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py index 356ac56b1e..e1342fe985 100644 --- a/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py +++ b/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py @@ -397,16 +397,21 @@ class OAIAPICompatLargeLanguageModel(_CommonOaiApiCompat, LargeLanguageModel): chunk_index = 0 def create_final_llm_result_chunk( - index: int, message: AssistantPromptMessage, finish_reason: str + id: Optional[str], index: int, message: AssistantPromptMessage, finish_reason: str, usage: dict ) -> LLMResultChunk: # calculate num tokens - prompt_tokens = self._num_tokens_from_string(model, prompt_messages[0].content) - completion_tokens = self._num_tokens_from_string(model, full_assistant_content) + prompt_tokens = usage and usage.get("prompt_tokens") + if prompt_tokens is None: + prompt_tokens = self._num_tokens_from_string(model, prompt_messages[0].content) + completion_tokens = usage and usage.get("completion_tokens") + if completion_tokens is None: + completion_tokens = self._num_tokens_from_string(model, full_assistant_content) # transform usage usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens) return LLMResultChunk( + id=id, model=model, prompt_messages=prompt_messages, delta=LLMResultChunkDelta(index=index, message=message, finish_reason=finish_reason, usage=usage), @@ -450,7 +455,7 @@ class OAIAPICompatLargeLanguageModel(_CommonOaiApiCompat, LargeLanguageModel): tool_call.function.arguments += new_tool_call.function.arguments finish_reason = None # The default value of finish_reason is None - + message_id, usage = None, None for chunk in response.iter_lines(decode_unicode=True, delimiter=delimiter): chunk = chunk.strip() if chunk: @@ -462,20 +467,26 @@ class OAIAPICompatLargeLanguageModel(_CommonOaiApiCompat, LargeLanguageModel): continue try: - chunk_json = json.loads(decoded_chunk) + chunk_json: dict = json.loads(decoded_chunk) # stream ended except json.JSONDecodeError as e: yield create_final_llm_result_chunk( + id=message_id, index=chunk_index + 1, message=AssistantPromptMessage(content=""), finish_reason="Non-JSON encountered.", + usage=usage, ) break + if chunk_json: + if u := 
chunk_json.get("usage"): + usage = u if not chunk_json or len(chunk_json["choices"]) == 0: continue choice = chunk_json["choices"][0] finish_reason = chunk_json["choices"][0].get("finish_reason") + message_id = chunk_json.get("id") chunk_index += 1 if "delta" in choice: @@ -524,6 +535,7 @@ class OAIAPICompatLargeLanguageModel(_CommonOaiApiCompat, LargeLanguageModel): continue yield LLMResultChunk( + id=message_id, model=model, prompt_messages=prompt_messages, delta=LLMResultChunkDelta( @@ -536,6 +548,7 @@ class OAIAPICompatLargeLanguageModel(_CommonOaiApiCompat, LargeLanguageModel): if tools_calls: yield LLMResultChunk( + id=message_id, model=model, prompt_messages=prompt_messages, delta=LLMResultChunkDelta( @@ -545,17 +558,22 @@ class OAIAPICompatLargeLanguageModel(_CommonOaiApiCompat, LargeLanguageModel): ) yield create_final_llm_result_chunk( - index=chunk_index, message=AssistantPromptMessage(content=""), finish_reason=finish_reason + id=message_id, + index=chunk_index, + message=AssistantPromptMessage(content=""), + finish_reason=finish_reason, + usage=usage, ) def _handle_generate_response( self, model: str, credentials: dict, response: requests.Response, prompt_messages: list[PromptMessage] ) -> LLMResult: - response_json = response.json() + response_json: dict = response.json() completion_type = LLMMode.value_of(credentials["mode"]) output = response_json["choices"][0] + message_id = response_json.get("id") response_content = "" tool_calls = None @@ -593,6 +611,7 @@ class OAIAPICompatLargeLanguageModel(_CommonOaiApiCompat, LargeLanguageModel): # transform response result = LLMResult( + id=message_id, model=response_json["model"], prompt_messages=prompt_messages, message=assistant_message, diff --git a/api/core/model_runtime/model_providers/openai_api_compatible/openai_api_compatible.yaml b/api/core/model_runtime/model_providers/openai_api_compatible/openai_api_compatible.yaml index 88c76fe16e..69a081f35c 100644 --- a/api/core/model_runtime/model_providers/openai_api_compatible/openai_api_compatible.yaml +++ b/api/core/model_runtime/model_providers/openai_api_compatible/openai_api_compatible.yaml @@ -8,6 +8,7 @@ supported_model_types: - llm - text-embedding - speech2text + - rerank configurate_methods: - customizable-model model_credential_schema: @@ -83,6 +84,19 @@ model_credential_schema: placeholder: zh_Hans: 在此输入您的模型上下文长度 en_US: Enter your Model context size + - variable: context_size + label: + zh_Hans: 模型上下文长度 + en_US: Model context size + required: true + show_on: + - variable: __model_type + value: rerank + type: text-input + default: '4096' + placeholder: + zh_Hans: 在此输入您的模型上下文长度 + en_US: Enter your Model context size - variable: max_tokens_to_sample label: zh_Hans: 最大 token 上限 diff --git a/api/core/model_runtime/model_providers/openai_api_compatible/speech2text/speech2text.py b/api/core/model_runtime/model_providers/openai_api_compatible/speech2text/speech2text.py index cef77cc941..a490537e51 100644 --- a/api/core/model_runtime/model_providers/openai_api_compatible/speech2text/speech2text.py +++ b/api/core/model_runtime/model_providers/openai_api_compatible/speech2text/speech2text.py @@ -62,7 +62,7 @@ class OAICompatSpeech2TextModel(_CommonOaiApiCompat, Speech2TextModel): except Exception as ex: raise CredentialsValidateFailedError(str(ex)) - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model 
schema """ diff --git a/api/core/model_runtime/model_providers/openllm/llm/llm.py b/api/core/model_runtime/model_providers/openllm/llm/llm.py index 34b4de7962..e5ecc748fb 100644 --- a/api/core/model_runtime/model_providers/openllm/llm/llm.py +++ b/api/core/model_runtime/model_providers/openllm/llm/llm.py @@ -1,4 +1,5 @@ from collections.abc import Generator +from typing import Optional from core.model_runtime.entities.common_entities import I18nObject from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta @@ -193,7 +194,7 @@ class OpenLLMLargeLanguageModel(LargeLanguageModel): ), ) - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/sagemaker/llm/llm.py b/api/core/model_runtime/model_providers/sagemaker/llm/llm.py index 97b7692044..dd53914a69 100644 --- a/api/core/model_runtime/model_providers/sagemaker/llm/llm.py +++ b/api/core/model_runtime/model_providers/sagemaker/llm/llm.py @@ -408,7 +408,7 @@ class SageMakerLargeLanguageModel(LargeLanguageModel): InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError], } - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/sagemaker/rerank/rerank.py b/api/core/model_runtime/model_providers/sagemaker/rerank/rerank.py index 959dff6a21..49c3fa5921 100644 --- a/api/core/model_runtime/model_providers/sagemaker/rerank/rerank.py +++ b/api/core/model_runtime/model_providers/sagemaker/rerank/rerank.py @@ -157,7 +157,7 @@ class SageMakerRerankModel(RerankModel): InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError], } - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/sagemaker/speech2text/speech2text.py b/api/core/model_runtime/model_providers/sagemaker/speech2text/speech2text.py index 94bae71e53..8fdf68abe1 100644 --- a/api/core/model_runtime/model_providers/sagemaker/speech2text/speech2text.py +++ b/api/core/model_runtime/model_providers/sagemaker/speech2text/speech2text.py @@ -111,7 +111,7 @@ class SageMakerSpeech2TextModel(Speech2TextModel): InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError], } - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/sagemaker/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/sagemaker/text_embedding/text_embedding.py index ae7d805b4e..ececfda11a 100644 --- a/api/core/model_runtime/model_providers/sagemaker/text_embedding/text_embedding.py +++ b/api/core/model_runtime/model_providers/sagemaker/text_embedding/text_embedding.py @@ -180,7 +180,7 @@ class SageMakerEmbeddingModel(TextEmbeddingModel): InvokeBadRequestError: [KeyError], } 
- def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/sagemaker/tts/tts.py b/api/core/model_runtime/model_providers/sagemaker/tts/tts.py index 1a5afd18f9..6a5946453b 100644 --- a/api/core/model_runtime/model_providers/sagemaker/tts/tts.py +++ b/api/core/model_runtime/model_providers/sagemaker/tts/tts.py @@ -159,7 +159,7 @@ class SageMakerText2SpeechModel(TTSModel): return self._tts_invoke_streaming(model_type, payload, sagemaker_endpoint) - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/llm.py b/api/core/model_runtime/model_providers/siliconflow/llm/llm.py index 4f8f4e0f61..6015442c2b 100644 --- a/api/core/model_runtime/model_providers/siliconflow/llm/llm.py +++ b/api/core/model_runtime/model_providers/siliconflow/llm/llm.py @@ -40,7 +40,7 @@ class SiliconflowLargeLanguageModel(OAIAPICompatLargeLanguageModel): credentials["mode"] = "chat" credentials["endpoint_url"] = "https://api.siliconflow.cn/v1" - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: return AIModelEntity( model=model, label=I18nObject(en_US=model, zh_Hans=model), diff --git a/api/core/model_runtime/model_providers/stepfun/llm/llm.py b/api/core/model_runtime/model_providers/stepfun/llm/llm.py index dab666e4d0..43b91a1aec 100644 --- a/api/core/model_runtime/model_providers/stepfun/llm/llm.py +++ b/api/core/model_runtime/model_providers/stepfun/llm/llm.py @@ -50,7 +50,7 @@ class StepfunLargeLanguageModel(OAIAPICompatLargeLanguageModel): self._add_custom_parameters(credentials) super().validate_credentials(model, credentials) - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: return AIModelEntity( model=model, label=I18nObject(en_US=model, zh_Hans=model), diff --git a/api/core/model_runtime/model_providers/tongyi/llm/llm.py b/api/core/model_runtime/model_providers/tongyi/llm/llm.py index 3e3585b30a..3a1bb75a59 100644 --- a/api/core/model_runtime/model_providers/tongyi/llm/llm.py +++ b/api/core/model_runtime/model_providers/tongyi/llm/llm.py @@ -535,7 +535,7 @@ class TongyiLargeLanguageModel(LargeLanguageModel): ], } - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ Architecture for defining customizable models diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-longcontext.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-longcontext.yaml index 098494ff95..cc0bb7a117 100644 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-longcontext.yaml +++ b/api/core/model_runtime/model_providers/tongyi/llm/qwen-max-longcontext.yaml @@ -76,3 +76,4 @@ pricing: output: '0.12' unit: '0.001' currency: RMB +deprecated: true diff --git 
a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-max.yaml index 9d0d3f8db3..4af4822e86 100644 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-max.yaml +++ b/api/core/model_runtime/model_providers/tongyi/llm/qwen-max.yaml @@ -10,7 +10,7 @@ features: - stream-tool-call model_properties: mode: chat - context_size: 8000 + context_size: 32000 parameter_rules: - name: temperature use_template: temperature @@ -26,7 +26,7 @@ parameter_rules: type: int default: 2000 min: 1 - max: 2000 + max: 8192 help: zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus.yaml index 9089e57255..529a29b1b5 100644 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus.yaml +++ b/api/core/model_runtime/model_providers/tongyi/llm/qwen-plus.yaml @@ -10,7 +10,7 @@ features: - stream-tool-call model_properties: mode: chat - context_size: 131072 + context_size: 128000 parameter_rules: - name: temperature use_template: temperature diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo.yaml index 215c9ec5fc..a0c4ba6820 100644 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo.yaml +++ b/api/core/model_runtime/model_providers/tongyi/llm/qwen-turbo.yaml @@ -10,7 +10,7 @@ features: - stream-tool-call model_properties: mode: chat - context_size: 8000 + context_size: 128000 parameter_rules: - name: temperature use_template: temperature @@ -26,7 +26,7 @@ parameter_rules: type: int default: 2000 min: 1 - max: 2000 + max: 8192 help: zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. 
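The qwen-max and qwen-turbo cards above raise both the context window and the max_tokens cap, and those two values must stay consistent with each other. A minimal sanity-check sketch, assuming PyYAML is installed and the script is run from the repository root (the directory path and key names are taken from the cards shown above; this script is illustrative and not part of the diff):

from pathlib import Path

import yaml

# Directory holding the Tongyi model cards edited above.
CARDS = Path("api/core/model_runtime/model_providers/tongyi/llm")

for card in sorted(CARDS.glob("qwen-*.yaml")):
    spec = yaml.safe_load(card.read_text(encoding="utf-8"))
    context_size = spec["model_properties"]["context_size"]
    for rule in spec.get("parameter_rules", []):
        # Only the max_tokens rule carries an explicit output cap.
        if rule.get("name") == "max_tokens" and "max" in rule:
            # The output cap must fit inside the context window,
            # e.g. qwen-max above: 8192 <= 32000.
            assert rule["max"] <= context_size, card.name

Under the values in this diff the check holds for every card shown (8192 <= 32000 for qwen-max, 8192 <= 128000 for qwen-turbo), while qwen-max-longcontext is flagged with deprecated: true rather than being removed outright.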
diff --git a/api/core/model_runtime/model_providers/triton_inference_server/llm/llm.py b/api/core/model_runtime/model_providers/triton_inference_server/llm/llm.py index cf7e3f14be..47a4b99214 100644 --- a/api/core/model_runtime/model_providers/triton_inference_server/llm/llm.py +++ b/api/core/model_runtime/model_providers/triton_inference_server/llm/llm.py @@ -1,4 +1,5 @@ from collections.abc import Generator +from typing import Optional from httpx import Response, post from yarl import URL @@ -109,7 +110,7 @@ class TritonInferenceAILargeLanguageModel(LargeLanguageModel): raise NotImplementedError(f"PromptMessage type {type(item)} is not supported") return text - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/volcengine_maas/llm/llm.py b/api/core/model_runtime/model_providers/volcengine_maas/llm/llm.py index dec6c9d789..1c776cec7e 100644 --- a/api/core/model_runtime/model_providers/volcengine_maas/llm/llm.py +++ b/api/core/model_runtime/model_providers/volcengine_maas/llm/llm.py @@ -1,5 +1,6 @@ import logging from collections.abc import Generator +from typing import Optional from volcenginesdkarkruntime.types.chat import ChatCompletion, ChatCompletionChunk @@ -298,7 +299,7 @@ class VolcengineMaaSLargeLanguageModel(LargeLanguageModel): chunks = client.stream_chat(prompt_messages, **req_params) return _handle_stream_chat_response(chunks) - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/wenxin/rerank/rerank.py b/api/core/model_runtime/model_providers/wenxin/rerank/rerank.py index b22aead22b..9e6a7dd99e 100644 --- a/api/core/model_runtime/model_providers/wenxin/rerank/rerank.py +++ b/api/core/model_runtime/model_providers/wenxin/rerank/rerank.py @@ -2,20 +2,15 @@ from typing import Optional import httpx -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelPropertyKey, ModelType from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) +from core.model_runtime.errors.invoke import InvokeError from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.__base.rerank_model import RerankModel from core.model_runtime.model_providers.wenxin._common import _CommonWenxin +from core.model_runtime.model_providers.wenxin.wenxin_errors import ( + InternalServerError, + invoke_error_mapping, +) class WenxinRerank(_CommonWenxin): @@ -32,7 +27,7 @@ class WenxinRerank(_CommonWenxin): response.raise_for_status() return response.json() except httpx.HTTPStatusError as e: - raise InvokeServerUnavailableError(str(e)) + raise InternalServerError(str(e)) class WenxinRerankModel(RerankModel): @@ -93,7 +88,7 @@ class WenxinRerankModel(RerankModel): return RerankResult(model=model, docs=rerank_documents) except httpx.HTTPStatusError as e: - raise 
InvokeServerUnavailableError(str(e)) + raise InternalServerError(str(e)) def validate_credentials(self, model: str, credentials: dict) -> None: """ @@ -124,24 +119,4 @@ class WenxinRerankModel(RerankModel): """ Map model invoke error to unified error """ - return { - InvokeConnectionError: [httpx.ConnectError], - InvokeServerUnavailableError: [httpx.RemoteProtocolError], - InvokeRateLimitError: [], - InvokeAuthorizationError: [httpx.HTTPStatusError], - InvokeBadRequestError: [httpx.RequestError], - } - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - """ - generate custom model entities from credentials - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - model_type=ModelType.RERANK, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size"))}, - ) - - return entity + return invoke_error_mapping() diff --git a/api/core/model_runtime/model_providers/xinference/llm/llm.py b/api/core/model_runtime/model_providers/xinference/llm/llm.py index 0c9d08679a..b82f0430c5 100644 --- a/api/core/model_runtime/model_providers/xinference/llm/llm.py +++ b/api/core/model_runtime/model_providers/xinference/llm/llm.py @@ -1,5 +1,5 @@ from collections.abc import Generator, Iterator -from typing import cast +from typing import Optional, cast from openai import ( APIConnectionError, @@ -321,7 +321,7 @@ class XinferenceAILargeLanguageModel(LargeLanguageModel): return message_dict - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/xinference/rerank/rerank.py b/api/core/model_runtime/model_providers/xinference/rerank/rerank.py index 6368cd76dc..efaf114854 100644 --- a/api/core/model_runtime/model_providers/xinference/rerank/rerank.py +++ b/api/core/model_runtime/model_providers/xinference/rerank/rerank.py @@ -142,7 +142,7 @@ class XinferenceRerankModel(RerankModel): InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError], } - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/xinference/speech2text/speech2text.py b/api/core/model_runtime/model_providers/xinference/speech2text/speech2text.py index c5ad383911..3d7aefeb6d 100644 --- a/api/core/model_runtime/model_providers/xinference/speech2text/speech2text.py +++ b/api/core/model_runtime/model_providers/xinference/speech2text/speech2text.py @@ -129,7 +129,7 @@ class XinferenceSpeech2TextModel(Speech2TextModel): return response["text"] - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/xinference/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/xinference/text_embedding/text_embedding.py index f64b9c50af..e51e6a941c 100644 --- a/api/core/model_runtime/model_providers/xinference/text_embedding/text_embedding.py +++ 
b/api/core/model_runtime/model_providers/xinference/text_embedding/text_embedding.py @@ -184,7 +184,7 @@ class XinferenceTextEmbeddingModel(TextEmbeddingModel): return usage - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/xinference/tts/tts.py b/api/core/model_runtime/model_providers/xinference/tts/tts.py index 3f46b50c33..ad7b64efb5 100644 --- a/api/core/model_runtime/model_providers/xinference/tts/tts.py +++ b/api/core/model_runtime/model_providers/xinference/tts/tts.py @@ -116,7 +116,7 @@ class XinferenceText2SpeechModel(TTSModel): """ return self._tts_invoke_streaming(model, credentials, content_text, voice) - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: """ used to define customizable model schema """ diff --git a/api/core/model_runtime/model_providers/yi/llm/llm.py b/api/core/model_runtime/model_providers/yi/llm/llm.py index 5ab7fd126e..0642e72ed5 100644 --- a/api/core/model_runtime/model_providers/yi/llm/llm.py +++ b/api/core/model_runtime/model_providers/yi/llm/llm.py @@ -4,12 +4,22 @@ from urllib.parse import urlparse import tiktoken -from core.model_runtime.entities.llm_entities import LLMResult +from core.model_runtime.entities.common_entities import I18nObject +from core.model_runtime.entities.llm_entities import LLMMode, LLMResult from core.model_runtime.entities.message_entities import ( PromptMessage, PromptMessageTool, SystemPromptMessage, ) +from core.model_runtime.entities.model_entities import ( + AIModelEntity, + FetchFrom, + ModelFeature, + ModelPropertyKey, + ModelType, + ParameterRule, + ParameterType, +) from core.model_runtime.model_providers.openai.llm.llm import OpenAILargeLanguageModel @@ -125,3 +135,58 @@ class YiLargeLanguageModel(OpenAILargeLanguageModel): else: parsed_url = urlparse(credentials["endpoint_url"]) credentials["openai_api_base"] = f"{parsed_url.scheme}://{parsed_url.netloc}" + + def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + return AIModelEntity( + model=model, + label=I18nObject(en_US=model, zh_Hans=model), + model_type=ModelType.LLM, + features=[ModelFeature.TOOL_CALL, ModelFeature.MULTI_TOOL_CALL, ModelFeature.STREAM_TOOL_CALL] + if credentials.get("function_calling_type") == "tool_call" + else [], + fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, + model_properties={ + ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size", 8000)), + ModelPropertyKey.MODE: LLMMode.CHAT.value, + }, + parameter_rules=[ + ParameterRule( + name="temperature", + use_template="temperature", + label=I18nObject(en_US="Temperature", zh_Hans="温度"), + type=ParameterType.FLOAT, + ), + ParameterRule( + name="max_tokens", + use_template="max_tokens", + default=512, + min=1, + max=int(credentials.get("max_tokens", 8192)), + label=I18nObject( + en_US="Max Tokens", zh_Hans="指定生成结果长度的上限。如果生成结果截断,可以调大该参数" + ), + type=ParameterType.INT, + ), + ParameterRule( + name="top_p", + use_template="top_p", + label=I18nObject( + en_US="Top P", + zh_Hans="控制生成结果的随机性。数值越小,随机性越弱;数值越大,随机性越强。", + ), + type=ParameterType.FLOAT, + ), + ParameterRule( + name="top_k", + use_template="top_k", + label=I18nObject(en_US="Top K", zh_Hans="取样数量"), + 
type=ParameterType.INT, + ), + ParameterRule( + name="frequency_penalty", + use_template="frequency_penalty", + label=I18nObject(en_US="Frequency Penalty", zh_Hans="重复惩罚"), + type=ParameterType.FLOAT, + ), + ], + ) diff --git a/api/core/model_runtime/model_providers/yi/yi.yaml b/api/core/model_runtime/model_providers/yi/yi.yaml index de741afb10..393526c31e 100644 --- a/api/core/model_runtime/model_providers/yi/yi.yaml +++ b/api/core/model_runtime/model_providers/yi/yi.yaml @@ -20,6 +20,7 @@ supported_model_types: - llm configurate_methods: - predefined-model + - customizable-model provider_credential_schema: credential_form_schemas: - variable: api_key @@ -39,3 +40,57 @@ provider_credential_schema: placeholder: zh_Hans: Base URL, e.g. https://api.lingyiwanwu.com/v1 en_US: Base URL, e.g. https://api.lingyiwanwu.com/v1 +model_credential_schema: + model: + label: + en_US: Model Name + zh_Hans: 模型名称 + placeholder: + en_US: Enter your model name + zh_Hans: 输入模型名称 + credential_form_schemas: + - variable: api_key + label: + en_US: API Key + type: secret-input + required: true + placeholder: + zh_Hans: 在此输入您的 API Key + en_US: Enter your API Key + - variable: context_size + label: + zh_Hans: 模型上下文长度 + en_US: Model context size + required: true + type: text-input + default: '4096' + placeholder: + zh_Hans: 在此输入您的模型上下文长度 + en_US: Enter your Model context size + - variable: max_tokens + label: + zh_Hans: 最大 token 上限 + en_US: Upper bound for max tokens + default: '4096' + type: text-input + show_on: + - variable: __model_type + value: llm + - variable: function_calling_type + label: + en_US: Function calling + type: select + required: false + default: no_call + options: + - value: no_call + label: + en_US: Not Supported + zh_Hans: 不支持 + - value: function_call + label: + en_US: Supported + zh_Hans: 支持 + show_on: + - variable: __model_type + value: llm diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py index 0200f4a32d..764944f799 100644 --- a/api/core/ops/ops_trace_manager.py +++ b/api/core/ops/ops_trace_manager.py @@ -358,8 +358,8 @@ class TraceTask: workflow_run_id = workflow_run.id workflow_run_elapsed_time = workflow_run.elapsed_time workflow_run_status = workflow_run.status - workflow_run_inputs = json.loads(workflow_run.inputs) if workflow_run.inputs else {} - workflow_run_outputs = json.loads(workflow_run.outputs) if workflow_run.outputs else {} + workflow_run_inputs = workflow_run.inputs_dict + workflow_run_outputs = workflow_run.outputs_dict workflow_run_version = workflow_run.version error = workflow_run.error or "" diff --git a/api/core/prompt/advanced_prompt_transform.py b/api/core/prompt/advanced_prompt_transform.py index ce8038d14e..bbd9531b19 100644 --- a/api/core/prompt/advanced_prompt_transform.py +++ b/api/core/prompt/advanced_prompt_transform.py @@ -1,12 +1,15 @@ -from typing import Optional, Union +from collections.abc import Sequence +from typing import Optional from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity -from core.file.file_obj import FileVar +from core.file import file_manager +from core.file.models import File from core.helper.code_executor.jinja2.jinja2_formatter import Jinja2Formatter from core.memory.token_buffer_memory import TokenBufferMemory -from core.model_runtime.entities.message_entities import ( +from core.model_runtime.entities import ( AssistantPromptMessage, PromptMessage, + PromptMessageContent, PromptMessageRole, SystemPromptMessage, TextPromptMessageContent, @@ -14,8 +17,8 @@ from
core.model_runtime.entities.message_entities import ( ) from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, CompletionModelPromptTemplate, MemoryConfig from core.prompt.prompt_transform import PromptTransform -from core.prompt.simple_prompt_transform import ModelMode from core.prompt.utils.prompt_template_parser import PromptTemplateParser +from core.workflow.entities.variable_pool import VariablePool class AdvancedPromptTransform(PromptTransform): @@ -28,22 +31,19 @@ class AdvancedPromptTransform(PromptTransform): def get_prompt( self, - prompt_template: Union[list[ChatModelMessage], CompletionModelPromptTemplate], - inputs: dict, + *, + prompt_template: Sequence[ChatModelMessage] | CompletionModelPromptTemplate, + inputs: dict[str, str], query: str, - files: list[FileVar], + files: Sequence[File], context: Optional[str], memory_config: Optional[MemoryConfig], memory: Optional[TokenBufferMemory], model_config: ModelConfigWithCredentialsEntity, - query_prompt_template: Optional[str] = None, ) -> list[PromptMessage]: - inputs = {key: str(value) for key, value in inputs.items()} - prompt_messages = [] - model_mode = ModelMode.value_of(model_config.mode) - if model_mode == ModelMode.COMPLETION: + if isinstance(prompt_template, CompletionModelPromptTemplate): prompt_messages = self._get_completion_model_prompt_messages( prompt_template=prompt_template, inputs=inputs, @@ -54,12 +54,11 @@ class AdvancedPromptTransform(PromptTransform): memory=memory, model_config=model_config, ) - elif model_mode == ModelMode.CHAT: + elif isinstance(prompt_template, list) and all(isinstance(item, ChatModelMessage) for item in prompt_template): prompt_messages = self._get_chat_model_prompt_messages( prompt_template=prompt_template, inputs=inputs, query=query, - query_prompt_template=query_prompt_template, files=files, context=context, memory_config=memory_config, @@ -74,7 +73,7 @@ class AdvancedPromptTransform(PromptTransform): prompt_template: CompletionModelPromptTemplate, inputs: dict, query: Optional[str], - files: list[FileVar], + files: Sequence[File], context: Optional[str], memory_config: Optional[MemoryConfig], memory: Optional[TokenBufferMemory], @@ -88,10 +87,10 @@ class AdvancedPromptTransform(PromptTransform): prompt_messages = [] if prompt_template.edition_type == "basic" or not prompt_template.edition_type: - prompt_template = PromptTemplateParser(template=raw_prompt, with_variable_tmpl=self.with_variable_tmpl) - prompt_inputs = {k: inputs[k] for k in prompt_template.variable_keys if k in inputs} + parser = PromptTemplateParser(template=raw_prompt, with_variable_tmpl=self.with_variable_tmpl) + prompt_inputs = {k: inputs[k] for k in parser.variable_keys if k in inputs} - prompt_inputs = self._set_context_variable(context, prompt_template, prompt_inputs) + prompt_inputs = self._set_context_variable(context, parser, prompt_inputs) if memory and memory_config: role_prefix = memory_config.role_prefix @@ -100,15 +99,15 @@ class AdvancedPromptTransform(PromptTransform): memory_config=memory_config, raw_prompt=raw_prompt, role_prefix=role_prefix, - prompt_template=prompt_template, + parser=parser, prompt_inputs=prompt_inputs, model_config=model_config, ) if query: - prompt_inputs = self._set_query_variable(query, prompt_template, prompt_inputs) + prompt_inputs = self._set_query_variable(query, parser, prompt_inputs) - prompt = prompt_template.format(prompt_inputs) + prompt = parser.format(prompt_inputs) else: prompt = raw_prompt prompt_inputs = inputs @@ -116,9 +115,10 @@ class 
AdvancedPromptTransform(PromptTransform): prompt = Jinja2Formatter.format(prompt, prompt_inputs) if files: - prompt_message_contents = [TextPromptMessageContent(data=prompt)] + prompt_message_contents: list[PromptMessageContent] = [] + prompt_message_contents.append(TextPromptMessageContent(data=prompt)) for file in files: - prompt_message_contents.append(file.prompt_message_content) + prompt_message_contents.append(file_manager.to_prompt_message_content(file)) prompt_messages.append(UserPromptMessage(content=prompt_message_contents)) else: @@ -131,35 +131,38 @@ class AdvancedPromptTransform(PromptTransform): prompt_template: list[ChatModelMessage], inputs: dict, query: Optional[str], - files: list[FileVar], + files: Sequence[File], context: Optional[str], memory_config: Optional[MemoryConfig], memory: Optional[TokenBufferMemory], model_config: ModelConfigWithCredentialsEntity, - query_prompt_template: Optional[str] = None, ) -> list[PromptMessage]: """ Get chat model prompt messages. """ - raw_prompt_list = prompt_template - prompt_messages = [] - - for prompt_item in raw_prompt_list: + for prompt_item in prompt_template: raw_prompt = prompt_item.text if prompt_item.edition_type == "basic" or not prompt_item.edition_type: - prompt_template = PromptTemplateParser(template=raw_prompt, with_variable_tmpl=self.with_variable_tmpl) - prompt_inputs = {k: inputs[k] for k in prompt_template.variable_keys if k in inputs} - - prompt_inputs = self._set_context_variable(context, prompt_template, prompt_inputs) - - prompt = prompt_template.format(prompt_inputs) + if self.with_variable_tmpl: + vp = VariablePool() + for k, v in inputs.items(): + if k.startswith("#"): + vp.add(k[1:-1].split("."), v) + raw_prompt = raw_prompt.replace("{{#context#}}", context or "") + prompt = vp.convert_template(raw_prompt).text + else: + parser = PromptTemplateParser(template=raw_prompt, with_variable_tmpl=self.with_variable_tmpl) + prompt_inputs = {k: inputs[k] for k in parser.variable_keys if k in inputs} + prompt_inputs = self._set_context_variable( + context=context, parser=parser, prompt_inputs=prompt_inputs + ) + prompt = parser.format(prompt_inputs) elif prompt_item.edition_type == "jinja2": prompt = raw_prompt prompt_inputs = inputs - - prompt = Jinja2Formatter.format(prompt, prompt_inputs) + prompt = Jinja2Formatter.format(template=prompt, inputs=prompt_inputs) else: raise ValueError(f"Invalid edition type: {prompt_item.edition_type}") @@ -170,25 +173,25 @@ class AdvancedPromptTransform(PromptTransform): elif prompt_item.role == PromptMessageRole.ASSISTANT: prompt_messages.append(AssistantPromptMessage(content=prompt)) - if query and query_prompt_template: - prompt_template = PromptTemplateParser( - template=query_prompt_template, with_variable_tmpl=self.with_variable_tmpl + if query and memory_config and memory_config.query_prompt_template: + parser = PromptTemplateParser( + template=memory_config.query_prompt_template, with_variable_tmpl=self.with_variable_tmpl ) - prompt_inputs = {k: inputs[k] for k in prompt_template.variable_keys if k in inputs} + prompt_inputs = {k: inputs[k] for k in parser.variable_keys if k in inputs} prompt_inputs["#sys.query#"] = query - prompt_inputs = self._set_context_variable(context, prompt_template, prompt_inputs) + prompt_inputs = self._set_context_variable(context, parser, prompt_inputs) - query = prompt_template.format(prompt_inputs) + query = parser.format(prompt_inputs) if memory and memory_config: prompt_messages = self._append_chat_histories(memory, memory_config, 
prompt_messages, model_config) - if files: - prompt_message_contents = [TextPromptMessageContent(data=query)] + if files and query is not None: + prompt_message_contents: list[PromptMessageContent] = [] + prompt_message_contents.append(TextPromptMessageContent(data=query)) for file in files: - prompt_message_contents.append(file.prompt_message_content) - + prompt_message_contents.append(file_manager.to_prompt_message_content(file)) prompt_messages.append(UserPromptMessage(content=prompt_message_contents)) else: prompt_messages.append(UserPromptMessage(content=query)) @@ -200,19 +203,19 @@ class AdvancedPromptTransform(PromptTransform): # get last user message content and add files prompt_message_contents = [TextPromptMessageContent(data=last_message.content)] for file in files: - prompt_message_contents.append(file.prompt_message_content) + prompt_message_contents.append(file_manager.to_prompt_message_content(file)) last_message.content = prompt_message_contents else: prompt_message_contents = [TextPromptMessageContent(data="")] # not for query for file in files: - prompt_message_contents.append(file.prompt_message_content) + prompt_message_contents.append(file_manager.to_prompt_message_content(file)) prompt_messages.append(UserPromptMessage(content=prompt_message_contents)) else: prompt_message_contents = [TextPromptMessageContent(data=query)] for file in files: - prompt_message_contents.append(file.prompt_message_content) + prompt_message_contents.append(file_manager.to_prompt_message_content(file)) prompt_messages.append(UserPromptMessage(content=prompt_message_contents)) elif query: @@ -220,8 +223,8 @@ class AdvancedPromptTransform(PromptTransform): return prompt_messages - def _set_context_variable(self, context: str, prompt_template: PromptTemplateParser, prompt_inputs: dict) -> dict: - if "#context#" in prompt_template.variable_keys: + def _set_context_variable(self, context: str | None, parser: PromptTemplateParser, prompt_inputs: dict) -> dict: + if "#context#" in parser.variable_keys: if context: prompt_inputs["#context#"] = context else: @@ -229,8 +232,8 @@ class AdvancedPromptTransform(PromptTransform): return prompt_inputs - def _set_query_variable(self, query: str, prompt_template: PromptTemplateParser, prompt_inputs: dict) -> dict: - if "#query#" in prompt_template.variable_keys: + def _set_query_variable(self, query: str, parser: PromptTemplateParser, prompt_inputs: dict) -> dict: + if "#query#" in parser.variable_keys: if query: prompt_inputs["#query#"] = query else: @@ -244,16 +247,16 @@ class AdvancedPromptTransform(PromptTransform): memory_config: MemoryConfig, raw_prompt: str, role_prefix: MemoryConfig.RolePrefix, - prompt_template: PromptTemplateParser, + parser: PromptTemplateParser, prompt_inputs: dict, model_config: ModelConfigWithCredentialsEntity, ) -> dict: - if "#histories#" in prompt_template.variable_keys: + if "#histories#" in parser.variable_keys: if memory: inputs = {"#histories#": "", **prompt_inputs} - prompt_template = PromptTemplateParser(template=raw_prompt, with_variable_tmpl=self.with_variable_tmpl) - prompt_inputs = {k: inputs[k] for k in prompt_template.variable_keys if k in inputs} - tmp_human_message = UserPromptMessage(content=prompt_template.format(prompt_inputs)) + parser = PromptTemplateParser(template=raw_prompt, with_variable_tmpl=self.with_variable_tmpl) + prompt_inputs = {k: inputs[k] for k in parser.variable_keys if k in inputs} + tmp_human_message = UserPromptMessage(content=parser.format(prompt_inputs)) rest_tokens = 
self._calculate_rest_token([tmp_human_message], model_config) diff --git a/api/core/prompt/simple_prompt_transform.py b/api/core/prompt/simple_prompt_transform.py index 7479560520..5a3481b963 100644 --- a/api/core/prompt/simple_prompt_transform.py +++ b/api/core/prompt/simple_prompt_transform.py @@ -5,9 +5,11 @@ from typing import TYPE_CHECKING, Optional from core.app.app_config.entities import PromptTemplateEntity from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity +from core.file import file_manager from core.memory.token_buffer_memory import TokenBufferMemory from core.model_runtime.entities.message_entities import ( PromptMessage, + PromptMessageContent, SystemPromptMessage, TextPromptMessageContent, UserPromptMessage, @@ -18,10 +20,10 @@ from core.prompt.utils.prompt_template_parser import PromptTemplateParser from models.model import AppMode if TYPE_CHECKING: - from core.file.file_obj import FileVar + from core.file.models import File -class ModelMode(enum.Enum): +class ModelMode(str, enum.Enum): COMPLETION = "completion" CHAT = "chat" @@ -53,7 +55,7 @@ class SimplePromptTransform(PromptTransform): prompt_template_entity: PromptTemplateEntity, inputs: dict, query: str, - files: list["FileVar"], + files: list["File"], context: Optional[str], memory: Optional[TokenBufferMemory], model_config: ModelConfigWithCredentialsEntity, @@ -169,7 +171,7 @@ class SimplePromptTransform(PromptTransform): inputs: dict, query: str, context: Optional[str], - files: list["FileVar"], + files: list["File"], memory: Optional[TokenBufferMemory], model_config: ModelConfigWithCredentialsEntity, ) -> tuple[list[PromptMessage], Optional[list[str]]]: @@ -214,7 +216,7 @@ class SimplePromptTransform(PromptTransform): inputs: dict, query: str, context: Optional[str], - files: list["FileVar"], + files: list["File"], memory: Optional[TokenBufferMemory], model_config: ModelConfigWithCredentialsEntity, ) -> tuple[list[PromptMessage], Optional[list[str]]]: @@ -261,11 +263,12 @@ class SimplePromptTransform(PromptTransform): return [self.get_last_user_message(prompt, files)], stops - def get_last_user_message(self, prompt: str, files: list["FileVar"]) -> UserPromptMessage: + def get_last_user_message(self, prompt: str, files: list["File"]) -> UserPromptMessage: if files: - prompt_message_contents = [TextPromptMessageContent(data=prompt)] + prompt_message_contents: list[PromptMessageContent] = [] + prompt_message_contents.append(TextPromptMessageContent(data=prompt)) for file in files: - prompt_message_contents.append(file.prompt_message_content) + prompt_message_contents.append(file_manager.to_prompt_message_content(file)) prompt_message = UserPromptMessage(content=prompt_message_contents) else: diff --git a/api/core/prompt/utils/extract_thread_messages.py b/api/core/prompt/utils/extract_thread_messages.py index e8b626499f..f7aef76c87 100644 --- a/api/core/prompt/utils/extract_thread_messages.py +++ b/api/core/prompt/utils/extract_thread_messages.py @@ -1,7 +1,9 @@ +from typing import Any + from constants import UUID_NIL -def extract_thread_messages(messages: list[dict]) -> list[dict]: +def extract_thread_messages(messages: list[Any]): thread_messages = [] next_message = None diff --git a/api/core/prompt/utils/prompt_message_util.py b/api/core/prompt/utils/prompt_message_util.py index 29494db221..5eec5e3c99 100644 --- a/api/core/prompt/utils/prompt_message_util.py +++ b/api/core/prompt/utils/prompt_message_util.py @@ -1,7 +1,8 @@ from typing import cast -from 
core.model_runtime.entities.message_entities import ( +from core.model_runtime.entities import ( AssistantPromptMessage, + AudioPromptMessageContent, ImagePromptMessageContent, PromptMessage, PromptMessageContentType, @@ -21,7 +22,7 @@ class PromptMessageUtil: :return: """ prompts = [] - if model_mode == ModelMode.CHAT.value: + if model_mode == ModelMode.CHAT: tool_calls = [] for prompt_message in prompt_messages: if prompt_message.role == PromptMessageRole.USER: @@ -51,11 +52,9 @@ class PromptMessageUtil: files = [] if isinstance(prompt_message.content, list): for content in prompt_message.content: - if content.type == PromptMessageContentType.TEXT: - content = cast(TextPromptMessageContent, content) + if isinstance(content, TextPromptMessageContent): text += content.data - else: - content = cast(ImagePromptMessageContent, content) + elif isinstance(content, ImagePromptMessageContent): files.append( { "type": "image", @@ -63,6 +62,14 @@ "detail": content.detail.value, } ) + elif isinstance(content, AudioPromptMessageContent): + files.append( + { + "type": "audio", + "data": content.data[:10] + "...[TRUNCATED]..." + content.data[-10:], + "format": content.format, + } + ) else: text = prompt_message.content diff --git a/api/core/prompt/utils/prompt_template_parser.py b/api/core/prompt/utils/prompt_template_parser.py index 8111559675..0fd08c5d3c 100644 --- a/api/core/prompt/utils/prompt_template_parser.py +++ b/api/core/prompt/utils/prompt_template_parser.py @@ -33,7 +33,7 @@ class PromptTemplateParser: key = match.group(1) value = inputs.get(key, match.group(0)) # return original matched string if key not found - if remove_template_variables: + if remove_template_variables and isinstance(value, str): return PromptTemplateParser.remove_template_variables(value, self.with_variable_tmpl) return value diff --git a/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py b/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py index 69d2aa4f76..3811458e02 100644 --- a/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py +++ b/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py @@ -428,14 +428,13 @@ class QdrantVectorFactory(AbstractVectorFactory): if not dataset.index_struct_dict: dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.QDRANT, collection_name)) - config = current_app.config return QdrantVector( collection_name=collection_name, group_id=dataset.id, config=QdrantConfig( endpoint=dify_config.QDRANT_URL, api_key=dify_config.QDRANT_API_KEY, - root_path=config.root_path, + root_path=current_app.config.root_path, timeout=dify_config.QDRANT_CLIENT_TIMEOUT, grpc_port=dify_config.QDRANT_GRPC_PORT, prefer_grpc=dify_config.QDRANT_GRPC_ENABLED, diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/__init__.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_entities.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_entities.py new file mode 100644 index 0000000000..1e62b3c589 --- /dev/null +++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_entities.py @@ -0,0 +1,17 @@ +from typing import Optional + +from pydantic import BaseModel + + +class ClusterEntity(BaseModel): + """ + TiDB Serverless cluster entity.
+ """ + + name: str + cluster_id: str + displayName: str + region: str + spendingLimit: Optional[int] = 1000 + version: str + createdBy: str diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py new file mode 100644 index 0000000000..a38f84636e --- /dev/null +++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py @@ -0,0 +1,526 @@ +import json +import os +import uuid +from collections.abc import Generator, Iterable, Sequence +from itertools import islice +from typing import TYPE_CHECKING, Any, Optional, Union, cast + +import qdrant_client +import requests +from flask import current_app +from pydantic import BaseModel +from qdrant_client.http import models as rest +from qdrant_client.http.models import ( + FilterSelector, + HnswConfigDiff, + PayloadSchemaType, + TextIndexParams, + TextIndexType, + TokenizerType, +) +from qdrant_client.local.qdrant_local import QdrantLocal +from requests.auth import HTTPDigestAuth + +from configs import dify_config +from core.rag.datasource.vdb.field import Field +from core.rag.datasource.vdb.tidb_on_qdrant.tidb_service import TidbService +from core.rag.datasource.vdb.vector_base import BaseVector +from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory +from core.rag.datasource.vdb.vector_type import VectorType +from core.rag.embedding.embedding_base import Embeddings +from core.rag.models.document import Document +from extensions.ext_database import db +from extensions.ext_redis import redis_client +from models.dataset import Dataset, TidbAuthBinding + +if TYPE_CHECKING: + from qdrant_client import grpc # noqa + from qdrant_client.conversions import common_types + from qdrant_client.http import models as rest + + DictFilter = dict[str, Union[str, int, bool, dict, list]] + MetadataFilter = Union[DictFilter, common_types.Filter] + + +class TidbOnQdrantConfig(BaseModel): + endpoint: str + api_key: Optional[str] = None + timeout: float = 20 + root_path: Optional[str] = None + grpc_port: int = 6334 + prefer_grpc: bool = False + + def to_qdrant_params(self): + if self.endpoint and self.endpoint.startswith("path:"): + path = self.endpoint.replace("path:", "") + if not os.path.isabs(path): + path = os.path.join(self.root_path, path) + + return {"path": path} + else: + return { + "url": self.endpoint, + "api_key": self.api_key, + "timeout": self.timeout, + "verify": False, + "grpc_port": self.grpc_port, + "prefer_grpc": self.prefer_grpc, + } + + +class TidbConfig(BaseModel): + api_url: str + public_key: str + private_key: str + + +class TidbOnQdrantVector(BaseVector): + def __init__(self, collection_name: str, group_id: str, config: TidbOnQdrantConfig, distance_func: str = "Cosine"): + super().__init__(collection_name) + self._client_config = config + self._client = qdrant_client.QdrantClient(**self._client_config.to_qdrant_params()) + self._distance_func = distance_func.upper() + self._group_id = group_id + + def get_type(self) -> str: + return VectorType.TIDB_ON_QDRANT + + def to_index_struct(self) -> dict: + return {"type": self.get_type(), "vector_store": {"class_prefix": self._collection_name}} + + def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs): + if texts: + # get embedding vector size + vector_size = len(embeddings[0]) + # get collection name + collection_name = self._collection_name + # create collection + self.create_collection(collection_name, vector_size) + + self.add_texts(texts, 
embeddings, **kwargs) + + def create_collection(self, collection_name: str, vector_size: int): + lock_name = "vector_indexing_lock_{}".format(collection_name) + with redis_client.lock(lock_name, timeout=20): + collection_exist_cache_key = "vector_indexing_{}".format(self._collection_name) + if redis_client.get(collection_exist_cache_key): + return + collection_name = collection_name or uuid.uuid4().hex + all_collection_name = [] + collections_response = self._client.get_collections() + collection_list = collections_response.collections + for collection in collection_list: + all_collection_name.append(collection.name) + if collection_name not in all_collection_name: + from qdrant_client.http import models as rest + + vectors_config = rest.VectorParams( + size=vector_size, + distance=rest.Distance[self._distance_func], + ) + hnsw_config = HnswConfigDiff( + m=0, + payload_m=16, + ef_construct=100, + full_scan_threshold=10000, + max_indexing_threads=0, + on_disk=False, + ) + self._client.recreate_collection( + collection_name=collection_name, + vectors_config=vectors_config, + hnsw_config=hnsw_config, + timeout=int(self._client_config.timeout), + ) + + # create group_id payload index + self._client.create_payload_index( + collection_name, Field.GROUP_KEY.value, field_schema=PayloadSchemaType.KEYWORD + ) + # create doc_id payload index + self._client.create_payload_index( + collection_name, Field.DOC_ID.value, field_schema=PayloadSchemaType.KEYWORD + ) + # create full text index + text_index_params = TextIndexParams( + type=TextIndexType.TEXT, + tokenizer=TokenizerType.MULTILINGUAL, + min_token_len=2, + max_token_len=20, + lowercase=True, + ) + self._client.create_payload_index( + collection_name, Field.CONTENT_KEY.value, field_schema=text_index_params + ) + redis_client.set(collection_exist_cache_key, 1, ex=3600) + + def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): + uuids = self._get_uuids(documents) + texts = [d.page_content for d in documents] + metadatas = [d.metadata for d in documents] + + added_ids = [] + for batch_ids, points in self._generate_rest_batches(texts, embeddings, metadatas, uuids, 64, self._group_id): + self._client.upsert(collection_name=self._collection_name, points=points) + added_ids.extend(batch_ids) + + return added_ids + + def _generate_rest_batches( + self, + texts: Iterable[str], + embeddings: list[list[float]], + metadatas: Optional[list[dict]] = None, + ids: Optional[Sequence[str]] = None, + batch_size: int = 64, + group_id: Optional[str] = None, + ) -> Generator[tuple[list[str], list[rest.PointStruct]], None, None]: + from qdrant_client.http import models as rest + + texts_iterator = iter(texts) + embeddings_iterator = iter(embeddings) + metadatas_iterator = iter(metadatas or []) + ids_iterator = iter(ids or [uuid.uuid4().hex for _ in iter(texts)]) + while batch_texts := list(islice(texts_iterator, batch_size)): + # Take the corresponding metadata and id for each text in a batch + batch_metadatas = list(islice(metadatas_iterator, batch_size)) or None + batch_ids = list(islice(ids_iterator, batch_size)) + + # Generate the embeddings for all the texts in a batch + batch_embeddings = list(islice(embeddings_iterator, batch_size)) + + points = [ + rest.PointStruct( + id=point_id, + vector=vector, + payload=payload, + ) + for point_id, vector, payload in zip( + batch_ids, + batch_embeddings, + self._build_payloads( + batch_texts, + batch_metadatas, + Field.CONTENT_KEY.value, + Field.METADATA_KEY.value, + group_id, + 
Field.GROUP_KEY.value, + ), + ) + ] + + yield batch_ids, points + + @classmethod + def _build_payloads( + cls, + texts: Iterable[str], + metadatas: Optional[list[dict]], + content_payload_key: str, + metadata_payload_key: str, + group_id: str, + group_payload_key: str, + ) -> list[dict]: + payloads = [] + for i, text in enumerate(texts): + if text is None: + raise ValueError( + "At least one of the texts is None. Please remove it before " + "calling .from_texts or .add_texts on Qdrant instance." + ) + metadata = metadatas[i] if metadatas is not None else None + payloads.append({content_payload_key: text, metadata_payload_key: metadata, group_payload_key: group_id}) + + return payloads + + def delete_by_metadata_field(self, key: str, value: str): + from qdrant_client.http import models + from qdrant_client.http.exceptions import UnexpectedResponse + + try: + filter = models.Filter( + must=[ + models.FieldCondition( + key=f"metadata.{key}", + match=models.MatchValue(value=value), + ), + ], + ) + + self._reload_if_needed() + + self._client.delete( + collection_name=self._collection_name, + points_selector=FilterSelector(filter=filter), + ) + except UnexpectedResponse as e: + # Collection does not exist, so return + if e.status_code == 404: + return + # Some other error occurred, so re-raise the exception + else: + raise e + + def delete(self): + from qdrant_client.http.exceptions import UnexpectedResponse + + try: + self._client.delete_collection(collection_name=self._collection_name) + except UnexpectedResponse as e: + # Collection does not exist, so return + if e.status_code == 404: + return + # Some other error occurred, so re-raise the exception + else: + raise e + + def delete_by_ids(self, ids: list[str]) -> None: + from qdrant_client.http import models + from qdrant_client.http.exceptions import UnexpectedResponse + + for node_id in ids: + try: + filter = models.Filter( + must=[ + models.FieldCondition( + key="metadata.doc_id", + match=models.MatchValue(value=node_id), + ), + ], + ) + self._client.delete( + collection_name=self._collection_name, + points_selector=FilterSelector(filter=filter), + ) + except UnexpectedResponse as e: + # Collection does not exist, so return + if e.status_code == 404: + return + # Some other error occurred, so re-raise the exception + else: + raise e + + def text_exists(self, id: str) -> bool: + all_collection_name = [] + collections_response = self._client.get_collections() + collection_list = collections_response.collections + for collection in collection_list: + all_collection_name.append(collection.name) + if self._collection_name not in all_collection_name: + return False + response = self._client.retrieve(collection_name=self._collection_name, ids=[id]) + + return len(response) > 0 + + def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: + from qdrant_client.http import models + + filter = models.Filter( + must=[ + models.FieldCondition( + key="group_id", + match=models.MatchValue(value=self._group_id), + ), + ], + ) + results = self._client.search( + collection_name=self._collection_name, + query_vector=query_vector, + query_filter=filter, + limit=kwargs.get("top_k", 4), + with_payload=True, + with_vectors=True, + score_threshold=kwargs.get("score_threshold", 0.0), + ) + docs = [] + for result in results: + metadata = result.payload.get(Field.METADATA_KEY.value) or {} + # duplicate check score threshold + score_threshold = kwargs.get("score_threshold") or 0.0 + if result.score > score_threshold: + metadata["score"] 
= result.score + doc = Document( + page_content=result.payload.get(Field.CONTENT_KEY.value), + metadata=metadata, + ) + docs.append(doc) + # Sort the documents by score in descending order + docs = sorted(docs, key=lambda x: x.metadata["score"], reverse=True) + return docs + + def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: + """Return docs most similar by bm25. + Returns: + List of documents most similar to the query text and distance for each. + """ + from qdrant_client.http import models + + scroll_filter = models.Filter( + must=[ + models.FieldCondition( + key="page_content", + match=models.MatchText(text=query), + ) + ] + ) + response = self._client.scroll( + collection_name=self._collection_name, + scroll_filter=scroll_filter, + limit=kwargs.get("top_k", 2), + with_payload=True, + with_vectors=True, + ) + results = response[0] + documents = [] + for result in results: + if result: + document = self._document_from_scored_point(result, Field.CONTENT_KEY.value, Field.METADATA_KEY.value) + document.metadata["vector"] = result.vector + documents.append(document) + + return documents + + def _reload_if_needed(self): + if isinstance(self._client, QdrantLocal): + self._client = cast(QdrantLocal, self._client) + self._client._load() + + @classmethod + def _document_from_scored_point( + cls, + scored_point: Any, + content_payload_key: str, + metadata_payload_key: str, + ) -> Document: + return Document( + page_content=scored_point.payload.get(content_payload_key), + metadata=scored_point.payload.get(metadata_payload_key) or {}, + ) + + +class TidbOnQdrantVectorFactory(AbstractVectorFactory): + def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> TidbOnQdrantVector: + tidb_auth_binding = ( + db.session.query(TidbAuthBinding).filter(TidbAuthBinding.tenant_id == dataset.tenant_id).one_or_none() + ) + if not tidb_auth_binding: + idle_tidb_auth_binding = ( + db.session.query(TidbAuthBinding) + .filter(TidbAuthBinding.active == False, TidbAuthBinding.status == "ACTIVE") + .limit(1) + .one_or_none() + ) + if idle_tidb_auth_binding: + idle_tidb_auth_binding.active = True + idle_tidb_auth_binding.tenant_id = dataset.tenant_id + db.session.commit() + TIDB_ON_QDRANT_API_KEY = f"{idle_tidb_auth_binding.account}:{idle_tidb_auth_binding.password}" + else: + with redis_client.lock("create_tidb_serverless_cluster_lock", timeout=900): + tidb_auth_binding = ( + db.session.query(TidbAuthBinding) + .filter(TidbAuthBinding.tenant_id == dataset.tenant_id) + .one_or_none() + ) + if tidb_auth_binding: + TIDB_ON_QDRANT_API_KEY = f"{tidb_auth_binding.account}:{tidb_auth_binding.password}" + + else: + new_cluster = TidbService.create_tidb_serverless_cluster( + dify_config.TIDB_PROJECT_ID, + dify_config.TIDB_API_URL, + dify_config.TIDB_IAM_API_URL, + dify_config.TIDB_PUBLIC_KEY, + dify_config.TIDB_PRIVATE_KEY, + dify_config.TIDB_REGION, + ) + new_tidb_auth_binding = TidbAuthBinding( + cluster_id=new_cluster["cluster_id"], + cluster_name=new_cluster["cluster_name"], + account=new_cluster["account"], + password=new_cluster["password"], + tenant_id=dataset.tenant_id, + active=True, + status="ACTIVE", + ) + db.session.add(new_tidb_auth_binding) + db.session.commit() + TIDB_ON_QDRANT_API_KEY = f"{new_tidb_auth_binding.account}:{new_tidb_auth_binding.password}" + + else: + TIDB_ON_QDRANT_API_KEY = f"{tidb_auth_binding.account}:{tidb_auth_binding.password}" + + if dataset.index_struct_dict: + class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] 
+ collection_name = class_prefix + else: + dataset_id = dataset.id + collection_name = Dataset.gen_collection_name_by_id(dataset_id) + dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.TIDB_ON_QDRANT, collection_name)) + + config = current_app.config + + return TidbOnQdrantVector( + collection_name=collection_name, + group_id=dataset.id, + config=TidbOnQdrantConfig( + endpoint=dify_config.TIDB_ON_QDRANT_URL, + api_key=TIDB_ON_QDRANT_API_KEY, + root_path=config.root_path, + timeout=dify_config.TIDB_ON_QDRANT_CLIENT_TIMEOUT, + grpc_port=dify_config.TIDB_ON_QDRANT_GRPC_PORT, + prefer_grpc=dify_config.TIDB_ON_QDRANT_GRPC_ENABLED, + ), + ) + + def create_tidb_serverless_cluster(self, tidb_config: TidbConfig, display_name: str, region: str): + """ + Creates a new TiDB Serverless cluster. + :param tidb_config: The configuration for the TiDB Cloud API. + :param display_name: The user-friendly display name of the cluster (required). + :param region: The region where the cluster will be created (required). + + :return: The response from the API. + """ + region_object = { + "name": region, + } + + labels = { + "tidb.cloud/project": "1372813089454548012", + } + cluster_data = {"displayName": display_name, "region": region_object, "labels": labels} + + response = requests.post( + f"{tidb_config.api_url}/clusters", + json=cluster_data, + auth=HTTPDigestAuth(tidb_config.public_key, tidb_config.private_key), + ) + + if response.status_code == 200: + return response.json() + else: + response.raise_for_status() + + def change_tidb_serverless_root_password(self, tidb_config: TidbConfig, cluster_id: str, new_password: str): + """ + Changes the root password of a specific TiDB Serverless cluster. + + :param tidb_config: The configuration for the TiDB Cloud API. + :param cluster_id: The ID of the cluster for which the password is to be changed (required). + :param new_password: The new password for the root user (required). + :return: The response from the API. + """ + + body = {"password": new_password} + + response = requests.put( + f"{tidb_config.api_url}/clusters/{cluster_id}/password", + json=body, + auth=HTTPDigestAuth(tidb_config.public_key, tidb_config.private_key), + ) + + if response.status_code == 200: + return response.json() + else: + response.raise_for_status() diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py new file mode 100644 index 0000000000..f10d6339ee --- /dev/null +++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py @@ -0,0 +1,250 @@ +import time +import uuid + +import requests +from requests.auth import HTTPDigestAuth + +from extensions.ext_database import db +from extensions.ext_redis import redis_client +from models.dataset import TidbAuthBinding + + +class TidbService: + @staticmethod + def create_tidb_serverless_cluster( + project_id: str, api_url: str, iam_url: str, public_key: str, private_key: str, region: str + ): + """ + Creates a new TiDB Serverless cluster. + :param project_id: The project ID of the TiDB Cloud project (required). + :param api_url: The URL of the TiDB Cloud API (required). + :param iam_url: The URL of the TiDB Cloud IAM API (required). + :param public_key: The public key for the API (required). + :param private_key: The private key for the API (required). + :param region: The region where the cluster will be created (required). + + :return: The response from the API.
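+ Note: the display name and root password are generated internally; the call then polls the cluster state every 30 seconds (up to 30 attempts) and returns the connection info once the cluster is ACTIVE, or None if it does not become ACTIVE in time.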
+ """ + + region_object = { + "name": region, + } + + labels = { + "tidb.cloud/project": project_id, + } + + spending_limit = { + "monthly": 100, + } + password = str(uuid.uuid4()).replace("-", "")[:16] + display_name = str(uuid.uuid4()).replace("-", "")[:16] + cluster_data = { + "displayName": display_name, + "region": region_object, + "labels": labels, + "spendingLimit": spending_limit, + "rootPassword": password, + } + + response = requests.post(f"{api_url}/clusters", json=cluster_data, auth=HTTPDigestAuth(public_key, private_key)) + + if response.status_code == 200: + response_data = response.json() + cluster_id = response_data["clusterId"] + retry_count = 0 + max_retries = 30 + while retry_count < max_retries: + cluster_response = TidbService.get_tidb_serverless_cluster(api_url, public_key, private_key, cluster_id) + if cluster_response["state"] == "ACTIVE": + user_prefix = cluster_response["userPrefix"] + return { + "cluster_id": cluster_id, + "cluster_name": display_name, + "account": f"{user_prefix}.root", + "password": password, + } + time.sleep(30) # wait 30 seconds before retrying + retry_count += 1 + else: + response.raise_for_status() + + @staticmethod + def delete_tidb_serverless_cluster(api_url: str, public_key: str, private_key: str, cluster_id: str): + """ + Deletes a specific TiDB Serverless cluster. + + :param api_url: The URL of the TiDB Cloud API (required). + :param public_key: The public key for the API (required). + :param private_key: The private key for the API (required). + :param cluster_id: The ID of the cluster to be deleted (required). + :return: The response from the API. + """ + + response = requests.delete(f"{api_url}/clusters/{cluster_id}", auth=HTTPDigestAuth(public_key, private_key)) + + if response.status_code == 200: + return response.json() + else: + response.raise_for_status() + + @staticmethod + def get_tidb_serverless_cluster(api_url: str, public_key: str, private_key: str, cluster_id: str): + """ + Retrieves a specific TiDB Serverless cluster. + + :param api_url: The URL of the TiDB Cloud API (required). + :param public_key: The public key for the API (required). + :param private_key: The private key for the API (required). + :param cluster_id: The ID of the cluster to be retrieved (required). + :return: The response from the API. + """ + + response = requests.get(f"{api_url}/clusters/{cluster_id}", auth=HTTPDigestAuth(public_key, private_key)) + + if response.status_code == 200: + return response.json() + else: + response.raise_for_status() + + @staticmethod + def change_tidb_serverless_root_password( + api_url: str, public_key: str, private_key: str, cluster_id: str, account: str, new_password: str + ): + """ + Changes the root password of a specific TiDB Serverless cluster. + + :param api_url: The URL of the TiDB Cloud API (required). + :param public_key: The public key for the API (required). + :param private_key: The private key for the API (required). + :param cluster_id: The ID of the cluster for which the password is to be changed (required). + :param account: The account for which the password is to be changed (required). + :param new_password: The new password for the root user (required). + :return: The response from the API.
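+ Note: the update is sent as a PATCH to /clusters/{cluster_id}/sqlUsers/{account}, which also resets the user's builtin role to role_admin.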
+ """ + + body = {"password": new_password, "builtinRole": "role_admin", "customRoles": []} + + response = requests.patch( + f"{api_url}/clusters/{cluster_id}/sqlUsers/{account}", + json=body, + auth=HTTPDigestAuth(public_key, private_key), + ) + + if response.status_code == 200: + return response.json() + else: + response.raise_for_status() + + @staticmethod + def batch_update_tidb_serverless_cluster_status( + tidb_serverless_list: list[TidbAuthBinding], + project_id: str, + api_url: str, + iam_url: str, + public_key: str, + private_key: str, + ) -> None: + """ + Batch-updates the status of existing TiDB Serverless clusters. + :param tidb_serverless_list: The TidbAuthBinding records whose status should be refreshed (required). + :param project_id: The project ID of the TiDB Cloud project (required). + :param api_url: The URL of the TiDB Cloud API (required). + :param iam_url: The URL of the TiDB Cloud IAM API (required). + :param public_key: The public key for the API (required). + :param private_key: The private key for the API (required). + + :return: None; matching bindings are updated in place. + """ + tidb_serverless_list_map = {item.cluster_id: item for item in tidb_serverless_list} + cluster_ids = [item.cluster_id for item in tidb_serverless_list] + params = {"clusterIds": cluster_ids, "view": "FULL"} + response = requests.get( + f"{api_url}/clusters:batchGet", params=params, auth=HTTPDigestAuth(public_key, private_key) + ) + + if response.status_code == 200: + response_data = response.json() + for item in response_data["clusters"]: + state = item["state"] + userPrefix = item["userPrefix"] + if state == "ACTIVE" and len(userPrefix) > 0: + cluster_info = tidb_serverless_list_map[item["clusterId"]] + cluster_info.status = "ACTIVE" + cluster_info.account = f"{userPrefix}.root" + db.session.add(cluster_info) + db.session.commit() + else: + response.raise_for_status() + + @staticmethod + def batch_create_tidb_serverless_cluster( + batch_size: int, project_id: str, api_url: str, iam_url: str, public_key: str, private_key: str, region: str + ) -> list[dict]: + """ + Creates a batch of new TiDB Serverless clusters. + :param batch_size: The number of clusters to create (required). + :param project_id: The project ID of the TiDB Cloud project (required). + :param api_url: The URL of the TiDB Cloud API (required). + :param iam_url: The URL of the TiDB Cloud IAM API (required). + :param public_key: The public key for the API (required). + :param private_key: The private key for the API (required). + :param region: The region where the clusters will be created (required). + + :return: A list of dicts with cluster_id, cluster_name, account, and password for each created cluster.
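+ Note: generated root passwords are cached in Redis for one hour (keyed by display name) so they can be matched to the cluster IDs returned by the batch API.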
+ """ + clusters = [] + for _ in range(batch_size): + region_object = { + "name": region, + } + + labels = { + "tidb.cloud/project": project_id, + } + + spending_limit = { + "monthly": 10, + } + password = str(uuid.uuid4()).replace("-", "")[:16] + display_name = str(uuid.uuid4()).replace("-", "") + cluster_data = { + "cluster": { + "displayName": display_name, + "region": region_object, + "labels": labels, + "spendingLimit": spending_limit, + "rootPassword": password, + } + } + cache_key = f"tidb_serverless_cluster_password:{display_name}" + redis_client.setex(cache_key, 3600, password) + clusters.append(cluster_data) + + request_body = {"requests": clusters} + response = requests.post( + f"{api_url}/clusters:batchCreate", json=request_body, auth=HTTPDigestAuth(public_key, private_key) + ) + + if response.status_code == 200: + response_data = response.json() + cluster_infos = [] + for item in response_data["clusters"]: + cache_key = f"tidb_serverless_cluster_password:{item['displayName']}" + password = redis_client.get(cache_key) + if not password: + continue + cluster_info = { + "cluster_id": item["clusterId"], + "cluster_name": item["displayName"], + "account": "root", + "password": password.decode("utf-8"), + } + cluster_infos.append(cluster_info) + return cluster_infos + else: + response.raise_for_status() diff --git a/api/core/rag/datasource/vdb/vector_factory.py b/api/core/rag/datasource/vdb/vector_factory.py index fb956a16ed..59a5aadacd 100644 --- a/api/core/rag/datasource/vdb/vector_factory.py +++ b/api/core/rag/datasource/vdb/vector_factory.py @@ -9,8 +9,9 @@ from core.rag.datasource.vdb.vector_type import VectorType from core.rag.embedding.cached_embedding import CacheEmbedding from core.rag.embedding.embedding_base import Embeddings from core.rag.models.document import Document +from extensions.ext_database import db from extensions.ext_redis import redis_client -from models.dataset import Dataset +from models.dataset import Dataset, Whitelist class AbstractVectorFactory(ABC): @@ -35,8 +36,18 @@ class Vector: def _init_vector(self) -> BaseVector: vector_type = dify_config.VECTOR_STORE + if self._dataset.index_struct_dict: vector_type = self._dataset.index_struct_dict["type"] + else: + if dify_config.VECTOR_STORE_WHITELIST_ENABLE: + whitelist = ( + db.session.query(Whitelist) + .filter(Whitelist.tenant_id == self._dataset.tenant_id, Whitelist.category == "vector_db") + .one_or_none() + ) + if whitelist: + vector_type = VectorType.TIDB_ON_QDRANT if not vector_type: raise ValueError("Vector store must be specified.") @@ -111,6 +122,14 @@ class Vector: from core.rag.datasource.vdb.vikingdb.vikingdb_vector import VikingDBVectorFactory return VikingDBVectorFactory + case VectorType.UPSTASH: + from core.rag.datasource.vdb.upstash.upstash_vector import UpstashVectorFactory + + return UpstashVectorFactory + case VectorType.TIDB_ON_QDRANT: + from core.rag.datasource.vdb.tidb_on_qdrant.tidb_on_qdrant_vector import TidbOnQdrantVectorFactory + + return TidbOnQdrantVectorFactory case _: raise ValueError(f"Vector store {vector_type} is not supported.") diff --git a/api/core/rag/datasource/vdb/vector_type.py b/api/core/rag/datasource/vdb/vector_type.py index b4d604a080..3b6df94f78 100644 --- a/api/core/rag/datasource/vdb/vector_type.py +++ b/api/core/rag/datasource/vdb/vector_type.py @@ -18,3 +18,5 @@ class VectorType(str, Enum): ELASTICSEARCH = "elasticsearch" BAIDU = "baidu" VIKINGDB = "vikingdb" + UPSTASH = "upstash" + TIDB_ON_QDRANT = "tidb_on_qdrant" diff --git 
a/api/core/rag/extractor/extract_processor.py b/api/core/rag/extractor/extract_processor.py index 706a42b735..a0b1aa4cef 100644 --- a/api/core/rag/extractor/extract_processor.py +++ b/api/core/rag/extractor/extract_processor.py @@ -105,7 +105,7 @@ class ExtractProcessor: extractor = PdfExtractor(file_path) elif file_extension in {".md", ".markdown"}: extractor = ( - UnstructuredMarkdownExtractor(file_path, unstructured_api_url) + UnstructuredMarkdownExtractor(file_path, unstructured_api_url, unstructured_api_key) if is_automatic else MarkdownExtractor(file_path, autodetect_encoding=True) ) @@ -116,17 +116,19 @@ class ExtractProcessor: elif file_extension == ".csv": extractor = CSVExtractor(file_path, autodetect_encoding=True) elif file_extension == ".msg": - extractor = UnstructuredMsgExtractor(file_path, unstructured_api_url) + extractor = UnstructuredMsgExtractor(file_path, unstructured_api_url, unstructured_api_key) elif file_extension == ".eml": - extractor = UnstructuredEmailExtractor(file_path, unstructured_api_url) + extractor = UnstructuredEmailExtractor(file_path, unstructured_api_url, unstructured_api_key) elif file_extension == ".ppt": extractor = UnstructuredPPTExtractor(file_path, unstructured_api_url, unstructured_api_key) + # The API key is required here because .ppt documents can only be parsed + # through the hosted Unstructured API. elif file_extension == ".pptx": - extractor = UnstructuredPPTXExtractor(file_path, unstructured_api_url) + extractor = UnstructuredPPTXExtractor(file_path, unstructured_api_url, unstructured_api_key) elif file_extension == ".xml": - extractor = UnstructuredXmlExtractor(file_path, unstructured_api_url) + extractor = UnstructuredXmlExtractor(file_path, unstructured_api_url, unstructured_api_key) elif file_extension == ".epub": - extractor = UnstructuredEpubExtractor(file_path, unstructured_api_url) + extractor = UnstructuredEpubExtractor(file_path, unstructured_api_url, unstructured_api_key) else: # txt extractor = ( diff --git a/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py b/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py index 34c6811b67..bd669bbad3 100644 --- a/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py @@ -10,24 +10,26 @@ logger = logging.getLogger(__name__) class UnstructuredEmailExtractor(BaseExtractor): - """Load msg files. + """Load eml files. Args: file_path: Path to the file to load.
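+        api_url: URL of the Unstructured API; when set, parsing is delegated to the API.
+        api_key: API key for the Unstructured API.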
""" - def __init__( - self, - file_path: str, - api_url: str, - ): + def __init__(self, file_path: str, api_url: str, api_key: str): """Initialize with file path.""" self._file_path = file_path self._api_url = api_url + self._api_key = api_key def extract(self) -> list[Document]: - from unstructured.partition.email import partition_email + if self._api_url: + from unstructured.partition.api import partition_via_api - elements = partition_email(filename=self._file_path) + elements = partition_via_api(filename=self._file_path, api_url=self._api_url, api_key=self._api_key) + else: + from unstructured.partition.email import partition_email + + elements = partition_email(filename=self._file_path) # noinspection PyBroadException try: diff --git a/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py b/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py index a41ed3a558..35220b558a 100644 --- a/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py @@ -19,15 +19,23 @@ class UnstructuredEpubExtractor(BaseExtractor): self, file_path: str, api_url: Optional[str] = None, + api_key: Optional[str] = None, ): """Initialize with file path.""" self._file_path = file_path self._api_url = api_url + self._api_key = api_key def extract(self) -> list[Document]: - from unstructured.partition.epub import partition_epub + if self._api_url: + from unstructured.partition.api import partition_via_api + + elements = partition_via_api(filename=self._file_path, api_url=self._api_url, api_key=self._api_key) + else: + from unstructured.partition.epub import partition_epub + + elements = partition_epub(filename=self._file_path, xml_keep_tags=True) - elements = partition_epub(filename=self._file_path, xml_keep_tags=True) from unstructured.chunking.title import chunk_by_title chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) diff --git a/api/core/rag/extractor/unstructured/unstructured_markdown_extractor.py b/api/core/rag/extractor/unstructured/unstructured_markdown_extractor.py index fc3ff10693..4173d4d122 100644 --- a/api/core/rag/extractor/unstructured/unstructured_markdown_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_markdown_extractor.py @@ -24,19 +24,21 @@ class UnstructuredMarkdownExtractor(BaseExtractor): if the specified encoding fails. 
""" - def __init__( - self, - file_path: str, - api_url: str, - ): + def __init__(self, file_path: str, api_url: str, api_key: str): """Initialize with file path.""" self._file_path = file_path self._api_url = api_url + self._api_key = api_key def extract(self) -> list[Document]: - from unstructured.partition.md import partition_md + if self._api_url: + from unstructured.partition.api import partition_via_api - elements = partition_md(filename=self._file_path) + elements = partition_via_api(filename=self._file_path, api_url=self._api_url, api_key=self._api_key) + else: + from unstructured.partition.md import partition_md + + elements = partition_md(filename=self._file_path) from unstructured.chunking.title import chunk_by_title chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) diff --git a/api/core/rag/extractor/unstructured/unstructured_msg_extractor.py b/api/core/rag/extractor/unstructured/unstructured_msg_extractor.py index 8091e83e85..57affb8d36 100644 --- a/api/core/rag/extractor/unstructured/unstructured_msg_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_msg_extractor.py @@ -14,15 +14,21 @@ class UnstructuredMsgExtractor(BaseExtractor): file_path: Path to the file to load. """ - def __init__(self, file_path: str, api_url: str): + def __init__(self, file_path: str, api_url: str, api_key: str): """Initialize with file path.""" self._file_path = file_path self._api_url = api_url + self._api_key = api_key def extract(self) -> list[Document]: - from unstructured.partition.msg import partition_msg + if self._api_url: + from unstructured.partition.api import partition_via_api - elements = partition_msg(filename=self._file_path) + elements = partition_via_api(filename=self._file_path, api_url=self._api_url, api_key=self._api_key) + else: + from unstructured.partition.msg import partition_msg + + elements = partition_msg(filename=self._file_path) from unstructured.chunking.title import chunk_by_title chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) diff --git a/api/core/rag/extractor/unstructured/unstructured_ppt_extractor.py b/api/core/rag/extractor/unstructured/unstructured_ppt_extractor.py index b69394b3b1..0fdcd58b2e 100644 --- a/api/core/rag/extractor/unstructured/unstructured_ppt_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_ppt_extractor.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) class UnstructuredPPTExtractor(BaseExtractor): - """Load msg files. + """Load ppt files. 
Args: @@ -21,9 +21,12 @@ class UnstructuredPPTExtractor(BaseExtractor): self._api_key = api_key def extract(self) -> list[Document]: - from unstructured.partition.api import partition_via_api + if self._api_url: + from unstructured.partition.api import partition_via_api - elements = partition_via_api(filename=self._file_path, api_url=self._api_url, api_key=self._api_key) + elements = partition_via_api(filename=self._file_path, api_url=self._api_url, api_key=self._api_key) + else: + raise NotImplementedError("Unstructured API URL is not configured") text_by_page = {} for element in elements: page = element.metadata.page_number diff --git a/api/core/rag/extractor/unstructured/unstructured_pptx_extractor.py b/api/core/rag/extractor/unstructured/unstructured_pptx_extractor.py index 6ed4a0dfb3..ab41290fbc 100644 --- a/api/core/rag/extractor/unstructured/unstructured_pptx_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_pptx_extractor.py @@ -7,22 +7,28 @@ logger = logging.getLogger(__name__) class UnstructuredPPTXExtractor(BaseExtractor): - """Load msg files. + """Load pptx files. Args: file_path: Path to the file to load. """ - def __init__(self, file_path: str, api_url: str): + def __init__(self, file_path: str, api_url: str, api_key: str): """Initialize with file path.""" self._file_path = file_path self._api_url = api_url + self._api_key = api_key def extract(self) -> list[Document]: - from unstructured.partition.pptx import partition_pptx + if self._api_url: + from unstructured.partition.api import partition_via_api - elements = partition_pptx(filename=self._file_path) + elements = partition_via_api(filename=self._file_path, api_url=self._api_url, api_key=self._api_key) + else: + from unstructured.partition.pptx import partition_pptx + + elements = partition_pptx(filename=self._file_path) text_by_page = {} for element in elements: page = element.metadata.page_number diff --git a/api/core/rag/extractor/unstructured/unstructured_xml_extractor.py b/api/core/rag/extractor/unstructured/unstructured_xml_extractor.py index 3bffc01fbf..ef46ab0e70 100644 --- a/api/core/rag/extractor/unstructured/unstructured_xml_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_xml_extractor.py @@ -7,22 +7,29 @@ logger = logging.getLogger(__name__) class UnstructuredXmlExtractor(BaseExtractor): - """Load msg files. + """Load xml files. Args: file_path: Path to the file to load.
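+        api_url: URL of the Unstructured API; when set, parsing is delegated to the API.
+        api_key: API key for the Unstructured API.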
""" - def __init__(self, file_path: str, api_url: str): + def __init__(self, file_path: str, api_url: str, api_key: str): """Initialize with file path.""" self._file_path = file_path self._api_url = api_url + self._api_key = api_key def extract(self) -> list[Document]: - from unstructured.partition.xml import partition_xml + if self._api_url: + from unstructured.partition.api import partition_via_api + + elements = partition_via_api(filename=self._file_path, api_url=self._api_url, api_key=self._api_key) + else: + from unstructured.partition.xml import partition_xml + + elements = partition_xml(filename=self._file_path, xml_keep_tags=True) - elements = partition_xml(filename=self._file_path, xml_keep_tags=True) from unstructured.chunking.title import chunk_by_title chunks = chunk_by_title(elements, max_characters=2000, combine_text_under_n_chars=2000) diff --git a/api/core/rag/extractor/word_extractor.py b/api/core/rag/extractor/word_extractor.py index 7352ef378b..ae3c25125c 100644 --- a/api/core/rag/extractor/word_extractor.py +++ b/api/core/rag/extractor/word_extractor.py @@ -18,6 +18,7 @@ from core.rag.extractor.extractor_base import BaseExtractor from core.rag.models.document import Document from extensions.ext_database import db from extensions.ext_storage import storage +from models.enums import CreatedByRole from models.model import UploadFile logger = logging.getLogger(__name__) @@ -109,9 +110,10 @@ class WordExtractor(BaseExtractor): key=file_key, name=file_key, size=0, - extension=image_ext, - mime_type=mime_type, + extension=str(image_ext), + mime_type=mime_type or "", created_by=self.user_id, + created_by_role=CreatedByRole.ACCOUNT, created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None), used=True, used_by=self.user_id, @@ -121,7 +123,7 @@ class WordExtractor(BaseExtractor): db.session.add(upload_file) db.session.commit() image_map[rel.target_part] = ( - f"![image]({dify_config.CONSOLE_API_URL}/files/{upload_file.id}/image-preview)" + f"![image]({dify_config.CONSOLE_API_URL}/files/{upload_file.id}/file-preview)" ) return image_map @@ -232,7 +234,7 @@ class WordExtractor(BaseExtractor): def parse_paragraph(paragraph): paragraph_content = [] for run in paragraph.runs: - if hasattr(run.element, "tag") and isinstance(element.tag, str) and run.element.tag.endswith("r"): + if hasattr(run.element, "tag") and isinstance(run.element.tag, str) and run.element.tag.endswith("r"): drawing_elements = run.element.findall( ".//{http://schemas.openxmlformats.org/wordprocessingml/2006/main}drawing" ) diff --git a/api/core/rag/retrieval/router/multi_dataset_react_route.py b/api/core/rag/retrieval/router/multi_dataset_react_route.py index a0494adc60..68fab0c127 100644 --- a/api/core/rag/retrieval/router/multi_dataset_react_route.py +++ b/api/core/rag/retrieval/router/multi_dataset_react_route.py @@ -9,7 +9,7 @@ from core.prompt.advanced_prompt_transform import AdvancedPromptTransform from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, CompletionModelPromptTemplate from core.rag.retrieval.output_parser.react_output import ReactAction from core.rag.retrieval.output_parser.structured_chat import StructuredChatOutputParser -from core.workflow.nodes.llm.llm_node import LLMNode +from core.workflow.nodes.llm import LLMNode PREFIX = """Respond to the human as helpfully and accurately as possible. 
You have access to the following tools:""" diff --git a/api/core/tools/entities/api_entities.py b/api/core/tools/entities/api_entities.py index b988a588e9..b1db559441 100644 --- a/api/core/tools/entities/api_entities.py +++ b/api/core/tools/entities/api_entities.py @@ -32,8 +32,8 @@ class UserToolProvider(BaseModel): original_credentials: Optional[dict] = None is_team_authorization: bool = False allow_delete: bool = True - tools: list[UserTool] = None - labels: list[str] = None + tools: list[UserTool] | None = None + labels: list[str] | None = None def to_dict(self) -> dict: # ------------- @@ -42,7 +42,7 @@ class UserToolProvider(BaseModel): for tool in tools: if tool.get("parameters"): for parameter in tool.get("parameters"): - if parameter.get("type") == ToolParameter.ToolParameterType.FILE.value: + if parameter.get("type") == ToolParameter.ToolParameterType.SYSTEM_FILES.value: parameter["type"] = "files" # ------------- diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index 9962b559fa..d8637fd2cb 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -104,14 +104,15 @@ class ToolInvokeMessage(BaseModel): BLOB = "blob" JSON = "json" IMAGE_LINK = "image_link" - FILE_VAR = "file_var" + FILE = "file" type: MessageType = MessageType.TEXT """ plain text, image url or link url """ message: str | bytes | dict | None = None - meta: dict[str, Any] | None = None + # TODO: Use a BaseModel for meta + meta: dict[str, Any] = Field(default_factory=dict) save_as: str = "" @@ -143,6 +144,67 @@ class ToolParameter(BaseModel): SELECT = "select" SECRET_INPUT = "secret-input" FILE = "file" + FILES = "files" + + # deprecated, should not use. + SYSTEM_FILES = "systme-files" + + def as_normal_type(self): + if self in { + ToolParameter.ToolParameterType.SECRET_INPUT, + ToolParameter.ToolParameterType.SELECT, + }: + return "string" + return self.value + + def cast_value(self, value: Any, /): + try: + match self: + case ( + ToolParameter.ToolParameterType.STRING + | ToolParameter.ToolParameterType.SECRET_INPUT + | ToolParameter.ToolParameterType.SELECT + ): + if value is None: + return "" + else: + return value if isinstance(value, str) else str(value) + + case ToolParameter.ToolParameterType.BOOLEAN: + if value is None: + return False + elif isinstance(value, str): + # Allowed YAML boolean value strings: https://yaml.org/type/bool.html + # and also '0' for False and '1' for True + match value.lower(): + case "true" | "yes" | "y" | "1": + return True + case "false" | "no" | "n" | "0": + return False + case _: + return bool(value) + else: + return value if isinstance(value, bool) else bool(value) + + case ToolParameter.ToolParameterType.NUMBER: + if isinstance(value, int | float): + return value + elif isinstance(value, str) and value: + if "." 
in value: + return float(value) + else: + return int(value) + case ( + ToolParameter.ToolParameterType.SYSTEM_FILES + | ToolParameter.ToolParameterType.FILE + | ToolParameter.ToolParameterType.FILES + ): + return value + case _: + return str(value) + + except Exception: + raise ValueError(f"The tool parameter value {value} is not of the correct type.") class ToolParameterForm(Enum): SCHEMA = "schema" # should be set while adding tool diff --git a/api/core/tools/provider/_position.yaml b/api/core/tools/provider/_position.yaml index 6bab9a09d8..336588e3e2 100644 --- a/api/core/tools/provider/_position.yaml +++ b/api/core/tools/provider/_position.yaml @@ -61,6 +61,7 @@ - vectorizer - qrcode - tianditu +- aliyuque - google_translate - hap - json_process diff --git a/api/core/tools/provider/builtin/aliyuque/tools/base.py b/api/core/tools/provider/builtin/aliyuque/tools/base.py index fb7e219bff..0046931abd 100644 --- a/api/core/tools/provider/builtin/aliyuque/tools/base.py +++ b/api/core/tools/provider/builtin/aliyuque/tools/base.py @@ -1,10 +1,3 @@ -""" -语雀客户端 -""" - -__author__ = "佐井" -__created__ = "2024-06-01 09:45:20" - from typing import Any import requests @@ -17,8 +10,10 @@ class AliYuqueTool: @staticmethod def auth(token): session = requests.Session() - session.headers.update({"Accept": "application/json", "X-Auth-Token": token}) - login = session.request("GET", AliYuqueTool.server_url + "/api/v2/user") + session.headers.update( + {"Accept": "application/json", "X-Auth-Token": token}) + login = session.request( + "GET", AliYuqueTool.server_url + "/api/v2/user") login.raise_for_status() resp = login.json() return resp @@ -27,24 +22,27 @@ class AliYuqueTool: if not token: raise Exception("token is required") session = requests.Session() - session.headers.update({"accept": "application/json", "X-Auth-Token": token}) + session.headers.update( + {"accept": "application/json", "X-Auth-Token": token}) new_params = {**tool_parameters} - # 找出需要替换的变量 - replacements = {k: v for k, v in new_params.items() if f"{{{k}}}" in path} - # 替换 path 中的变量 + replacements = {k: v for k, v in new_params.items() + if f"{{{k}}}" in path} + for key, value in replacements.items(): path = path.replace(f"{{{key}}}", str(value)) - del new_params[key] # 从 kwargs 中删除已经替换的变量 - # 请求接口 + del new_params[key] + if method.upper() in {"POST", "PUT"}: session.headers.update( { "Content-Type": "application/json", } ) - response = session.request(method.upper(), self.server_url + path, json=new_params) + response = session.request( + method.upper(), self.server_url + path, json=new_params) else: - response = session.request(method, self.server_url + path, params=new_params) + response = session.request( + method, self.server_url + path, params=new_params) response.raise_for_status() return response.text diff --git a/api/core/tools/provider/builtin/aliyuque/tools/create_document.py b/api/core/tools/provider/builtin/aliyuque/tools/create_document.py index feadc29258..01080fd1d5 100644 --- a/api/core/tools/provider/builtin/aliyuque/tools/create_document.py +++ b/api/core/tools/provider/builtin/aliyuque/tools/create_document.py @@ -1,10 +1,3 @@ -""" -创建文档 -""" - -__author__ = "佐井" -__created__ = "2024-06-01 10:45:20" - from typing import Any, Union from core.tools.entities.tool_entities import ToolInvokeMessage diff --git a/api/core/tools/provider/builtin/aliyuque/tools/create_document.yaml b/api/core/tools/provider/builtin/aliyuque/tools/create_document.yaml index b9d1c60327..6ac8ae6696 100644 ---
a/api/core/tools/provider/builtin/aliyuque/tools/create_document.yaml +++ b/api/core/tools/provider/builtin/aliyuque/tools/create_document.yaml @@ -13,7 +13,7 @@ description: parameters: - name: book_id - type: number + type: string required: true form: llm label: diff --git a/api/core/tools/provider/builtin/aliyuque/tools/delete_document.py b/api/core/tools/provider/builtin/aliyuque/tools/delete_document.py index 74c731a944..ddbfa43114 100644 --- a/api/core/tools/provider/builtin/aliyuque/tools/delete_document.py +++ b/api/core/tools/provider/builtin/aliyuque/tools/delete_document.py @@ -1,11 +1,3 @@ -#!/usr/bin/env python3 -""" -删除文档 -""" - -__author__ = "佐井" -__created__ = "2024-09-17 22:04" - from typing import Any, Union from core.tools.entities.tool_entities import ToolInvokeMessage @@ -21,5 +13,6 @@ class AliYuqueDeleteDocumentTool(AliYuqueTool, BuiltinTool): if not token: raise Exception("token is required") return self.create_text_message( - self.request("DELETE", token, tool_parameters, "/api/v2/repos/{book_id}/docs/{id}") + self.request("DELETE", token, tool_parameters, + "/api/v2/repos/{book_id}/docs/{id}") ) diff --git a/api/core/tools/provider/builtin/aliyuque/tools/delete_document.yaml b/api/core/tools/provider/builtin/aliyuque/tools/delete_document.yaml index 87372c5350..dddd62d304 100644 --- a/api/core/tools/provider/builtin/aliyuque/tools/delete_document.yaml +++ b/api/core/tools/provider/builtin/aliyuque/tools/delete_document.yaml @@ -13,7 +13,7 @@ description: parameters: - name: book_id - type: number + type: string required: true form: llm label: diff --git a/api/core/tools/provider/builtin/aliyuque/tools/describe_book_index_page.py b/api/core/tools/provider/builtin/aliyuque/tools/describe_book_index_page.py index 02bf603a24..ec834dd640 100644 --- a/api/core/tools/provider/builtin/aliyuque/tools/describe_book_index_page.py +++ b/api/core/tools/provider/builtin/aliyuque/tools/describe_book_index_page.py @@ -1,10 +1,3 @@ -""" -获取知识库首页 -""" - -__author__ = "佐井" -__created__ = "2024-06-01 22:57:14" - from typing import Any, Union from core.tools.entities.tool_entities import ToolInvokeMessage @@ -20,5 +13,6 @@ class AliYuqueDescribeBookIndexPageTool(AliYuqueTool, BuiltinTool): if not token: raise Exception("token is required") return self.create_text_message( - self.request("GET", token, tool_parameters, "/api/v2/repos/{group_login}/{book_slug}/index_page") + self.request("GET", token, tool_parameters, + "/api/v2/repos/{group_login}/{book_slug}/index_page") ) diff --git a/api/core/tools/provider/builtin/aliyuque/tools/describe_book_table_of_contents.py b/api/core/tools/provider/builtin/aliyuque/tools/describe_book_table_of_contents.py index fcfe449c6d..36f8c10d6f 100644 --- a/api/core/tools/provider/builtin/aliyuque/tools/describe_book_table_of_contents.py +++ b/api/core/tools/provider/builtin/aliyuque/tools/describe_book_table_of_contents.py @@ -1,11 +1,3 @@ -#!/usr/bin/env python3 -""" -获取知识库目录 -""" - -__author__ = "佐井" -__created__ = "2024-09-17 15:17:11" - from typing import Any, Union from core.tools.entities.tool_entities import ToolInvokeMessage diff --git a/api/core/tools/provider/builtin/aliyuque/tools/describe_book_table_of_contents.yaml b/api/core/tools/provider/builtin/aliyuque/tools/describe_book_table_of_contents.yaml index 0c2bd22132..0a481b59eb 100644 --- a/api/core/tools/provider/builtin/aliyuque/tools/describe_book_table_of_contents.yaml +++ b/api/core/tools/provider/builtin/aliyuque/tools/describe_book_table_of_contents.yaml @@ -13,7 +13,7 @@ 
description: parameters: - name: book_id - type: number + type: string required: true form: llm label: diff --git a/api/core/tools/provider/builtin/aliyuque/tools/describe_document_content.py b/api/core/tools/provider/builtin/aliyuque/tools/describe_document_content.py index 1e70593879..50a9b37cf6 100644 --- a/api/core/tools/provider/builtin/aliyuque/tools/describe_document_content.py +++ b/api/core/tools/provider/builtin/aliyuque/tools/describe_document_content.py @@ -1,10 +1,3 @@ -""" -获取文档 -""" - -__author__ = "佐井" -__created__ = "2024-06-02 07:11:45" - import json from typing import Any, Union from urllib.parse import urlparse @@ -37,19 +30,19 @@ class AliYuqueDescribeDocumentContentTool(AliYuqueTool, BuiltinTool): book_slug = path_parts[-2] group_id = path_parts[-3] - # 1. 请求首页信息,获取book_id new_params["group_login"] = group_id new_params["book_slug"] = book_slug index_page = json.loads( - self.request("GET", token, new_params, "/api/v2/repos/{group_login}/{book_slug}/index_page") + self.request("GET", token, new_params, + "/api/v2/repos/{group_login}/{book_slug}/index_page") ) book_id = index_page.get("data", {}).get("book", {}).get("id") if not book_id: raise Exception(f"can not parse book_id from {index_page}") - # 2. 获取文档内容 new_params["book_id"] = book_id new_params["id"] = doc_id - data = self.request("GET", token, new_params, "/api/v2/repos/{book_id}/docs/{id}") + data = self.request("GET", token, new_params, + "/api/v2/repos/{book_id}/docs/{id}") data = json.loads(data) body_only = tool_parameters.get("body_only") or "" if body_only.lower() == "true": diff --git a/api/core/tools/provider/builtin/aliyuque/tools/describe_documents.py b/api/core/tools/provider/builtin/aliyuque/tools/describe_documents.py index ed1b2a8643..75436e5b1b 100644 --- a/api/core/tools/provider/builtin/aliyuque/tools/describe_documents.py +++ b/api/core/tools/provider/builtin/aliyuque/tools/describe_documents.py @@ -1,10 +1,3 @@ -""" -获取文档 -""" - -__author__ = "佐井" -__created__ = "2024-06-01 10:45:20" - from typing import Any, Union from core.tools.entities.tool_entities import ToolInvokeMessage @@ -20,5 +13,6 @@ class AliYuqueDescribeDocumentsTool(AliYuqueTool, BuiltinTool): if not token: raise Exception("token is required") return self.create_text_message( - self.request("GET", token, tool_parameters, "/api/v2/repos/{book_id}/docs/{id}") + self.request("GET", token, tool_parameters, + "/api/v2/repos/{book_id}/docs/{id}") ) diff --git a/api/core/tools/provider/builtin/aliyuque/tools/describe_documents.yaml b/api/core/tools/provider/builtin/aliyuque/tools/describe_documents.yaml index 5156345d71..0b14c1afba 100644 --- a/api/core/tools/provider/builtin/aliyuque/tools/describe_documents.yaml +++ b/api/core/tools/provider/builtin/aliyuque/tools/describe_documents.yaml @@ -14,7 +14,7 @@ description: parameters: - name: book_id - type: number + type: string required: true form: llm label: diff --git a/api/core/tools/provider/builtin/aliyuque/tools/update_book_table_of_contents.py b/api/core/tools/provider/builtin/aliyuque/tools/update_book_table_of_contents.py index 932559445e..ca0a3909f8 100644 --- a/api/core/tools/provider/builtin/aliyuque/tools/update_book_table_of_contents.py +++ b/api/core/tools/provider/builtin/aliyuque/tools/update_book_table_of_contents.py @@ -1,11 +1,3 @@ -#!/usr/bin/env python3 -""" -获取知识库目录 -""" - -__author__ = "佐井" -__created__ = "2024-09-17 15:17:11" - from typing import Any, Union from core.tools.entities.tool_entities import ToolInvokeMessage diff --git 
a/api/core/tools/provider/builtin/aliyuque/tools/update_book_table_of_contents.yaml b/api/core/tools/provider/builtin/aliyuque/tools/update_book_table_of_contents.yaml index f0c0024f17..f85970348b 100644 --- a/api/core/tools/provider/builtin/aliyuque/tools/update_book_table_of_contents.yaml +++ b/api/core/tools/provider/builtin/aliyuque/tools/update_book_table_of_contents.yaml @@ -13,7 +13,7 @@ description: parameters: - name: book_id - type: number + type: string required: true form: llm label: diff --git a/api/core/tools/provider/builtin/aliyuque/tools/update_document.py b/api/core/tools/provider/builtin/aliyuque/tools/update_document.py index 0c6e0205e1..a6bcb1fcc2 100644 --- a/api/core/tools/provider/builtin/aliyuque/tools/update_document.py +++ b/api/core/tools/provider/builtin/aliyuque/tools/update_document.py @@ -1,10 +1,3 @@ -""" -更新文档 -""" - -__author__ = "佐井" -__created__ = "2024-06-19 16:50:07" - from typing import Any, Union from core.tools.entities.tool_entities import ToolInvokeMessage @@ -20,5 +13,6 @@ class AliYuqueUpdateDocumentTool(AliYuqueTool, BuiltinTool): if not token: raise Exception("token is required") return self.create_text_message( - self.request("PUT", token, tool_parameters, "/api/v2/repos/{book_id}/docs/{id}") + self.request("PUT", token, tool_parameters, + "/api/v2/repos/{book_id}/docs/{id}") ) diff --git a/api/core/tools/provider/builtin/aliyuque/tools/update_document.yaml b/api/core/tools/provider/builtin/aliyuque/tools/update_document.yaml index 87f88c9b1b..c2da6b179a 100644 --- a/api/core/tools/provider/builtin/aliyuque/tools/update_document.yaml +++ b/api/core/tools/provider/builtin/aliyuque/tools/update_document.yaml @@ -12,7 +12,7 @@ description: llm: Update doc in a knowledge base via ID/path. parameters: - name: book_id - type: number + type: string required: true form: llm label: diff --git a/api/core/tools/provider/builtin/chart/chart.py b/api/core/tools/provider/builtin/chart/chart.py index 8a24d33428..209d6ecba4 100644 --- a/api/core/tools/provider/builtin/chart/chart.py +++ b/api/core/tools/provider/builtin/chart/chart.py @@ -1,77 +1,36 @@ import matplotlib.pyplot as plt -from fontTools.ttLib import TTFont -from matplotlib.font_manager import findSystemFonts +from matplotlib.font_manager import FontProperties -from core.tools.errors import ToolProviderCredentialValidationError -from core.tools.provider.builtin.chart.tools.line import LinearChartTool from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController + +def set_chinese_font(): + font_list = [ + "PingFang SC", + "SimHei", + "Microsoft YaHei", + "STSong", + "SimSun", + "Arial Unicode MS", + "Noto Sans CJK SC", + "Noto Sans CJK JP", + ] + + for font in font_list: + chinese_font = FontProperties(font) + if chinese_font.get_name() == font: + return chinese_font + + return FontProperties() + + # use a business theme plt.style.use("seaborn-v0_8-darkgrid") plt.rcParams["axes.unicode_minus"] = False - - -def init_fonts(): - fonts = findSystemFonts() - - popular_unicode_fonts = [ - "Arial Unicode MS", - "DejaVu Sans", - "DejaVu Sans Mono", - "DejaVu Serif", - "FreeMono", - "FreeSans", - "FreeSerif", - "Liberation Mono", - "Liberation Sans", - "Liberation Serif", - "Noto Mono", - "Noto Sans", - "Noto Serif", - "Open Sans", - "Roboto", - "Source Code Pro", - "Source Sans Pro", - "Source Serif Pro", - "Ubuntu", - "Ubuntu Mono", - ] - - supported_fonts = [] - - for font_path in fonts: - try: - font = TTFont(font_path) - # get family name - family_name = font["name"].getName(1, 
3, 1).toUnicode() - if family_name in popular_unicode_fonts: - supported_fonts.append(family_name) - except: - pass - - plt.rcParams["font.family"] = "sans-serif" - # sort by order of popular_unicode_fonts - for font in popular_unicode_fonts: - if font in supported_fonts: - plt.rcParams["font.sans-serif"] = font - break - - -init_fonts() +font_properties = set_chinese_font() +plt.rcParams["font.family"] = font_properties.get_name() class ChartProvider(BuiltinToolProviderController): def _validate_credentials(self, credentials: dict) -> None: - try: - LinearChartTool().fork_tool_runtime( - runtime={ - "credentials": credentials, - } - ).invoke( - user_id="", - tool_parameters={ - "data": "1,3,5,7,9,2,4,6,8,10", - }, - ) - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) + pass diff --git a/api/core/tools/provider/builtin/comfyui/tools/comfyui_client.py b/api/core/tools/provider/builtin/comfyui/tools/comfyui_client.py index a41d34d40f..d4bf713441 100644 --- a/api/core/tools/provider/builtin/comfyui/tools/comfyui_client.py +++ b/api/core/tools/provider/builtin/comfyui/tools/comfyui_client.py @@ -1,3 +1,5 @@ +import base64 +import io import json import random import uuid @@ -6,45 +8,48 @@ import httpx from websocket import WebSocket from yarl import URL +from core.file.file_manager import _get_encoded_string +from core.file.models import File + class ComfyUiClient: def __init__(self, base_url: str): self.base_url = URL(base_url) - def get_history(self, prompt_id: str): + def get_history(self, prompt_id: str) -> dict: res = httpx.get(str(self.base_url / "history"), params={"prompt_id": prompt_id}) history = res.json()[prompt_id] return history - def get_image(self, filename: str, subfolder: str, folder_type: str): + def get_image(self, filename: str, subfolder: str, folder_type: str) -> bytes: response = httpx.get( str(self.base_url / "view"), params={"filename": filename, "subfolder": subfolder, "type": folder_type}, ) return response.content - def upload_image(self, input_path: str, name: str, image_type: str = "input", overwrite: bool = False): - # plan to support img2img in dify 0.10.0 - with open(input_path, "rb") as file: - files = {"image": (name, file, "image/png")} - data = {"type": image_type, "overwrite": str(overwrite).lower()} + def upload_image(self, image_file: File) -> dict: + image_content = base64.b64decode(_get_encoded_string(image_file)) + file = io.BytesIO(image_content) + files = {"image": (image_file.filename, file, image_file.mime_type), "overwrite": "true"} + res = httpx.post(str(self.base_url / "upload/image"), files=files) + return res.json() - res = httpx.post(str(self.base_url / "upload/image"), data=data, files=files) - return res - - def queue_prompt(self, client_id: str, prompt: dict): + def queue_prompt(self, client_id: str, prompt: dict) -> str: res = httpx.post(str(self.base_url / "prompt"), json={"client_id": client_id, "prompt": prompt}) prompt_id = res.json()["prompt_id"] return prompt_id - def open_websocket_connection(self): + def open_websocket_connection(self) -> tuple[WebSocket, str]: client_id = str(uuid.uuid4()) ws = WebSocket() ws_address = f"ws://{self.base_url.authority}/ws?clientId={client_id}" ws.connect(ws_address) return ws, client_id - def set_prompt(self, origin_prompt: dict, positive_prompt: str, negative_prompt: str = ""): + def set_prompt( + self, origin_prompt: dict, positive_prompt: str, negative_prompt: str = "", image_name: str = "" + ) -> dict: """ find the first KSampler, then can find the prompt node 
through it. """ @@ -58,6 +63,10 @@ class ComfyUiClient: if negative_prompt != "": negative_input_id = prompt.get(k_sampler)["inputs"]["negative"][0] prompt.get(negative_input_id)["inputs"]["text"] = negative_prompt + + if image_name != "": + image_loader = [key for key, value in id_to_class_type.items() if value == "LoadImage"][0] + prompt.get(image_loader)["inputs"]["image"] = image_name return prompt def track_progress(self, prompt: dict, ws: WebSocket, prompt_id: str): @@ -89,7 +98,7 @@ class ComfyUiClient: else: continue - def generate_image_by_prompt(self, prompt: dict): + def generate_image_by_prompt(self, prompt: dict) -> list[bytes]: try: ws, client_id = self.open_websocket_connection() prompt_id = self.queue_prompt(client_id, prompt) diff --git a/api/core/tools/provider/builtin/comfyui/tools/comfyui_workflow.py b/api/core/tools/provider/builtin/comfyui/tools/comfyui_workflow.py index e4df9f8c3b..11320d5d0f 100644 --- a/api/core/tools/provider/builtin/comfyui/tools/comfyui_workflow.py +++ b/api/core/tools/provider/builtin/comfyui/tools/comfyui_workflow.py @@ -2,10 +2,9 @@ import json from typing import Any from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.provider.builtin.comfyui.tools.comfyui_client import ComfyUiClient from core.tools.tool.builtin_tool import BuiltinTool -from .comfyui_client import ComfyUiClient - class ComfyUIWorkflowTool(BuiltinTool): def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage | list[ToolInvokeMessage]: @@ -14,13 +13,16 @@ class ComfyUIWorkflowTool(BuiltinTool): positive_prompt = tool_parameters.get("positive_prompt") negative_prompt = tool_parameters.get("negative_prompt") workflow = tool_parameters.get("workflow_json") + image_name = "" + if image := tool_parameters.get("image"): + image_name = comfyui.upload_image(image).get("name") try: origin_prompt = json.loads(workflow) except: return self.create_text_message("the Workflow JSON is not correct") - prompt = comfyui.set_prompt(origin_prompt, positive_prompt, negative_prompt) + prompt = comfyui.set_prompt(origin_prompt, positive_prompt, negative_prompt, image_name) images = comfyui.generate_image_by_prompt(prompt) result = [] for img in images: diff --git a/api/core/tools/provider/builtin/comfyui/tools/comfyui_workflow.yaml b/api/core/tools/provider/builtin/comfyui/tools/comfyui_workflow.yaml index 6342d6d468..55fcdad825 100644 --- a/api/core/tools/provider/builtin/comfyui/tools/comfyui_workflow.yaml +++ b/api/core/tools/provider/builtin/comfyui/tools/comfyui_workflow.yaml @@ -24,6 +24,13 @@ parameters: zh_Hans: 负面提示词 llm_description: Negative prompt, you should describe the image you don't want to generate as a list of words as possible as detailed, the prompt must be written in English. form: llm + - name: image + type: file + label: + en_US: Input Image + zh_Hans: 输入的图片 + llm_description: The input image, used to transfer to the comfyui workflow to generate another image. 
+ form: llm - name: workflow_json type: string required: true diff --git a/api/core/tools/provider/builtin/dalle/tools/dalle3.py b/api/core/tools/provider/builtin/dalle/tools/dalle3.py index a8c647d71e..af9aa6abb4 100644 --- a/api/core/tools/provider/builtin/dalle/tools/dalle3.py +++ b/api/core/tools/provider/builtin/dalle/tools/dalle3.py @@ -66,7 +66,7 @@ class DallE3Tool(BuiltinTool): for image in response.data: mime_type, blob_image = DallE3Tool._decode_image(image.b64_json) blob_message = self.create_blob_message( - blob=blob_image, meta={"mime_type": mime_type}, save_as=self.VariableKey.IMAGE.value + blob=blob_image, meta={"mime_type": mime_type}, save_as=self.VariableKey.IMAGE ) result.append(blob_message) return result diff --git a/api/core/tools/provider/builtin/discord/tools/discord_webhook.py b/api/core/tools/provider/builtin/discord/tools/discord_webhook.py index 7fdf791aba..c1834a1a26 100644 --- a/api/core/tools/provider/builtin/discord/tools/discord_webhook.py +++ b/api/core/tools/provider/builtin/discord/tools/discord_webhook.py @@ -21,7 +21,6 @@ class DiscordWebhookTool(BuiltinTool): return self.create_text_message("Invalid parameter content") webhook_url = tool_parameters.get("webhook_url", "") - if not webhook_url.startswith("https://discord.com/api/webhooks/"): return self.create_text_message( f"Invalid parameter webhook_url ${webhook_url}, \ @@ -31,13 +30,14 @@ class DiscordWebhookTool(BuiltinTool): headers = { "Content-Type": "application/json", } - params = {} payload = { + "username": tool_parameters.get("username") or user_id, "content": content, + "avatar_url": tool_parameters.get("avatar_url") or None, } try: - res = httpx.post(webhook_url, headers=headers, params=params, json=payload) + res = httpx.post(webhook_url, headers=headers, json=payload) if res.is_success: return self.create_text_message("Text message was sent successfully") else: diff --git a/api/core/tools/provider/builtin/discord/tools/discord_webhook.yaml b/api/core/tools/provider/builtin/discord/tools/discord_webhook.yaml index bb3fa43f24..6847b973ca 100644 --- a/api/core/tools/provider/builtin/discord/tools/discord_webhook.yaml +++ b/api/core/tools/provider/builtin/discord/tools/discord_webhook.yaml @@ -38,3 +38,28 @@ parameters: pt_BR: Content to sent to the channel or person. 
llm_description: Content of the message form: llm + - name: username + type: string + required: false + label: + en_US: Discord Webhook Username + zh_Hans: Discord Webhook用户名 + pt_BR: Discord Webhook Username + human_description: + en_US: Discord Webhook Username + zh_Hans: Discord Webhook用户名 + pt_BR: Discord Webhook Username + llm_description: Discord Webhook Username + form: llm + - name: avatar_url + type: string + required: false + label: + en_US: Discord Webhook Avatar + zh_Hans: Discord Webhook头像 + pt_BR: Discord Webhook Avatar + human_description: + en_US: Discord Webhook Avatar URL + zh_Hans: Discord Webhook头像地址 + pt_BR: Discord Webhook Avatar URL + form: form diff --git a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_ai.yaml b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_ai.yaml index 21cbae6bd3..dd049d3b5a 100644 --- a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_ai.yaml +++ b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_ai.yaml @@ -37,7 +37,7 @@ parameters: - value: mixtral-8x7b label: en_US: Mixtral - default: gpt-3.5 + default: gpt-4o-mini label: en_US: Choose Model zh_Hans: 选择模型 diff --git a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_img.py b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_img.py index 396570248a..54bb38755a 100644 --- a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_img.py +++ b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_img.py @@ -2,7 +2,6 @@ from typing import Any from duckduckgo_search import DDGS -from core.file.file_obj import FileTransferMethod from core.tools.entities.tool_entities import ToolInvokeMessage from core.tools.tool.builtin_tool import BuiltinTool @@ -20,11 +19,9 @@ class DuckDuckGoImageSearchTool(BuiltinTool): "max_results": tool_parameters.get("max_results"), } response = DDGS().images(**query_dict) - result = [] + markdown_result = "\n\n" + json_result = [] for res in response: - res["transfer_method"] = FileTransferMethod.REMOTE_URL - msg = ToolInvokeMessage( - type=ToolInvokeMessage.MessageType.IMAGE_LINK, message=res.get("image"), save_as="", meta=res - ) - result.append(msg) - return result + markdown_result += f"![{res.get('title') or ''}]({res.get('image') or ''})" + json_result.append(self.create_json_message(res)) + return [self.create_text_message(markdown_result)] + json_result diff --git a/api/core/tools/provider/builtin/feishu_base/_assets/icon.svg b/api/core/tools/provider/builtin/feishu_base/_assets/icon.svg deleted file mode 100644 index 2663a0f59e..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/_assets/icon.svg +++ /dev/null @@ -1,47 +0,0 @@ - - - - diff --git a/api/core/tools/provider/builtin/feishu_base/feishu_base.py b/api/core/tools/provider/builtin/feishu_base/feishu_base.py index 04056af53b..f301ec5355 100644 --- a/api/core/tools/provider/builtin/feishu_base/feishu_base.py +++ b/api/core/tools/provider/builtin/feishu_base/feishu_base.py @@ -1,8 +1,7 @@ -from core.tools.provider.builtin.feishu_base.tools.get_tenant_access_token import GetTenantAccessTokenTool from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController +from core.tools.utils.feishu_api_utils import auth class FeishuBaseProvider(BuiltinToolProviderController): def _validate_credentials(self, credentials: dict) -> None: - GetTenantAccessTokenTool() - pass + auth(credentials) diff --git a/api/core/tools/provider/builtin/feishu_base/feishu_base.yaml b/api/core/tools/provider/builtin/feishu_base/feishu_base.yaml index f3dcbb6136..456dd8c88f 100644 
--- a/api/core/tools/provider/builtin/feishu_base/feishu_base.yaml +++ b/api/core/tools/provider/builtin/feishu_base/feishu_base.yaml @@ -5,10 +5,32 @@ identity: en_US: Feishu Base zh_Hans: 飞书多维表格 description: - en_US: Feishu Base - zh_Hans: 飞书多维表格 - icon: icon.svg + en_US: | + Feishu base, requires the following permissions: bitable:app. + zh_Hans: | + 飞书多维表格,需要开通以下权限: bitable:app。 + icon: icon.png tags: - social - productivity credentials_for_provider: + app_id: + type: text-input + required: true + label: + en_US: APP ID + placeholder: + en_US: Please input your feishu app id + zh_Hans: 请输入你的飞书 app id + help: + en_US: Get your app_id and app_secret from Feishu + zh_Hans: 从飞书获取您的 app_id 和 app_secret + url: https://open.larkoffice.com/app + app_secret: + type: secret-input + required: true + label: + en_US: APP Secret + placeholder: + en_US: Please input your app secret + zh_Hans: 请输入你的飞书 app secret diff --git a/api/core/tools/provider/builtin/feishu_base/tools/add_base_record.py b/api/core/tools/provider/builtin/feishu_base/tools/add_base_record.py deleted file mode 100644 index 4a605fbffe..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/add_base_record.py +++ /dev/null @@ -1,56 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class AddBaseRecordTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}/tables/{table_id}/records" - - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - table_id = tool_parameters.get("table_id", "") - if not table_id: - return self.create_text_message("Invalid parameter table_id") - - fields = tool_parameters.get("fields", "") - if not fields: - return self.create_text_message("Invalid parameter fields") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - params = {} - payload = {"fields": json.loads(fields)} - - try: - res = httpx.post( - url.format(app_token=app_token, table_id=table_id), - headers=headers, - params=params, - json=payload, - timeout=30, - ) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to add base record, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to add base record. {}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/add_base_record.yaml b/api/core/tools/provider/builtin/feishu_base/tools/add_base_record.yaml deleted file mode 100644 index 3ce0154efd..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/add_base_record.yaml +++ /dev/null @@ -1,66 +0,0 @@ -identity: - name: add_base_record - author: Doug Lea - label: - en_US: Add Base Record - zh_Hans: 在多维表格数据表中新增一条记录 -description: - human: - en_US: Add Base Record - zh_Hans: | - 在多维表格数据表中新增一条记录,详细请参考:https://open.larkoffice.com/document/server-docs/docs/bitable-v1/app-table-record/create - llm: Add a new record in the multidimensional table data table. 
-parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: app_token - type: string - required: true - label: - en_US: app_token - zh_Hans: 多维表格 - human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token - form: llm - - - name: table_id - type: string - required: true - label: - en_US: table_id - zh_Hans: 多维表格的数据表 - human_description: - en_US: bitable table id - zh_Hans: 多维表格数据表的唯一标识符 table_id - llm_description: bitable table id - form: llm - - - name: fields - type: string - required: true - label: - en_US: fields - zh_Hans: 数据表的列字段内容 - human_description: - en_US: The fields of the Base data table are the columns of the data table. - zh_Hans: | - 要增加一行多维表格记录,字段结构拼接如下:{"多行文本":"多行文本内容","单选":"选项1","多选":["选项1","选项2"],"复选框":true,"人员":[{"id":"ou_2910013f1e6456f16a0ce75ede950a0a"}],"群组":[{"id":"oc_cd07f55f14d6f4a4f1b51504e7e97f48"}],"电话号码":"13026162666"} - 当前接口支持的字段类型为:多行文本、单选、条码、多选、日期、人员、附件、复选框、超链接、数字、单向关联、双向关联、电话号码、地理位置。 - 不同类型字段的数据结构请参考数据结构概述:https://open.larkoffice.com/document/server-docs/docs/bitable-v1/bitable-structure - llm_description: | - 要增加一行多维表格记录,字段结构拼接如下:{"多行文本":"多行文本内容","单选":"选项1","多选":["选项1","选项2"],"复选框":true,"人员":[{"id":"ou_2910013f1e6456f16a0ce75ede950a0a"}],"群组":[{"id":"oc_cd07f55f14d6f4a4f1b51504e7e97f48"}],"电话号码":"13026162666"} - 当前接口支持的字段类型为:多行文本、单选、条码、多选、日期、人员、附件、复选框、超链接、数字、单向关联、双向关联、电话号码、地理位置。 - 不同类型字段的数据结构请参考数据结构概述:https://open.larkoffice.com/document/server-docs/docs/bitable-v1/bitable-structure - form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/create_base.py b/api/core/tools/provider/builtin/feishu_base/tools/create_base.py index 6b755e2007..f074acc5ff 100644 --- a/api/core/tools/provider/builtin/feishu_base/tools/create_base.py +++ b/api/core/tools/provider/builtin/feishu_base/tools/create_base.py @@ -1,41 +1,18 @@ -import json -from typing import Any, Union - -import httpx +from typing import Any from core.tools.entities.tool_entities import ToolInvokeMessage from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest class CreateBaseTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps" + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") + name = tool_parameters.get("name") + folder_token = tool_parameters.get("folder_token") - name = tool_parameters.get("name", "") - folder_token = tool_parameters.get("folder_token", "") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - params = {} - payload = {"name": name, "folder_token": folder_token} - - try: - res = httpx.post(url, headers=headers, params=params, json=payload, timeout=30) - res_json = res.json() - if res.is_success: - 
return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to create base, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to create base. {}".format(e)) + res = client.create_base(name, folder_token) + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/create_base.yaml b/api/core/tools/provider/builtin/feishu_base/tools/create_base.yaml index 76c76a916d..3ec91a90e7 100644 --- a/api/core/tools/provider/builtin/feishu_base/tools/create_base.yaml +++ b/api/core/tools/provider/builtin/feishu_base/tools/create_base.yaml @@ -6,32 +6,21 @@ identity: zh_Hans: 创建多维表格 description: human: - en_US: Create base + en_US: Create Multidimensional Table in Specified Directory zh_Hans: 在指定目录下创建多维表格 - llm: A tool for create a multidimensional table in the specified directory. + llm: A tool for creating a multidimensional table in a specified directory. (在指定目录下创建多维表格) parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - name: name type: string required: false label: en_US: name - zh_Hans: name + zh_Hans: 多维表格 App 名字 human_description: - en_US: Base App Name - zh_Hans: 多维表格App名字 - llm_description: Base App Name + en_US: | + Name of the multidimensional table App. Example value: "A new multidimensional table". + zh_Hans: 多维表格 App 名字,示例值:"一篇新的多维表格"。 + llm_description: 多维表格 App 名字,示例值:"一篇新的多维表格"。 form: llm - name: folder_token @@ -39,9 +28,15 @@ parameters: required: false label: en_US: folder_token - zh_Hans: 多维表格App归属文件夹 + zh_Hans: 多维表格 App 归属文件夹 human_description: - en_US: Base App home folder. The default is empty, indicating that Base will be created in the cloud space root directory. - zh_Hans: 多维表格App归属文件夹。默认为空,表示多维表格将被创建在云空间根目录。 - llm_description: Base App home folder. The default is empty, indicating that Base will be created in the cloud space root directory. + en_US: | + Folder where the multidimensional table App belongs. Default is empty, meaning the table will be created in the root directory of the cloud space. Example values: Fa3sfoAgDlMZCcdcJy1cDFg8nJc or https://svi136aogf123.feishu.cn/drive/folder/Fa3sfoAgDlMZCcdcJy1cDFg8nJc. + The folder_token must be an existing folder and supports inputting folder token or folder URL. 
+ zh_Hans: | + 多维表格 App 归属文件夹。默认为空,表示多维表格将被创建在云空间根目录。示例值: Fa3sfoAgDlMZCcdcJy1cDFg8nJc 或者 https://svi136aogf123.feishu.cn/drive/folder/Fa3sfoAgDlMZCcdcJy1cDFg8nJc。 + folder_token 必须是已存在的文件夹,支持输入文件夹 token 或者文件夹 URL。 + llm_description: | + 多维表格 App 归属文件夹。默认为空,表示多维表格将被创建在云空间根目录。示例值: Fa3sfoAgDlMZCcdcJy1cDFg8nJc 或者 https://svi136aogf123.feishu.cn/drive/folder/Fa3sfoAgDlMZCcdcJy1cDFg8nJc。 + folder_token 必须是已存在的文件夹,支持输入文件夹 token 或者文件夹 URL。 form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/create_base_table.py b/api/core/tools/provider/builtin/feishu_base/tools/create_base_table.py deleted file mode 100644 index b05d700113..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/create_base_table.py +++ /dev/null @@ -1,48 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class CreateBaseTableTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}/tables" - - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - name = tool_parameters.get("name", "") - - fields = tool_parameters.get("fields", "") - if not fields: - return self.create_text_message("Invalid parameter fields") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - params = {} - payload = {"table": {"name": name, "fields": json.loads(fields)}} - - try: - res = httpx.post(url.format(app_token=app_token), headers=headers, params=params, json=payload, timeout=30) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to create base table, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to create base table. {}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/create_base_table.yaml b/api/core/tools/provider/builtin/feishu_base/tools/create_base_table.yaml deleted file mode 100644 index 48c46bec14..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/create_base_table.yaml +++ /dev/null @@ -1,106 +0,0 @@ -identity: - name: create_base_table - author: Doug Lea - label: - en_US: Create Base Table - zh_Hans: 多维表格新增一个数据表 -description: - human: - en_US: Create base table - zh_Hans: | - 多维表格新增一个数据表,详细请参考:https://open.larkoffice.com/document/server-docs/docs/bitable-v1/app-table/create - llm: A tool for add a new data table to the multidimensional table. 
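In contrast, the rewritten tools drop the per-call token parameter entirely: credentials live on the provider, and a shared FeishuRequest client does the HTTP work (and, presumably, the token handling) internally. A minimal sketch of the new calling convention, using only the constructor and create_base call visible in the new create_base.py earlier in this patch; the credential values are placeholders:

```python
# Sketch of the consolidated FeishuRequest pattern introduced by this patch.
# app_id and app_secret are hypothetical placeholder credentials.
from core.tools.utils.feishu_api_utils import FeishuRequest

app_id = "cli-placeholder"
app_secret = "secret-placeholder"
client = FeishuRequest(app_id, app_secret)

name = "一篇新的多维表格"  # example value from create_base.yaml above
folder_token = ""  # empty: create the base in the cloud-space root directory
res = client.create_base(name, folder_token)
```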
-parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: app_token - type: string - required: true - label: - en_US: app_token - zh_Hans: 多维表格 - human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token - form: llm - - - name: name - type: string - required: false - label: - en_US: name - zh_Hans: name - human_description: - en_US: Multidimensional table data table name - zh_Hans: 多维表格数据表名称 - llm_description: Multidimensional table data table name - form: llm - - - name: fields - type: string - required: true - label: - en_US: fields - zh_Hans: fields - human_description: - en_US: Initial fields of the data table - zh_Hans: | - 数据表的初始字段,格式为:[{"field_name":"多行文本","type":1},{"field_name":"数字","type":2},{"field_name":"单选","type":3},{"field_name":"多选","type":4},{"field_name":"日期","type":5}]。 - field_name:字段名; - type: 字段类型;可选值有 - 1:多行文本 - 2:数字 - 3:单选 - 4:多选 - 5:日期 - 7:复选框 - 11:人员 - 13:电话号码 - 15:超链接 - 17:附件 - 18:单向关联 - 20:公式 - 21:双向关联 - 22:地理位置 - 23:群组 - 1001:创建时间 - 1002:最后更新时间 - 1003:创建人 - 1004:修改人 - 1005:自动编号 - llm_description: | - 数据表的初始字段,格式为:[{"field_name":"多行文本","type":1},{"field_name":"数字","type":2},{"field_name":"单选","type":3},{"field_name":"多选","type":4},{"field_name":"日期","type":5}]。 - field_name:字段名; - type: 字段类型;可选值有 - 1:多行文本 - 2:数字 - 3:单选 - 4:多选 - 5:日期 - 7:复选框 - 11:人员 - 13:电话号码 - 15:超链接 - 17:附件 - 18:单向关联 - 20:公式 - 21:双向关联 - 22:地理位置 - 23:群组 - 1001:创建时间 - 1002:最后更新时间 - 1003:创建人 - 1004:修改人 - 1005:自动编号 - form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/delete_base_records.py b/api/core/tools/provider/builtin/feishu_base/tools/delete_base_records.py deleted file mode 100644 index 862eb2171b..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/delete_base_records.py +++ /dev/null @@ -1,56 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class DeleteBaseRecordsTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}/tables/{table_id}/records/batch_delete" - - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - table_id = tool_parameters.get("table_id", "") - if not table_id: - return self.create_text_message("Invalid parameter table_id") - - record_ids = tool_parameters.get("record_ids", "") - if not record_ids: - return self.create_text_message("Invalid parameter record_ids") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - params = {} - payload = {"records": json.loads(record_ids)} - - try: - res = httpx.post( - url.format(app_token=app_token, table_id=table_id), - headers=headers, - params=params, - json=payload, - timeout=30, - ) - res_json = res.json() - if res.is_success: - return 
self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to delete base records, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to delete base records. {}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/delete_base_records.yaml b/api/core/tools/provider/builtin/feishu_base/tools/delete_base_records.yaml deleted file mode 100644 index 595b287029..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/delete_base_records.yaml +++ /dev/null @@ -1,60 +0,0 @@ -identity: - name: delete_base_records - author: Doug Lea - label: - en_US: Delete Base Records - zh_Hans: 在多维表格数据表中删除多条记录 -description: - human: - en_US: Delete base records - zh_Hans: | - 该接口用于删除多维表格数据表中的多条记录,单次调用中最多删除 500 条记录。 - llm: A tool for delete multiple records in a multidimensional table data table, up to 500 records can be deleted in a single call. -parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: app_token - type: string - required: true - label: - en_US: app_token - zh_Hans: 多维表格 - human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token - form: llm - - - name: table_id - type: string - required: true - label: - en_US: table_id - zh_Hans: 多维表格的数据表 - human_description: - en_US: bitable table id - zh_Hans: 多维表格数据表的唯一标识符 table_id - llm_description: bitable table id - form: llm - - - name: record_ids - type: string - required: true - label: - en_US: record_ids - zh_Hans: record_ids - human_description: - en_US: A list of multiple record IDs to be deleted, for example ["recwNXzPQv","recpCsf4ME"] - zh_Hans: 待删除的多条记录id列表,示例为 ["recwNXzPQv","recpCsf4ME"] - llm_description: A list of multiple record IDs to be deleted, for example ["recwNXzPQv","recpCsf4ME"] - form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/delete_base_tables.py b/api/core/tools/provider/builtin/feishu_base/tools/delete_base_tables.py deleted file mode 100644 index f512186303..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/delete_base_tables.py +++ /dev/null @@ -1,46 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class DeleteBaseTablesTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}/tables/batch_delete" - - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - table_ids = tool_parameters.get("table_ids", "") - if not table_ids: - return self.create_text_message("Invalid parameter table_ids") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - params = {} - payload = {"table_ids": 
json.loads(table_ids)} - - try: - res = httpx.post(url.format(app_token=app_token), headers=headers, params=params, json=payload, timeout=30) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to delete base tables, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to delete base tables. {}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/delete_base_tables.yaml b/api/core/tools/provider/builtin/feishu_base/tools/delete_base_tables.yaml deleted file mode 100644 index 5d72814363..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/delete_base_tables.yaml +++ /dev/null @@ -1,48 +0,0 @@ -identity: - name: delete_base_tables - author: Doug Lea - label: - en_US: Delete Base Tables - zh_Hans: 删除多维表格中的数据表 -description: - human: - en_US: Delete base tables - zh_Hans: | - 删除多维表格中的数据表 - llm: A tool for deleting a data table in a multidimensional table -parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: app_token - type: string - required: true - label: - en_US: app_token - zh_Hans: 多维表格 - human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token - form: llm - - - name: table_ids - type: string - required: true - label: - en_US: table_ids - zh_Hans: table_ids - human_description: - en_US: The ID list of the data tables to be deleted. Currently, a maximum of 50 data tables can be deleted at a time. The example is ["tbl1TkhyTWDkSoZ3","tblsRc9GRRXKqhvW"] - zh_Hans: 待删除数据表的id列表,当前一次操作最多支持50个数据表,示例为 ["tbl1TkhyTWDkSoZ3","tblsRc9GRRXKqhvW"] - llm_description: The ID list of the data tables to be deleted. Currently, a maximum of 50 data tables can be deleted at a time. 
The example is ["tbl1TkhyTWDkSoZ3","tblsRc9GRRXKqhvW"] - form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/get_base_info.py b/api/core/tools/provider/builtin/feishu_base/tools/get_base_info.py index f664bbeed0..a74e9be288 100644 --- a/api/core/tools/provider/builtin/feishu_base/tools/get_base_info.py +++ b/api/core/tools/provider/builtin/feishu_base/tools/get_base_info.py @@ -1,39 +1,17 @@ -import json -from typing import Any, Union - -import httpx +from typing import Any from core.tools.entities.tool_entities import ToolInvokeMessage from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest class GetBaseInfoTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}" + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") + app_token = tool_parameters.get("app_token") - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - try: - res = httpx.get(url.format(app_token=app_token), headers=headers, timeout=30) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to get base info, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to get base info. {}".format(e)) + res = client.get_base_info(app_token) + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/get_base_info.yaml b/api/core/tools/provider/builtin/feishu_base/tools/get_base_info.yaml index de08689018..eb0e7a26c0 100644 --- a/api/core/tools/provider/builtin/feishu_base/tools/get_base_info.yaml +++ b/api/core/tools/provider/builtin/feishu_base/tools/get_base_info.yaml @@ -6,49 +6,18 @@ identity: zh_Hans: 获取多维表格元数据 description: human: - en_US: Get base info - zh_Hans: | - 获取多维表格元数据,响应体如下: - { - "code": 0, - "msg": "success", - "data": { - "app": { - "app_token": "appbcbWCzen6D8dezhoCH2RpMAh", - "name": "mybase", - "revision": 1, - "is_advanced": false, - "time_zone": "Asia/Beijing" - } - } - } - app_token: 多维表格的 app_token; - name: 多维表格的名字; - revision: 多维表格的版本号; - is_advanced: 多维表格是否开启了高级权限。取值包括:(true-表示开启了高级权限,false-表示关闭了高级权限); - time_zone: 文档时区; - llm: A tool to get Base Metadata, imported parameter is Unique Device Identifier app_token of Base, app_token is required. + en_US: Get Metadata Information of Specified Multidimensional Table + zh_Hans: 获取指定多维表格的元数据信息 + llm: A tool for getting metadata information of a specified multidimensional table. 
(获取指定多维表格的元数据信息) parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - name: app_token type: string required: true label: en_US: app_token - zh_Hans: 多维表格 + zh_Hans: app_token human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token + en_US: Unique identifier for the multidimensional table, supports inputting document URL. + zh_Hans: 多维表格的唯一标识符,支持输入文档 URL。 + llm_description: 多维表格的唯一标识符,支持输入文档 URL。 form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/get_tenant_access_token.py b/api/core/tools/provider/builtin/feishu_base/tools/get_tenant_access_token.py deleted file mode 100644 index 2ea61d0068..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/get_tenant_access_token.py +++ /dev/null @@ -1,48 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class GetTenantAccessTokenTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/auth/v3/tenant_access_token/internal" - - app_id = tool_parameters.get("app_id", "") - if not app_id: - return self.create_text_message("Invalid parameter app_id") - - app_secret = tool_parameters.get("app_secret", "") - if not app_secret: - return self.create_text_message("Invalid parameter app_secret") - - headers = { - "Content-Type": "application/json", - } - params = {} - payload = {"app_id": app_id, "app_secret": app_secret} - - """ - { - "code": 0, - "msg": "ok", - "tenant_access_token": "t-caecc734c2e3328a62489fe0648c4b98779515d3", - "expire": 7200 - } - """ - try: - res = httpx.post(url, headers=headers, params=params, json=payload, timeout=30) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to get tenant access token, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to get tenant access token. {}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/get_tenant_access_token.yaml b/api/core/tools/provider/builtin/feishu_base/tools/get_tenant_access_token.yaml deleted file mode 100644 index 88acc27e06..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/get_tenant_access_token.yaml +++ /dev/null @@ -1,39 +0,0 @@ -identity: - name: get_tenant_access_token - author: Doug Lea - label: - en_US: Get Tenant Access Token - zh_Hans: 获取飞书自建应用的 tenant_access_token -description: - human: - en_US: Get tenant access token - zh_Hans: | - 获取飞书自建应用的 tenant_access_token,响应体示例: - {"code":0,"msg":"ok","tenant_access_token":"t-caecc734c2e3328a62489fe0648c4b98779515d3","expire":7200} - tenant_access_token: 租户访问凭证; - expire: tenant_access_token 的过期时间,单位为秒; - llm: A tool for obtaining a tenant access token. The input parameters must include app_id and app_secret. 
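The exchange this deleted tool wrapped is a single POST against the open-platform auth endpoint. A minimal sketch, using the endpoint, payload, and response shape shown in the deleted code above; the credential values are placeholders:

```python
# Sketch of the manual tenant_access_token exchange that the deleted
# get_tenant_access_token tool performed; credentials are placeholders.
import httpx

url = "https://open.feishu.cn/open-apis/auth/v3/tenant_access_token/internal"
payload = {"app_id": "cli-placeholder", "app_secret": "secret-placeholder"}

res = httpx.post(url, json=payload, timeout=30)
data = res.json()
# Expected response shape, per the deleted docstring:
# {"code": 0, "msg": "ok", "tenant_access_token": "t-...", "expire": 7200}
token = data["tenant_access_token"]  # valid for `expire` seconds
```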
-parameters: - - name: app_id - type: string - required: true - label: - en_US: app_id - zh_Hans: 应用唯一标识 - human_description: - en_US: app_id is the unique identifier of the Lark Open Platform application - zh_Hans: app_id 是飞书开放平台应用的唯一标识 - llm_description: app_id is the unique identifier of the Lark Open Platform application - form: llm - - - name: app_secret - type: secret-input - required: true - label: - en_US: app_secret - zh_Hans: 应用秘钥 - human_description: - en_US: app_secret is the secret key of the application - zh_Hans: app_secret 是应用的秘钥 - llm_description: app_secret is the secret key of the application - form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/list_base_records.py b/api/core/tools/provider/builtin/feishu_base/tools/list_base_records.py deleted file mode 100644 index e579d02f69..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/list_base_records.py +++ /dev/null @@ -1,65 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class ListBaseRecordsTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}/tables/{table_id}/records/search" - - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - table_id = tool_parameters.get("table_id", "") - if not table_id: - return self.create_text_message("Invalid parameter table_id") - - page_token = tool_parameters.get("page_token", "") - page_size = tool_parameters.get("page_size", "") - sort_condition = tool_parameters.get("sort_condition", "") - filter_condition = tool_parameters.get("filter_condition", "") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - params = { - "page_token": page_token, - "page_size": page_size, - } - - payload = {"automatic_fields": True} - if sort_condition: - payload["sort"] = json.loads(sort_condition) - if filter_condition: - payload["filter"] = json.loads(filter_condition) - - try: - res = httpx.post( - url.format(app_token=app_token, table_id=table_id), - headers=headers, - params=params, - json=payload, - timeout=30, - ) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to list base records, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to list base records. {}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/list_base_records.yaml b/api/core/tools/provider/builtin/feishu_base/tools/list_base_records.yaml deleted file mode 100644 index 8647c880a6..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/list_base_records.yaml +++ /dev/null @@ -1,108 +0,0 @@ -identity: - name: list_base_records - author: Doug Lea - label: - en_US: List Base Records - zh_Hans: 查询多维表格数据表中的现有记录 -description: - human: - en_US: List base records - zh_Hans: | - 查询多维表格数据表中的现有记录,单次最多查询 500 行记录,支持分页获取。 - llm: Query existing records in a multidimensional table data table. 
A maximum of 500 rows of records can be queried at a time, and paging retrieval is supported. -parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: app_token - type: string - required: true - label: - en_US: app_token - zh_Hans: 多维表格 - human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token - form: llm - - - name: table_id - type: string - required: true - label: - en_US: table_id - zh_Hans: 多维表格的数据表 - human_description: - en_US: bitable table id - zh_Hans: 多维表格数据表的唯一标识符 table_id - llm_description: bitable table id - form: llm - - - name: page_token - type: string - required: false - label: - en_US: page_token - zh_Hans: 分页标记 - human_description: - en_US: Pagination mark. If it is not filled in the first request, it means to traverse from the beginning. - zh_Hans: 分页标记,第一次请求不填,表示从头开始遍历。 - llm_description: 分页标记,第一次请求不填,表示从头开始遍历;分页查询结果还有更多项时会同时返回新的 page_token,下次遍历可采用该 page_token 获取查询结果。 - form: llm - - - name: page_size - type: number - required: false - default: 20 - label: - en_US: page_size - zh_Hans: 分页大小 - human_description: - en_US: paging size - zh_Hans: 分页大小,默认值为 20,最大值为 100。 - llm_description: The default value of paging size is 20 and the maximum value is 100. - form: llm - - - name: sort_condition - type: string - required: false - label: - en_US: sort_condition - zh_Hans: 排序条件 - human_description: - en_US: sort condition - zh_Hans: | - 排序条件,格式为:[{"field_name":"多行文本","desc":true}]。 - field_name: 字段名称; - desc: 是否倒序排序; - llm_description: | - Sorting conditions, the format is: [{"field_name":"multi-line text","desc":true}]. - form: llm - - - name: filter_condition - type: string - required: false - label: - en_US: filter_condition - zh_Hans: 筛选条件 - human_description: - en_US: filter condition - zh_Hans: | - 筛选条件,格式为:{"conjunction":"and","conditions":[{"field_name":"字段1","operator":"is","value":["文本内容"]}]}。 - conjunction:条件逻辑连接词; - conditions:筛选条件集合; - field_name:筛选条件的左值,值为字段的名称; - operator:条件运算符; - value:目标值; - llm_description: | - The format of the filter condition is: {"conjunction":"and","conditions":[{"field_name":"Field 1","operator":"is","value":["text content"]}]}. 
- form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/list_base_tables.py b/api/core/tools/provider/builtin/feishu_base/tools/list_base_tables.py deleted file mode 100644 index 4ec9a476bc..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/list_base_tables.py +++ /dev/null @@ -1,47 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class ListBaseTablesTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}/tables" - - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - page_token = tool_parameters.get("page_token", "") - page_size = tool_parameters.get("page_size", "") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - params = { - "page_token": page_token, - "page_size": page_size, - } - - try: - res = httpx.get(url.format(app_token=app_token), headers=headers, params=params, timeout=30) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to list base tables, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to list base tables. {}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/list_base_tables.yaml b/api/core/tools/provider/builtin/feishu_base/tools/list_base_tables.yaml deleted file mode 100644 index 9887124a28..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/list_base_tables.yaml +++ /dev/null @@ -1,65 +0,0 @@ -identity: - name: list_base_tables - author: Doug Lea - label: - en_US: List Base Tables - zh_Hans: 根据 app_token 获取多维表格下的所有数据表 -description: - human: - en_US: List base tables - zh_Hans: | - 根据 app_token 获取多维表格下的所有数据表 - llm: A tool for getting all data tables under a multidimensional table based on app_token. -parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: app_token - type: string - required: true - label: - en_US: app_token - zh_Hans: 多维表格 - human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token - form: llm - - - name: page_token - type: string - required: false - label: - en_US: page_token - zh_Hans: 分页标记 - human_description: - en_US: Pagination mark. If it is not filled in the first request, it means to traverse from the beginning. - zh_Hans: 分页标记,第一次请求不填,表示从头开始遍历。 - llm_description: | - Pagination token. If it is not filled in the first request, it means to start traversal from the beginning. - If there are more items in the pagination query result, a new page_token will be returned at the same time. 
- The page_token can be used to obtain the query result in the next traversal. - 分页标记,第一次请求不填,表示从头开始遍历;分页查询结果还有更多项时会同时返回新的 page_token,下次遍历可采用该 page_token 获取查询结果。 - form: llm - - - name: page_size - type: number - required: false - default: 20 - label: - en_US: page_size - zh_Hans: 分页大小 - human_description: - en_US: paging size - zh_Hans: 分页大小,默认值为 20,最大值为 100。 - llm_description: The default value of paging size is 20 and the maximum value is 100. - form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/read_base_record.py b/api/core/tools/provider/builtin/feishu_base/tools/read_base_record.py deleted file mode 100644 index fb818f8380..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/read_base_record.py +++ /dev/null @@ -1,49 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class ReadBaseRecordTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}/tables/{table_id}/records/{record_id}" - - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - table_id = tool_parameters.get("table_id", "") - if not table_id: - return self.create_text_message("Invalid parameter table_id") - - record_id = tool_parameters.get("record_id", "") - if not record_id: - return self.create_text_message("Invalid parameter record_id") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - try: - res = httpx.get( - url.format(app_token=app_token, table_id=table_id, record_id=record_id), headers=headers, timeout=30 - ) - res_json = res.json() - if res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to read base record, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to read base record. 
{}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/read_base_record.yaml b/api/core/tools/provider/builtin/feishu_base/tools/read_base_record.yaml deleted file mode 100644 index 400e9a1021..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/read_base_record.yaml +++ /dev/null @@ -1,60 +0,0 @@ -identity: - name: read_base_record - author: Doug Lea - label: - en_US: Read Base Record - zh_Hans: 根据 record_id 的值检索多维表格数据表的记录 -description: - human: - en_US: Read base record - zh_Hans: | - 根据 record_id 的值检索多维表格数据表的记录 - llm: Retrieve records from a multidimensional table based on the value of record_id -parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: app_token - type: string - required: true - label: - en_US: app_token - zh_Hans: 多维表格 - human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token - form: llm - - - name: table_id - type: string - required: true - label: - en_US: table_id - zh_Hans: 多维表格的数据表 - human_description: - en_US: bitable table id - zh_Hans: 多维表格数据表的唯一标识符 table_id - llm_description: bitable table id - form: llm - - - name: record_id - type: string - required: true - label: - en_US: record_id - zh_Hans: 单条记录的 id - human_description: - en_US: The id of a single record - zh_Hans: 单条记录的 id - llm_description: The id of a single record - form: llm diff --git a/api/core/tools/provider/builtin/feishu_base/tools/update_base_record.py b/api/core/tools/provider/builtin/feishu_base/tools/update_base_record.py deleted file mode 100644 index 6d7e33f3ff..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/update_base_record.py +++ /dev/null @@ -1,60 +0,0 @@ -import json -from typing import Any, Union - -import httpx - -from core.tools.entities.tool_entities import ToolInvokeMessage -from core.tools.tool.builtin_tool import BuiltinTool - - -class UpdateBaseRecordTool(BuiltinTool): - def _invoke( - self, user_id: str, tool_parameters: dict[str, Any] - ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: - url = "https://open.feishu.cn/open-apis/bitable/v1/apps/{app_token}/tables/{table_id}/records/{record_id}" - - access_token = tool_parameters.get("Authorization", "") - if not access_token: - return self.create_text_message("Invalid parameter access_token") - - app_token = tool_parameters.get("app_token", "") - if not app_token: - return self.create_text_message("Invalid parameter app_token") - - table_id = tool_parameters.get("table_id", "") - if not table_id: - return self.create_text_message("Invalid parameter table_id") - - record_id = tool_parameters.get("record_id", "") - if not record_id: - return self.create_text_message("Invalid parameter record_id") - - fields = tool_parameters.get("fields", "") - if not fields: - return self.create_text_message("Invalid parameter fields") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {access_token}", - } - - params = {} - payload = {"fields": json.loads(fields)} - - try: - res = httpx.put( - url.format(app_token=app_token, table_id=table_id, record_id=record_id), - headers=headers, - params=params, - json=payload, - timeout=30, - ) - res_json = res.json() - if 
res.is_success: - return self.create_text_message(text=json.dumps(res_json)) - else: - return self.create_text_message( - f"Failed to update base record, status code: {res.status_code}, response: {res.text}" - ) - except Exception as e: - return self.create_text_message("Failed to update base record. {}".format(e)) diff --git a/api/core/tools/provider/builtin/feishu_base/tools/update_base_record.yaml b/api/core/tools/provider/builtin/feishu_base/tools/update_base_record.yaml deleted file mode 100644 index 788798c4b3..0000000000 --- a/api/core/tools/provider/builtin/feishu_base/tools/update_base_record.yaml +++ /dev/null @@ -1,78 +0,0 @@ -identity: - name: update_base_record - author: Doug Lea - label: - en_US: Update Base Record - zh_Hans: 更新多维表格数据表中的一条记录 -description: - human: - en_US: Update base record - zh_Hans: | - 更新多维表格数据表中的一条记录,详细请参考:https://open.larkoffice.com/document/server-docs/docs/bitable-v1/app-table-record/update - llm: Update a record in a multidimensional table data table -parameters: - - name: Authorization - type: string - required: true - label: - en_US: token - zh_Hans: 凭证 - human_description: - en_US: API access token parameter, tenant_access_token or user_access_token - zh_Hans: API 的访问凭证参数,tenant_access_token 或 user_access_token - llm_description: API access token parameter, tenant_access_token or user_access_token - form: llm - - - name: app_token - type: string - required: true - label: - en_US: app_token - zh_Hans: 多维表格 - human_description: - en_US: bitable app token - zh_Hans: 多维表格的唯一标识符 app_token - llm_description: bitable app token - form: llm - - - name: table_id - type: string - required: true - label: - en_US: table_id - zh_Hans: 多维表格的数据表 - human_description: - en_US: bitable table id - zh_Hans: 多维表格数据表的唯一标识符 table_id - llm_description: bitable table id - form: llm - - - name: record_id - type: string - required: true - label: - en_US: record_id - zh_Hans: 单条记录的 id - human_description: - en_US: The id of a single record - zh_Hans: 单条记录的 id - llm_description: The id of a single record - form: llm - - - name: fields - type: string - required: true - label: - en_US: fields - zh_Hans: 数据表的列字段内容 - human_description: - en_US: The fields of a multidimensional table data table, that is, the columns of the data table. 
- zh_Hans: | - 要更新一行多维表格记录,字段结构拼接如下:{"多行文本":"多行文本内容","单选":"选项1","多选":["选项1","选项2"],"复选框":true,"人员":[{"id":"ou_2910013f1e6456f16a0ce75ede950a0a"}],"群组":[{"id":"oc_cd07f55f14d6f4a4f1b51504e7e97f48"}],"电话号码":"13026162666"} - 当前接口支持的字段类型为:多行文本、单选、条码、多选、日期、人员、附件、复选框、超链接、数字、单向关联、双向关联、电话号码、地理位置。 - 不同类型字段的数据结构请参考数据结构概述:https://open.larkoffice.com/document/server-docs/docs/bitable-v1/bitable-structure - llm_description: | - 要更新一行多维表格记录,字段结构拼接如下:{"多行文本":"多行文本内容","单选":"选项1","多选":["选项1","选项2"],"复选框":true,"人员":[{"id":"ou_2910013f1e6456f16a0ce75ede950a0a"}],"群组":[{"id":"oc_cd07f55f14d6f4a4f1b51504e7e97f48"}],"电话号码":"13026162666"} - 当前接口支持的字段类型为:多行文本、单选、条码、多选、日期、人员、附件、复选框、超链接、数字、单向关联、双向关联、电话号码、地理位置。 - 不同类型字段的数据结构请参考数据结构概述:https://open.larkoffice.com/document/server-docs/docs/bitable-v1/bitable-structure - form: llm diff --git a/api/core/tools/provider/builtin/siliconflow/tools/stable_diffusion.py b/api/core/tools/provider/builtin/siliconflow/tools/stable_diffusion.py index d6a0b03d1b..db43790c06 100644 --- a/api/core/tools/provider/builtin/siliconflow/tools/stable_diffusion.py +++ b/api/core/tools/provider/builtin/siliconflow/tools/stable_diffusion.py @@ -5,9 +5,12 @@ import requests from core.tools.entities.tool_entities import ToolInvokeMessage from core.tools.tool.builtin_tool import BuiltinTool -SDURL = { - "sd_3": "https://api.siliconflow.cn/v1/stabilityai/stable-diffusion-3-medium/text-to-image", - "sd_xl": "https://api.siliconflow.cn/v1/stabilityai/stable-diffusion-xl-base-1.0/text-to-image", +SILICONFLOW_API_URL = "https://api.siliconflow.cn/v1/image/generations" + +SD_MODELS = { + "sd_3": "stabilityai/stable-diffusion-3-medium", + "sd_xl": "stabilityai/stable-diffusion-xl-base-1.0", + "sd_3.5_large": "stabilityai/stable-diffusion-3-5-large", } @@ -22,9 +25,10 @@ class StableDiffusionTool(BuiltinTool): } model = tool_parameters.get("model", "sd_3") - url = SDURL.get(model) + sd_model = SD_MODELS.get(model) payload = { + "model": sd_model, "prompt": tool_parameters.get("prompt"), "negative_prompt": tool_parameters.get("negative_prompt", ""), "image_size": tool_parameters.get("image_size", "1024x1024"), @@ -34,7 +38,7 @@ class StableDiffusionTool(BuiltinTool): "num_inference_steps": tool_parameters.get("num_inference_steps", 20), } - response = requests.post(url, json=payload, headers=headers) + response = requests.post(SILICONFLOW_API_URL, json=payload, headers=headers) if response.status_code != 200: return self.create_text_message(f"Got Error Response:{response.text}") diff --git a/api/core/tools/provider/builtin/siliconflow/tools/stable_diffusion.yaml b/api/core/tools/provider/builtin/siliconflow/tools/stable_diffusion.yaml index dce10adc87..b330c92e16 100644 --- a/api/core/tools/provider/builtin/siliconflow/tools/stable_diffusion.yaml +++ b/api/core/tools/provider/builtin/siliconflow/tools/stable_diffusion.yaml @@ -40,6 +40,9 @@ parameters: - value: sd_xl label: en_US: Stable Diffusion XL + - value: sd_3.5_large + label: + en_US: Stable Diffusion 3.5 Large default: sd_3 label: en_US: Choose Image Model diff --git a/api/core/tools/provider/builtin_tool_provider.py b/api/core/tools/provider/builtin_tool_provider.py index ff022812ef..955a0add3b 100644 --- a/api/core/tools/provider/builtin_tool_provider.py +++ b/api/core/tools/provider/builtin_tool_provider.py @@ -13,7 +13,6 @@ from core.tools.errors import ( from core.tools.provider.tool_provider import ToolProviderController from core.tools.tool.builtin_tool import BuiltinTool from core.tools.tool.tool import Tool -from 
core.tools.utils.tool_parameter_converter import ToolParameterConverter from core.tools.utils.yaml_utils import load_yaml_file @@ -208,9 +207,7 @@ class BuiltinToolProviderController(ToolProviderController): # the parameter is not set currently, set the default value if needed if parameter_schema.default is not None: - default_value = ToolParameterConverter.cast_parameter_by_type( - parameter_schema.default, parameter_schema.type - ) + default_value = parameter_schema.type.cast_value(parameter_schema.default) tool_parameters[parameter] = default_value def validate_credentials(self, credentials: dict[str, Any]) -> None: diff --git a/api/core/tools/provider/tool_provider.py b/api/core/tools/provider/tool_provider.py index 321b212014..bc05a11562 100644 --- a/api/core/tools/provider/tool_provider.py +++ b/api/core/tools/provider/tool_provider.py @@ -11,7 +11,6 @@ from core.tools.entities.tool_entities import ( ) from core.tools.errors import ToolNotFoundError, ToolParameterValidationError, ToolProviderCredentialValidationError from core.tools.tool.tool import Tool -from core.tools.utils.tool_parameter_converter import ToolParameterConverter class ToolProviderController(BaseModel, ABC): @@ -127,9 +126,7 @@ class ToolProviderController(BaseModel, ABC): # the parameter is not set currently, set the default value if needed if parameter_schema.default is not None: - tool_parameters[parameter] = ToolParameterConverter.cast_parameter_by_type( - parameter_schema.default, parameter_schema.type - ) + tool_parameters[parameter] = parameter_schema.type.cast_value(parameter_schema.default) def validate_credentials_format(self, credentials: dict[str, Any]) -> None: """ diff --git a/api/core/tools/provider/workflow_tool_provider.py b/api/core/tools/provider/workflow_tool_provider.py index 25eaf6a66a..5656dd09ab 100644 --- a/api/core/tools/provider/workflow_tool_provider.py +++ b/api/core/tools/provider/workflow_tool_provider.py @@ -1,6 +1,6 @@ from typing import Optional -from core.app.app_config.entities import VariableEntity, VariableEntityType +from core.app.app_config.entities import VariableEntityType from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_entities import ( @@ -23,6 +23,8 @@ VARIABLE_TO_PARAMETER_TYPE_MAPPING = { VariableEntityType.PARAGRAPH: ToolParameter.ToolParameterType.STRING, VariableEntityType.SELECT: ToolParameter.ToolParameterType.SELECT, VariableEntityType.NUMBER: ToolParameter.ToolParameterType.NUMBER, + VariableEntityType.FILE: ToolParameter.ToolParameterType.FILE, + VariableEntityType.FILE_LIST: ToolParameter.ToolParameterType.FILES, } @@ -36,8 +38,8 @@ class WorkflowToolProviderController(ToolProviderController): if not app: raise ValueError("app not found") - controller = WorkflowToolProviderController( - **{ + controller = WorkflowToolProviderController.model_validate( + { "identity": { "author": db_provider.user.name if db_provider.user_id and db_provider.user else "", "name": db_provider.label, @@ -67,7 +69,7 @@ class WorkflowToolProviderController(ToolProviderController): :param app: the app :return: the tool """ - workflow: Workflow = ( + workflow = ( db.session.query(Workflow) .filter(Workflow.app_id == db_provider.app_id, Workflow.version == db_provider.version) .first() @@ -76,14 +78,14 @@ class WorkflowToolProviderController(ToolProviderController): raise ValueError("workflow not found") # fetch start node - graph: dict = workflow.graph_dict - 
features_dict: dict = workflow.features_dict + graph = workflow.graph_dict + features_dict = workflow.features_dict features = WorkflowAppConfigManager.convert_features(config_dict=features_dict, app_mode=AppMode.WORKFLOW) parameters = db_provider.parameter_configurations variables = WorkflowToolConfigurationUtils.get_workflow_graph_variables(graph) - def fetch_workflow_variable(variable_name: str) -> VariableEntity: + def fetch_workflow_variable(variable_name: str): return next(filter(lambda x: x.variable == variable_name, variables), None) user = db_provider.user @@ -114,7 +116,6 @@ class WorkflowToolProviderController(ToolProviderController): llm_description=parameter.description, required=variable.required, options=options, - default=variable.default, ) ) elif features.file_upload: @@ -123,7 +124,7 @@ class WorkflowToolProviderController(ToolProviderController): name=parameter.name, label=I18nObject(en_US=parameter.name, zh_Hans=parameter.name), human_description=I18nObject(en_US=parameter.description, zh_Hans=parameter.description), - type=ToolParameter.ToolParameterType.FILE, + type=ToolParameter.ToolParameterType.SYSTEM_FILES, llm_description=parameter.description, required=False, form=parameter.form, diff --git a/api/core/tools/tool/tool.py b/api/core/tools/tool/tool.py index cb4ab51ceb..6cb6e18b6d 100644 --- a/api/core/tools/tool/tool.py +++ b/api/core/tools/tool/tool.py @@ -20,10 +20,9 @@ from core.tools.entities.tool_entities import ( ToolRuntimeVariablePool, ) from core.tools.tool_file_manager import ToolFileManager -from core.tools.utils.tool_parameter_converter import ToolParameterConverter if TYPE_CHECKING: - from core.file.file_obj import FileVar + from core.file.models import File class Tool(BaseModel, ABC): @@ -63,8 +62,12 @@ class Tool(BaseModel, ABC): def __init__(self, **data: Any): super().__init__(**data) - class VariableKey(Enum): + class VariableKey(str, Enum): IMAGE = "image" + DOCUMENT = "document" + VIDEO = "video" + AUDIO = "audio" + CUSTOM = "custom" def fork_tool_runtime(self, runtime: dict[str, Any]) -> "Tool": """ @@ -221,9 +224,7 @@ class Tool(BaseModel, ABC): result = deepcopy(tool_parameters) for parameter in self.parameters or []: if parameter.name in tool_parameters: - result[parameter.name] = ToolParameterConverter.cast_parameter_by_type( - tool_parameters[parameter.name], parameter.type - ) + result[parameter.name] = parameter.type.cast_value(tool_parameters[parameter.name]) return result @@ -295,10 +296,8 @@ class Tool(BaseModel, ABC): """ return ToolInvokeMessage(type=ToolInvokeMessage.MessageType.IMAGE, message=image, save_as=save_as) - def create_file_var_message(self, file_var: "FileVar") -> ToolInvokeMessage: - return ToolInvokeMessage( - type=ToolInvokeMessage.MessageType.FILE_VAR, message="", meta={"file_var": file_var}, save_as="" - ) + def create_file_message(self, file: "File") -> ToolInvokeMessage: + return ToolInvokeMessage(type=ToolInvokeMessage.MessageType.FILE, message="", meta={"file": file}, save_as="") def create_link_message(self, link: str, save_as: str = "") -> ToolInvokeMessage: """ diff --git a/api/core/tools/tool/workflow_tool.py b/api/core/tools/tool/workflow_tool.py index a885b8784f..2ab72213ff 100644 --- a/api/core/tools/tool/workflow_tool.py +++ b/api/core/tools/tool/workflow_tool.py @@ -3,7 +3,7 @@ import logging from copy import deepcopy from typing import Any, Optional, Union -from core.file.file_obj import FileTransferMethod, FileVar +from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod from 
core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter, ToolProviderType from core.tools.tool.tool import Tool from extensions.ext_database import db @@ -45,11 +45,13 @@ class WorkflowTool(Tool): workflow = self._get_workflow(app_id=self.workflow_app_id, version=self.version) # transform the tool parameters - tool_parameters, files = self._transform_args(tool_parameters) + tool_parameters, files = self._transform_args(tool_parameters=tool_parameters) from core.app.apps.workflow.app_generator import WorkflowAppGenerator generator = WorkflowAppGenerator() + assert self.runtime is not None + assert self.runtime.invoke_from is not None result = generator.generate( app_model=app, workflow=workflow, @@ -74,7 +76,7 @@ class WorkflowTool(Tool): else: outputs, files = self._extract_files(outputs) for file in files: - result.append(self.create_file_var_message(file)) + result.append(self.create_file_message(file)) result.append(self.create_text_message(json.dumps(outputs, ensure_ascii=False))) result.append(self.create_json_message(outputs)) @@ -154,22 +156,22 @@ class WorkflowTool(Tool): parameters_result = {} files = [] for parameter in parameter_rules: - if parameter.type == ToolParameter.ToolParameterType.FILE: + if parameter.type == ToolParameter.ToolParameterType.SYSTEM_FILES: file = tool_parameters.get(parameter.name) if file: try: - file_var_list = [FileVar(**f) for f in file] - for file_var in file_var_list: - file_dict = { - "transfer_method": file_var.transfer_method.value, - "type": file_var.type.value, + file_var_list = [File.model_validate(f) for f in file] + for file in file_var_list: + file_dict: dict[str, str | None] = { + "transfer_method": file.transfer_method.value, + "type": file.type.value, } - if file_var.transfer_method == FileTransferMethod.TOOL_FILE: - file_dict["tool_file_id"] = file_var.related_id - elif file_var.transfer_method == FileTransferMethod.LOCAL_FILE: - file_dict["upload_file_id"] = file_var.related_id - elif file_var.transfer_method == FileTransferMethod.REMOTE_URL: - file_dict["url"] = file_var.preview_url + if file.transfer_method == FileTransferMethod.TOOL_FILE: + file_dict["tool_file_id"] = file.related_id + elif file.transfer_method == FileTransferMethod.LOCAL_FILE: + file_dict["upload_file_id"] = file.related_id + elif file.transfer_method == FileTransferMethod.REMOTE_URL: + file_dict["url"] = file.generate_url() files.append(file_dict) except Exception as e: @@ -179,7 +181,7 @@ class WorkflowTool(Tool): return parameters_result, files - def _extract_files(self, outputs: dict) -> tuple[dict, list[FileVar]]: + def _extract_files(self, outputs: dict) -> tuple[dict, list[File]]: """ extract files from the result @@ -190,17 +192,13 @@ class WorkflowTool(Tool): result = {} for key, value in outputs.items(): if isinstance(value, list): - has_file = False for item in value: - if isinstance(item, dict) and item.get("__variant") == "FileVar": - try: - files.append(FileVar(**item)) - has_file = True - except Exception as e: - pass - if has_file: - continue + if isinstance(item, dict) and item.get("dify_model_identity") == FILE_MODEL_IDENTITY: + file = File.model_validate(item) + files.append(file) + elif isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY: + file = File.model_validate(value) + files.append(file) result[key] = value - return result, files diff --git a/api/core/tools/tool_engine.py b/api/core/tools/tool_engine.py index 9912114dd6..9e290c3651 100644 --- a/api/core/tools/tool_engine.py +++ 
b/api/core/tools/tool_engine.py @@ -10,7 +10,8 @@ from yarl import URL from core.app.entities.app_invoke_entities import InvokeFrom from core.callback_handler.agent_tool_callback_handler import DifyAgentCallbackHandler from core.callback_handler.workflow_tool_callback_handler import DifyWorkflowCallbackHandler -from core.file.file_obj import FileTransferMethod +from core.file import FileType +from core.file.models import FileTransferMethod from core.ops.ops_trace_manager import TraceQueueManager from core.tools.entities.tool_entities import ToolInvokeMessage, ToolInvokeMessageBinary, ToolInvokeMeta, ToolParameter from core.tools.errors import ( @@ -26,6 +27,7 @@ from core.tools.tool.tool import Tool from core.tools.tool.workflow_tool import WorkflowTool from core.tools.utils.message_transformer import ToolFileMessageTransformer from extensions.ext_database import db +from models.enums import CreatedByRole from models.model import Message, MessageFile @@ -128,6 +130,7 @@ class ToolEngine: """ try: # hit the callback handler + assert tool.identity is not None workflow_tool_callback.on_tool_start(tool_name=tool.identity.name, tool_inputs=tool_parameters) if isinstance(tool, WorkflowTool): @@ -258,7 +261,10 @@ class ToolEngine: @staticmethod def _create_message_files( - tool_messages: list[ToolInvokeMessageBinary], agent_message: Message, invoke_from: InvokeFrom, user_id: str + tool_messages: list[ToolInvokeMessageBinary], + agent_message: Message, + invoke_from: InvokeFrom, + user_id: str, ) -> list[tuple[Any, str]]: """ Create message file @@ -269,29 +275,31 @@ class ToolEngine: result = [] for message in tool_messages: - file_type = "bin" if "image" in message.mimetype: - file_type = "image" + file_type = FileType.IMAGE elif "video" in message.mimetype: - file_type = "video" + file_type = FileType.VIDEO elif "audio" in message.mimetype: - file_type = "audio" - elif "text" in message.mimetype: - file_type = "text" - elif "pdf" in message.mimetype: - file_type = "pdf" - elif "zip" in message.mimetype: - file_type = "archive" - # ... 
+ file_type = FileType.AUDIO + elif "text" in message.mimetype or "pdf" in message.mimetype: + file_type = FileType.DOCUMENT + else: + file_type = FileType.CUSTOM + # extract tool file id from url + tool_file_id = message.url.split("/")[-1].split(".")[0] message_file = MessageFile( message_id=agent_message.id, type=file_type, - transfer_method=FileTransferMethod.TOOL_FILE.value, + transfer_method=FileTransferMethod.TOOL_FILE, belongs_to="assistant", url=message.url, - upload_file_id=None, - created_by_role=("account" if invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER} else "end_user"), + upload_file_id=tool_file_id, + created_by_role=( + CreatedByRole.ACCOUNT + if invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER} + else CreatedByRole.END_USER + ), created_by=user_id, ) diff --git a/api/core/tools/tool_file_manager.py b/api/core/tools/tool_file_manager.py index ad3b9c7328..1a28df31bc 100644 --- a/api/core/tools/tool_file_manager.py +++ b/api/core/tools/tool_file_manager.py @@ -4,7 +4,6 @@ import hmac import logging import os import time -from collections.abc import Generator from mimetypes import guess_extension, guess_type from typing import Optional, Union from uuid import uuid4 @@ -57,22 +56,32 @@ class ToolFileManager: @staticmethod def create_file_by_raw( - user_id: str, tenant_id: str, conversation_id: Optional[str], file_binary: bytes, mimetype: str + *, + user_id: str, + tenant_id: str, + conversation_id: Optional[str], + file_binary: bytes, + mimetype: str, ) -> ToolFile: - """ - create file - """ extension = guess_extension(mimetype) or ".bin" unique_name = uuid4().hex - filename = f"tools/{tenant_id}/{unique_name}{extension}" - storage.save(filename, file_binary) + filename = f"{unique_name}{extension}" + filepath = f"tools/{tenant_id}/{filename}" + storage.save(filepath, file_binary) tool_file = ToolFile( - user_id=user_id, tenant_id=tenant_id, conversation_id=conversation_id, file_key=filename, mimetype=mimetype + user_id=user_id, + tenant_id=tenant_id, + conversation_id=conversation_id, + file_key=filepath, + mimetype=mimetype, + name=filename, + size=len(file_binary), ) db.session.add(tool_file) db.session.commit() + db.session.refresh(tool_file) return tool_file @@ -80,29 +89,34 @@ class ToolFileManager: def create_file_by_url( user_id: str, tenant_id: str, - conversation_id: str, + conversation_id: str | None, file_url: str, ) -> ToolFile: - """ - create file - """ # try to download image - response = get(file_url) - response.raise_for_status() - blob = response.content + try: + response = get(file_url) + response.raise_for_status() + blob = response.content + except Exception as e: + logger.error(f"Failed to download file from {file_url}: {e}") + raise + mimetype = guess_type(file_url)[0] or "octet/stream" extension = guess_extension(mimetype) or ".bin" unique_name = uuid4().hex - filename = f"tools/{tenant_id}/{unique_name}{extension}" - storage.save(filename, blob) + filename = f"{unique_name}{extension}" + filepath = f"tools/{tenant_id}/{filename}" + storage.save(filepath, blob) tool_file = ToolFile( user_id=user_id, tenant_id=tenant_id, conversation_id=conversation_id, - file_key=filename, + file_key=filepath, mimetype=mimetype, original_url=file_url, + name=filename, + size=len(blob), ) db.session.add(tool_file) @@ -110,18 +124,6 @@ class ToolFileManager: return tool_file - @staticmethod - def create_file_by_key( - user_id: str, tenant_id: str, conversation_id: str, file_key: str, mimetype: str - ) -> ToolFile: - """ - create file - """ - tool_file = 
ToolFile( - user_id=user_id, tenant_id=tenant_id, conversation_id=conversation_id, file_key=file_key, mimetype=mimetype - ) - return tool_file - @staticmethod def get_file_binary(id: str) -> Union[tuple[bytes, str], None]: """ @@ -131,7 +133,7 @@ class ToolFileManager: :return: the binary of the file, mime type """ - tool_file: ToolFile = ( + tool_file = ( db.session.query(ToolFile) .filter( ToolFile.id == id, @@ -155,7 +157,7 @@ class ToolFileManager: :return: the binary of the file, mime type """ - message_file: MessageFile = ( + message_file = ( db.session.query(MessageFile) .filter( MessageFile.id == id, @@ -166,13 +168,16 @@ class ToolFileManager: # Check if message_file is not None if message_file is not None: # get tool file id - tool_file_id = message_file.url.split("/")[-1] - # trim extension - tool_file_id = tool_file_id.split(".")[0] + if message_file.url is not None: + tool_file_id = message_file.url.split("/")[-1] + # trim extension + tool_file_id = tool_file_id.split(".")[0] + else: + tool_file_id = None else: tool_file_id = None - tool_file: ToolFile = ( + tool_file = ( db.session.query(ToolFile) .filter( ToolFile.id == tool_file_id, @@ -188,7 +193,7 @@ class ToolFileManager: return blob, tool_file.mimetype @staticmethod - def get_file_generator_by_tool_file_id(tool_file_id: str) -> Union[tuple[Generator, str], None]: + def get_file_generator_by_tool_file_id(tool_file_id: str): """ get file binary @@ -196,7 +201,7 @@ class ToolFileManager: :return: the binary of the file, mime type """ - tool_file: ToolFile = ( + tool_file = ( db.session.query(ToolFile) .filter( ToolFile.id == tool_file_id, @@ -205,11 +210,11 @@ class ToolFileManager: ) if not tool_file: - return None + return None, None - generator = storage.load_stream(tool_file.file_key) + stream = storage.load_stream(tool_file.file_key) - return generator, tool_file.mimetype + return stream, tool_file # init tool_file_parser diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index ed66dd1357..9e984732b7 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -24,7 +24,6 @@ from core.tools.tool.builtin_tool import BuiltinTool from core.tools.tool.tool import Tool from core.tools.tool_label_manager import ToolLabelManager from core.tools.utils.configuration import ToolConfigurationManager, ToolParameterConfigurationManager -from core.tools.utils.tool_parameter_converter import ToolParameterConverter from extensions.ext_database import db from models.tools import ApiToolProvider, BuiltinToolProvider, WorkflowToolProvider from services.tools.tools_transform_service import ToolTransformService @@ -203,7 +202,7 @@ class ToolManager: raise ToolProviderNotFoundError(f"provider type {provider_type} not found") @classmethod - def _init_runtime_parameter(cls, parameter_rule: ToolParameter, parameters: dict) -> Union[str, int, float, bool]: + def _init_runtime_parameter(cls, parameter_rule: ToolParameter, parameters: dict): """ init runtime parameter """ @@ -222,7 +221,7 @@ class ToolManager: f"tool parameter {parameter_rule.name} value {parameter_value} not in options {options}" ) - return ToolParameterConverter.cast_parameter_by_type(parameter_value, parameter_rule.type) + return parameter_rule.type.cast_value(parameter_value) @classmethod def get_agent_tool_runtime( @@ -243,7 +242,11 @@ class ToolManager: parameters = tool_entity.get_all_runtime_parameters() for parameter in parameters: # check file types - if parameter.type == ToolParameter.ToolParameterType.FILE: + if 
parameter.type in {
+                ToolParameter.ToolParameterType.SYSTEM_FILES,
+                ToolParameter.ToolParameterType.FILE,
+                ToolParameter.ToolParameterType.FILES,
+            }:
                 raise ValueError(f"file type parameter {parameter.name} not supported in agent")
 
             if parameter.form == ToolParameter.ToolParameterForm.FORM:
diff --git a/api/core/tools/utils/feishu_api_utils.py b/api/core/tools/utils/feishu_api_utils.py
index 245b296d18..722cf4b538 100644
--- a/api/core/tools/utils/feishu_api_utils.py
+++ b/api/core/tools/utils/feishu_api_utils.py
@@ -1,3 +1,4 @@
+import json
 from typing import Optional
 
 import httpx
@@ -17,6 +18,41 @@ def auth(credentials):
         raise ToolProviderCredentialValidationError(str(e))
 
 
+def convert_add_records(json_str):
+    try:
+        data = json.loads(json_str)
+        if not isinstance(data, list):
+            raise ValueError("Parsed data must be a list")
+        converted_data = [{"fields": json.dumps(item, ensure_ascii=False)} for item in data]
+        return converted_data
+    except json.JSONDecodeError:
+        raise ValueError("The input string is not valid JSON")
+    except Exception as e:
+        raise ValueError(f"An error occurred while processing the data: {e}")
+
+
+def convert_update_records(json_str):
+    try:
+        data = json.loads(json_str)
+        if not isinstance(data, list):
+            raise ValueError("Parsed data must be a list")
+
+        converted_data = [
+            {"fields": json.dumps(record["fields"], ensure_ascii=False), "record_id": record["record_id"]}
+            for record in data
+            if "fields" in record and "record_id" in record
+        ]
+
+        if len(converted_data) != len(data):
+            raise ValueError("Each record must contain 'fields' and 'record_id'")
+
+        return converted_data
+    except json.JSONDecodeError:
+        raise ValueError("The input string is not valid JSON")
+    except Exception as e:
+        raise ValueError(f"An error occurred while processing the data: {e}")
+
+
 class FeishuRequest:
     API_BASE_URL = "https://lark-plugin-api.solutionsuite.cn/lark-plugin"
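The convert_add_records and convert_update_records helpers above normalize a caller-supplied JSON string into the record payloads the Feishu Base endpoints expect. A minimal sketch of their observable behavior (the sample data is hypothetical; the import path is the module this diff modifies):

    # Sketch only, not part of the patch; assumes the module above is importable.
    import json

    from core.tools.utils.feishu_api_utils import convert_add_records, convert_update_records

    # convert_add_records: each top-level list item becomes {"fields": "<item re-encoded as JSON>"}.
    assert convert_add_records(json.dumps([{"Name": "Alice", "Score": 95}])) == [
        {"fields": '{"Name": "Alice", "Score": 95}'}
    ]

    # convert_update_records: every record must carry both "fields" and "record_id",
    # otherwise the helper raises ValueError.
    assert convert_update_records(json.dumps([{"record_id": "rec123", "fields": {"Score": 97}}])) == [
        {"fields": '{"Score": 97}', "record_id": "rec123"}
    ]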
@@ -517,3 +553,270 @@ class FeishuRequest:
         }
         res = self._send_request(url, method="GET", params=params)
         return res.get("data")
+
+    def create_base(
+        self,
+        name: str,
+        folder_token: str,
+    ) -> dict:
+        # Create a Base (multi-dimensional table)
+        url = f"{self.API_BASE_URL}/base/create_base"
+        payload = {
+            "name": name,
+            "folder_token": folder_token,
+        }
+        res = self._send_request(url, payload=payload)
+        return res.get("data")
+
+    def add_records(
+        self,
+        app_token: str,
+        table_id: str,
+        table_name: str,
+        records: str,
+        user_id_type: str = "open_id",
+    ) -> dict:
+        # Add multiple records
+        url = f"{self.API_BASE_URL}/base/add_records"
+        params = {
+            "app_token": app_token,
+            "table_id": table_id,
+            "table_name": table_name,
+            "user_id_type": user_id_type,
+        }
+        payload = {
+            "records": convert_add_records(records),
+        }
+        res = self._send_request(url, params=params, payload=payload)
+        return res.get("data")
+
+    def update_records(
+        self,
+        app_token: str,
+        table_id: str,
+        table_name: str,
+        records: str,
+        user_id_type: str,
+    ) -> dict:
+        # Update multiple records
+        url = f"{self.API_BASE_URL}/base/update_records"
+        params = {
+            "app_token": app_token,
+            "table_id": table_id,
+            "table_name": table_name,
+            "user_id_type": user_id_type,
+        }
+        payload = {
+            "records": convert_update_records(records),
+        }
+        res = self._send_request(url, params=params, payload=payload)
+        return res.get("data")
+
+    def delete_records(
+        self,
+        app_token: str,
+        table_id: str,
+        table_name: str,
+        record_ids: str,
+    ) -> dict:
+        # Delete multiple records
+        url = f"{self.API_BASE_URL}/base/delete_records"
+        params = {
+            "app_token": app_token,
+            "table_id": table_id,
+            "table_name": table_name,
+        }
+        if not record_ids:
+            record_id_list = []
+        else:
+            try:
+                record_id_list = json.loads(record_ids)
+            except json.JSONDecodeError:
+                raise ValueError("The input string is not valid JSON")
+        payload = {
+            "records": record_id_list,
+        }
+        res = self._send_request(url, params=params, payload=payload)
+        return res.get("data")
+
+    def search_record(
+        self,
+        app_token: str,
+        table_id: str,
+        table_name: str,
+        view_id: str,
+        field_names: str,
+        sort: str,
+        filters: str,
+        page_token: str,
+        automatic_fields: bool = False,
+        user_id_type: str = "open_id",
+        page_size: int = 20,
+    ) -> dict:
+        # Search records; a single query returns at most 500 rows.
+        url = f"{self.API_BASE_URL}/base/search_record"
+        params = {
+            "app_token": app_token,
+            "table_id": table_id,
+            "table_name": table_name,
+            "user_id_type": user_id_type,
+            "page_token": page_token,
+            "page_size": page_size,
+        }
+
+        if not field_names:
+            field_name_list = []
+        else:
+            try:
+                field_name_list = json.loads(field_names)
+            except json.JSONDecodeError:
+                raise ValueError("The input string is not valid JSON")
+
+        if not sort:
+            sort_list = []
+        else:
+            try:
+                sort_list = json.loads(sort)
+            except json.JSONDecodeError:
+                raise ValueError("The input string is not valid JSON")
+
+        if not filters:
+            filter_dict = {}
+        else:
+            try:
+                filter_dict = json.loads(filters)
+            except json.JSONDecodeError:
+                raise ValueError("The input string is not valid JSON")
+
+        payload = {}
+
+        if view_id:
+            payload["view_id"] = view_id
+        if field_names:
+            payload["field_names"] = field_name_list
+        if sort:
+            payload["sort"] = sort_list
+        if filters:
+            payload["filter"] = filter_dict
+        if automatic_fields:
+            payload["automatic_fields"] = automatic_fields
+        res = self._send_request(url, params=params, payload=payload)
+        return res.get("data")
+
+    def get_base_info(
+        self,
+        app_token: str,
+    ) -> dict:
+        # Get the metadata of a Base (multi-dimensional table)
+        url = f"{self.API_BASE_URL}/base/get_base_info"
+        params = {
+            "app_token": app_token,
+        }
+        res = self._send_request(url, method="GET", params=params)
+        return res.get("data")
+
+    def create_table(
+        self,
+        app_token: str,
+        table_name: str,
+        default_view_name: str,
+        fields: str,
+    ) -> dict:
+        # Create a new data table
+        url = f"{self.API_BASE_URL}/base/create_table"
+        params = {
+            "app_token": app_token,
+        }
+        if not fields:
+            fields_list = []
+        else:
+            try:
+                fields_list = json.loads(fields)
+            except json.JSONDecodeError:
+                raise ValueError("The input string is not valid JSON")
+        payload = {
+            "name": table_name,
+            "fields": fields_list,
+        }
+        if default_view_name:
+            payload["default_view_name"] = default_view_name
+        res = self._send_request(url, params=params, payload=payload)
+        return res.get("data")
+
+    def delete_tables(
+        self,
+        app_token: str,
+        table_ids: str,
+        table_names: str,
+    ) -> dict:
+        # Delete multiple data tables
+        url = f"{self.API_BASE_URL}/base/delete_tables"
+        params = {
+            "app_token": app_token,
+        }
+        if not table_ids:
+            table_id_list = []
+        else:
+            try:
+                table_id_list = json.loads(table_ids)
+            except json.JSONDecodeError:
+                raise ValueError("The input string is not valid JSON")
+
+        if not table_names:
+            table_name_list = []
+        else:
+            try:
+                table_name_list = json.loads(table_names)
+            except json.JSONDecodeError:
+                raise ValueError("The input string is not valid JSON")
+
+        payload = {
+            "table_ids": table_id_list,
+            "table_names": table_name_list,
+        }
+        res = self._send_request(url, params=params, payload=payload)
+        return res.get("data")
+
+    def list_tables(
+        self,
+        app_token: str,
+        page_token: str,
+        page_size: int = 20,
+    ) -> dict:
+        # List all data tables under a Base (multi-dimensional table)
+        url = 
f"{self.API_BASE_URL}/base/list_tables" + params = { + "app_token": app_token, + "page_token": page_token, + "page_size": page_size, + } + res = self._send_request(url, method="GET", params=params) + return res.get("data") + + def read_records( + self, + app_token: str, + table_id: str, + table_name: str, + record_ids: str, + user_id_type: str = "open_id", + ) -> dict: + url = f"{self.API_BASE_URL}/base/read_records" + params = { + "app_token": app_token, + "table_id": table_id, + "table_name": table_name, + } + if not record_ids: + record_id_list = [] + else: + try: + record_id_list = json.loads(record_ids) + except json.JSONDecodeError: + raise ValueError("The input string is not valid JSON") + payload = { + "record_ids": record_id_list, + "user_id_type": user_id_type, + } + res = self._send_request(url, method="GET", params=params, payload=payload) + return res.get("data") diff --git a/api/core/tools/utils/message_transformer.py b/api/core/tools/utils/message_transformer.py index 3cfab207ba..1812d24571 100644 --- a/api/core/tools/utils/message_transformer.py +++ b/api/core/tools/utils/message_transformer.py @@ -1,7 +1,8 @@ import logging from mimetypes import guess_extension +from typing import Optional -from core.file.file_obj import FileTransferMethod, FileType +from core.file import File, FileTransferMethod, FileType from core.tools.entities.tool_entities import ToolInvokeMessage from core.tools.tool_file_manager import ToolFileManager @@ -11,7 +12,7 @@ logger = logging.getLogger(__name__) class ToolFileMessageTransformer: @classmethod def transform_tool_invoke_messages( - cls, messages: list[ToolInvokeMessage], user_id: str, tenant_id: str, conversation_id: str + cls, messages: list[ToolInvokeMessage], user_id: str, tenant_id: str, conversation_id: str | None ) -> list[ToolInvokeMessage]: """ Transform tool message and handle file download @@ -21,7 +22,7 @@ class ToolFileMessageTransformer: for message in messages: if message.type in {ToolInvokeMessage.MessageType.TEXT, ToolInvokeMessage.MessageType.LINK}: result.append(message) - elif message.type == ToolInvokeMessage.MessageType.IMAGE: + elif message.type == ToolInvokeMessage.MessageType.IMAGE and isinstance(message.message, str): # try to download image try: file = ToolFileManager.create_file_by_url( @@ -50,11 +51,14 @@ class ToolFileMessageTransformer: ) elif message.type == ToolInvokeMessage.MessageType.BLOB: # get mime type and save blob to storage + assert message.meta is not None mimetype = message.meta.get("mime_type", "octet/stream") # if message is str, encode it to bytes if isinstance(message.message, str): message.message = message.message.encode("utf-8") + # FIXME: should do a type check here. 
+ assert isinstance(message.message, bytes) file = ToolFileManager.create_file_by_raw( user_id=user_id, tenant_id=tenant_id, @@ -63,7 +67,7 @@ class ToolFileMessageTransformer: mimetype=mimetype, ) - url = cls.get_tool_file_url(file.id, guess_extension(file.mimetype)) + url = cls.get_tool_file_url(tool_file_id=file.id, extension=guess_extension(file.mimetype)) # check if file is image if "image" in mimetype: @@ -84,12 +88,14 @@ class ToolFileMessageTransformer: meta=message.meta.copy() if message.meta is not None else {}, ) ) - elif message.type == ToolInvokeMessage.MessageType.FILE_VAR: - file_var = message.meta.get("file_var") - if file_var: - if file_var.transfer_method == FileTransferMethod.TOOL_FILE: - url = cls.get_tool_file_url(file_var.related_id, file_var.extension) - if file_var.type == FileType.IMAGE: + elif message.type == ToolInvokeMessage.MessageType.FILE: + assert message.meta is not None + file = message.meta.get("file") + if isinstance(file, File): + if file.transfer_method == FileTransferMethod.TOOL_FILE: + assert file.related_id is not None + url = cls.get_tool_file_url(tool_file_id=file.related_id, extension=file.extension) + if file.type == FileType.IMAGE: result.append( ToolInvokeMessage( type=ToolInvokeMessage.MessageType.IMAGE_LINK, @@ -107,11 +113,13 @@ class ToolFileMessageTransformer: meta=message.meta.copy() if message.meta is not None else {}, ) ) + else: + result.append(message) else: result.append(message) return result @classmethod - def get_tool_file_url(cls, tool_file_id: str, extension: str) -> str: + def get_tool_file_url(cls, tool_file_id: str, extension: Optional[str]) -> str: return f'/files/tools/{tool_file_id}{extension or ".bin"}' diff --git a/api/core/tools/utils/tool_parameter_converter.py b/api/core/tools/utils/tool_parameter_converter.py deleted file mode 100644 index 6f7610651c..0000000000 --- a/api/core/tools/utils/tool_parameter_converter.py +++ /dev/null @@ -1,71 +0,0 @@ -from typing import Any - -from core.tools.entities.tool_entities import ToolParameter - - -class ToolParameterConverter: - @staticmethod - def get_parameter_type(parameter_type: str | ToolParameter.ToolParameterType) -> str: - match parameter_type: - case ( - ToolParameter.ToolParameterType.STRING - | ToolParameter.ToolParameterType.SECRET_INPUT - | ToolParameter.ToolParameterType.SELECT - ): - return "string" - - case ToolParameter.ToolParameterType.BOOLEAN: - return "boolean" - - case ToolParameter.ToolParameterType.NUMBER: - return "number" - - case _: - raise ValueError(f"Unsupported parameter type {parameter_type}") - - @staticmethod - def cast_parameter_by_type(value: Any, parameter_type: str) -> Any: - # convert tool parameter config to correct type - try: - match parameter_type: - case ( - ToolParameter.ToolParameterType.STRING - | ToolParameter.ToolParameterType.SECRET_INPUT - | ToolParameter.ToolParameterType.SELECT - ): - if value is None: - return "" - else: - return value if isinstance(value, str) else str(value) - - case ToolParameter.ToolParameterType.BOOLEAN: - if value is None: - return False - elif isinstance(value, str): - # Allowed YAML boolean value strings: https://yaml.org/type/bool.html - # and also '0' for False and '1' for True - match value.lower(): - case "true" | "yes" | "y" | "1": - return True - case "false" | "no" | "n" | "0": - return False - case _: - return bool(value) - else: - return value if isinstance(value, bool) else bool(value) - - case ToolParameter.ToolParameterType.NUMBER: - if isinstance(value, int) | isinstance(value, float): 
- return value - elif isinstance(value, str) and value != "": - if "." in value: - return float(value) - else: - return int(value) - case ToolParameter.ToolParameterType.FILE: - return value - case _: - return str(value) - - except Exception: - raise ValueError(f"The tool parameter value {value} is not in correct type of {parameter_type}.") diff --git a/api/core/tools/utils/workflow_configuration_sync.py b/api/core/tools/utils/workflow_configuration_sync.py index 94d9fd9eb9..d92bfb9b90 100644 --- a/api/core/tools/utils/workflow_configuration_sync.py +++ b/api/core/tools/utils/workflow_configuration_sync.py @@ -1,19 +1,18 @@ +from collections.abc import Mapping, Sequence +from typing import Any + from core.app.app_config.entities import VariableEntity from core.tools.entities.tool_entities import WorkflowToolParameterConfiguration class WorkflowToolConfigurationUtils: @classmethod - def check_parameter_configurations(cls, configurations: list[dict]): - """ - check parameter configurations - """ + def check_parameter_configurations(cls, configurations: Mapping[str, Any]): for configuration in configurations: - if not WorkflowToolParameterConfiguration(**configuration): - raise ValueError("invalid parameter configuration") + WorkflowToolParameterConfiguration.model_validate(configuration) @classmethod - def get_workflow_graph_variables(cls, graph: dict) -> list[VariableEntity]: + def get_workflow_graph_variables(cls, graph: Mapping[str, Any]) -> Sequence[VariableEntity]: """ get workflow graph variables """ @@ -23,7 +22,7 @@ class WorkflowToolConfigurationUtils: if not start_node: return [] - return [VariableEntity(**variable) for variable in start_node.get("data", {}).get("variables", [])] + return [VariableEntity.model_validate(variable) for variable in start_node.get("data", {}).get("variables", [])] @classmethod def check_is_synced( diff --git a/api/core/tools/utils/yaml_utils.py b/api/core/tools/utils/yaml_utils.py index 99b9f80499..42c7f85bc6 100644 --- a/api/core/tools/utils/yaml_utils.py +++ b/api/core/tools/utils/yaml_utils.py @@ -1,4 +1,5 @@ import logging +from pathlib import Path from typing import Any import yaml @@ -17,15 +18,18 @@ def load_yaml_file(file_path: str, ignore_error: bool = True, default_value: Any :param default_value: the value returned when errors ignored :return: an object of the YAML content """ - try: - with open(file_path, encoding="utf-8") as yaml_file: - try: - yaml_content = yaml.safe_load(yaml_file) - return yaml_content or default_value - except Exception as e: - raise YAMLError(f"Failed to load YAML file {file_path}: {e}") - except Exception as e: + if not file_path or not Path(file_path).exists(): if ignore_error: return default_value else: - raise e + raise FileNotFoundError(f"File not found: {file_path}") + + with open(file_path, encoding="utf-8") as yaml_file: + try: + yaml_content = yaml.safe_load(yaml_file) + return yaml_content or default_value + except Exception as e: + if ignore_error: + return default_value + else: + raise YAMLError(f"Failed to load YAML file {file_path}: {e}") from e diff --git a/api/core/workflow/callbacks/__init__.py b/api/core/workflow/callbacks/__init__.py index e69de29bb2..403fbbaa2f 100644 --- a/api/core/workflow/callbacks/__init__.py +++ b/api/core/workflow/callbacks/__init__.py @@ -0,0 +1,7 @@ +from .base_workflow_callback import WorkflowCallback +from .workflow_logging_callback import WorkflowLoggingCallback + +__all__ = [ + "WorkflowLoggingCallback", + "WorkflowCallback", +] diff --git 
a/api/core/workflow/entities/base_node_data_entities.py b/api/core/workflow/entities/base_node_data_entities.py deleted file mode 100644 index 2a864dd7a8..0000000000 --- a/api/core/workflow/entities/base_node_data_entities.py +++ /dev/null @@ -1,24 +0,0 @@ -from abc import ABC -from typing import Optional - -from pydantic import BaseModel - - -class BaseNodeData(ABC, BaseModel): - title: str - desc: Optional[str] = None - - -class BaseIterationNodeData(BaseNodeData): - start_node_id: Optional[str] = None - - -class BaseIterationState(BaseModel): - iteration_node_id: str - index: int - inputs: dict - - class MetaData(BaseModel): - pass - - metadata: MetaData diff --git a/api/core/workflow/entities/node_entities.py b/api/core/workflow/entities/node_entities.py index 5353b99ed3..0131bb342b 100644 --- a/api/core/workflow/entities/node_entities.py +++ b/api/core/workflow/entities/node_entities.py @@ -1,52 +1,14 @@ +from collections.abc import Mapping from enum import Enum from typing import Any, Optional from pydantic import BaseModel from core.model_runtime.entities.llm_entities import LLMUsage -from models import WorkflowNodeExecutionStatus +from models.workflow import WorkflowNodeExecutionStatus -class NodeType(Enum): - """ - Node Types. - """ - - START = "start" - END = "end" - ANSWER = "answer" - LLM = "llm" - KNOWLEDGE_RETRIEVAL = "knowledge-retrieval" - IF_ELSE = "if-else" - CODE = "code" - TEMPLATE_TRANSFORM = "template-transform" - QUESTION_CLASSIFIER = "question-classifier" - HTTP_REQUEST = "http-request" - TOOL = "tool" - VARIABLE_AGGREGATOR = "variable-aggregator" - # TODO: merge this into VARIABLE_AGGREGATOR - VARIABLE_ASSIGNER = "variable-assigner" - LOOP = "loop" - ITERATION = "iteration" - ITERATION_START = "iteration-start" # fake start node for iteration - PARAMETER_EXTRACTOR = "parameter-extractor" - CONVERSATION_VARIABLE_ASSIGNER = "assigner" - - @classmethod - def value_of(cls, value: str) -> "NodeType": - """ - Get value of given node type. - - :param value: node type value - :return: node type - """ - for node_type in cls: - if node_type.value == value: - return node_type - raise ValueError(f"invalid node type value {value}") - - -class NodeRunMetadataKey(Enum): +class NodeRunMetadataKey(str, Enum): """ Node Run Metadata Key. 
""" @@ -70,7 +32,7 @@ class NodeRunResult(BaseModel): status: WorkflowNodeExecutionStatus = WorkflowNodeExecutionStatus.RUNNING - inputs: Optional[dict[str, Any]] = None # node inputs + inputs: Optional[Mapping[str, Any]] = None # node inputs process_data: Optional[dict[str, Any]] = None # process data outputs: Optional[dict[str, Any]] = None # node outputs metadata: Optional[dict[NodeRunMetadataKey, Any]] = None # node metadata @@ -79,24 +41,3 @@ class NodeRunResult(BaseModel): edge_source_handle: Optional[str] = None # source handle id of node with multiple branches error: Optional[str] = None # error message if status is failed - - -class UserFrom(Enum): - """ - User from - """ - - ACCOUNT = "account" - END_USER = "end-user" - - @classmethod - def value_of(cls, value: str) -> "UserFrom": - """ - Value of - :param value: value - :return: - """ - for item in cls: - if item.value == value: - return item - raise ValueError(f"Invalid value: {value}") diff --git a/api/core/workflow/entities/variable_entities.py b/api/core/workflow/entities/variable_entities.py index 1dfb1852f8..8f4c2d7975 100644 --- a/api/core/workflow/entities/variable_entities.py +++ b/api/core/workflow/entities/variable_entities.py @@ -1,3 +1,5 @@ +from collections.abc import Sequence + from pydantic import BaseModel @@ -7,4 +9,4 @@ class VariableSelector(BaseModel): """ variable: str - value_selector: list[str] + value_selector: Sequence[str] diff --git a/api/core/workflow/entities/variable_pool.py b/api/core/workflow/entities/variable_pool.py index b94b7f7198..3dc3395da1 100644 --- a/api/core/workflow/entities/variable_pool.py +++ b/api/core/workflow/entities/variable_pool.py @@ -1,20 +1,22 @@ +import re from collections import defaultdict from collections.abc import Mapping, Sequence from typing import Any, Union -from pydantic import BaseModel, Field, model_validator -from typing_extensions import deprecated +from pydantic import BaseModel, Field -from core.app.segments import Segment, Variable, factory -from core.file.file_obj import FileVar -from core.workflow.enums import SystemVariableKey +from core.file import File, FileAttribute, file_manager +from core.variables import Segment, SegmentGroup, Variable +from core.variables.segments import FileSegment +from factories import variable_factory -VariableValue = Union[str, int, float, dict, list, FileVar] +from ..constants import CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID +from ..enums import SystemVariableKey + +VariableValue = Union[str, int, float, dict, list, File] -SYSTEM_VARIABLE_NODE_ID = "sys" -ENVIRONMENT_VARIABLE_NODE_ID = "env" -CONVERSATION_VARIABLE_NODE_ID = "conversation" +VARIABLE_PATTERN = re.compile(r"\{\{#([a-zA-Z0-9_]{1,50}(?:\.[a-zA-Z_][a-zA-Z0-9_]{0,29}){1,10})#\}\}") class VariablePool(BaseModel): @@ -23,46 +25,63 @@ class VariablePool(BaseModel): # Other elements of the selector are the keys in the second-level dictionary. To get the key, we hash the # elements of the selector except the first one. variable_dictionary: dict[str, dict[int, Segment]] = Field( - description="Variables mapping", default=defaultdict(dict) + description="Variables mapping", + default=defaultdict(dict), ) - # TODO: This user inputs is not used for pool. 
     user_inputs: Mapping[str, Any] = Field(
         description="User inputs",
     )
-
     system_variables: Mapping[SystemVariableKey, Any] = Field(
         description="System variables",
     )
+    environment_variables: Sequence[Variable] = Field(
+        description="Environment variables.",
+        default_factory=list,
+    )
+    conversation_variables: Sequence[Variable] = Field(
+        description="Conversation variables.",
+        default_factory=list,
+    )
 
-    environment_variables: Sequence[Variable] = Field(description="Environment variables.", default_factory=list)
+    def __init__(
+        self,
+        *,
+        system_variables: Mapping[SystemVariableKey, Any] | None = None,
+        user_inputs: Mapping[str, Any] | None = None,
+        environment_variables: Sequence[Variable] | None = None,
+        conversation_variables: Sequence[Variable] | None = None,
+        **kwargs,
+    ):
+        environment_variables = environment_variables or []
+        conversation_variables = conversation_variables or []
+        user_inputs = user_inputs or {}
+        system_variables = system_variables or {}
 
-    conversation_variables: Sequence[Variable] | None = None
+        super().__init__(
+            system_variables=system_variables,
+            user_inputs=user_inputs,
+            environment_variables=environment_variables,
+            conversation_variables=conversation_variables,
+            **kwargs,
+        )
 
-    @model_validator(mode="after")
-    def val_model_after(self):
-        """
-        Append system variables
-        :return:
-        """
-        # Add system variables to the variable pool
         for key, value in self.system_variables.items():
             self.add((SYSTEM_VARIABLE_NODE_ID, key.value), value)
-        # Add environment variables to the variable pool
-        for var in self.environment_variables or []:
+        for var in self.environment_variables:
             self.add((ENVIRONMENT_VARIABLE_NODE_ID, var.name), var)
-        # Add conversation variables to the variable pool
-        for var in self.conversation_variables or []:
+        for var in self.conversation_variables:
             self.add((CONVERSATION_VARIABLE_NODE_ID, var.name), var)
-        return self
 
     def add(self, selector: Sequence[str], value: Any, /) -> None:
         """
         Adds a variable to the variable pool.
 
+        NOTE: You should not add a non-Segment value to the variable pool
+        even if it is allowed now.
+
         Args:
             selector (Sequence[str]): The selector for the variable.
             value (VariableValue): The value of the variable.
@@ -76,13 +95,10 @@ class VariablePool(BaseModel):
         if len(selector) < 2:
             raise ValueError("Invalid selector")
 
-        if value is None:
-            return
-
         if isinstance(value, Segment):
             v = value
         else:
-            v = factory.build_segment(value)
+            v = variable_factory.build_segment(value)
 
         hash_key = hash(tuple(selector[1:]))
         self.variable_dictionary[selector[0]][hash_key] = v
@@ -101,32 +117,25 @@ class VariablePool(BaseModel):
             ValueError: If the selector is invalid.
         """
         if len(selector) < 2:
-            raise ValueError("Invalid selector")
+            return None
+
         hash_key = hash(tuple(selector[1:]))
         value = self.variable_dictionary[selector[0]].get(hash_key)
+        if value is None:
+            selector, attr = selector[:-1], selector[-1]
+            # Python supports `attr in FileAttribute` only since 3.12
+            if attr not in {item.value for item in FileAttribute}:
+                return None
+            value = self.get(selector)
+            if not isinstance(value, FileSegment):
+                return None
+            attr = FileAttribute(attr)
+            attr_value = file_manager.get_attr(file=value.value, attr=attr)
+            return variable_factory.build_segment(attr_value)
+
         return value
 
-    @deprecated("This method is deprecated, use `get` instead.")
-    def get_any(self, selector: Sequence[str], /) -> Any | None:
-        """
-        Retrieves the value from the variable pool based on the given selector.
- - Args: - selector (Sequence[str]): The selector used to identify the variable. - - Returns: - Any: The value associated with the given selector. - - Raises: - ValueError: If the selector is invalid. - """ - if len(selector) < 2: - raise ValueError("Invalid selector") - hash_key = hash(tuple(selector[1:])) - value = self.variable_dictionary[selector[0]].get(hash_key) - return value.to_object() if value else None - def remove(self, selector: Sequence[str], /): """ Remove variables from the variable pool based on the given selector. @@ -145,14 +154,18 @@ class VariablePool(BaseModel): hash_key = hash(tuple(selector[1:])) self.variable_dictionary[selector[0]].pop(hash_key, None) - def remove_node(self, node_id: str, /): - """ - Remove all variables associated with a given node id. + def convert_template(self, template: str, /): + parts = VARIABLE_PATTERN.split(template) + segments = [] + for part in filter(lambda x: x, parts): + if "." in part and (variable := self.get(part.split("."))): + segments.append(variable) + else: + segments.append(variable_factory.build_segment(part)) + return SegmentGroup(value=segments) - Args: - node_id (str): The node id to remove. - - Returns: - None - """ - self.variable_dictionary.pop(node_id, None) + def get_file(self, selector: Sequence[str], /) -> FileSegment | None: + segment = self.get(selector) + if isinstance(segment, FileSegment): + return segment + return None diff --git a/api/core/workflow/entities/workflow_entities.py b/api/core/workflow/entities/workflow_entities.py index 0a1eb57de4..da56af1407 100644 --- a/api/core/workflow/entities/workflow_entities.py +++ b/api/core/workflow/entities/workflow_entities.py @@ -3,12 +3,13 @@ from typing import Optional from pydantic import BaseModel from core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities.base_node_data_entities import BaseIterationState -from core.workflow.entities.node_entities import NodeRunResult -from core.workflow.entities.variable_pool import VariablePool -from core.workflow.nodes.base_node import BaseNode, UserFrom +from core.workflow.nodes.base import BaseIterationState, BaseNode +from models.enums import UserFrom from models.workflow import Workflow, WorkflowType +from .node_entities import NodeRunResult +from .variable_pool import VariablePool + class WorkflowNodeAndResult: node: BaseNode diff --git a/api/core/workflow/errors.py b/api/core/workflow/errors.py index 07cbcd981e..bd4ccc1072 100644 --- a/api/core/workflow/errors.py +++ b/api/core/workflow/errors.py @@ -1,4 +1,4 @@ -from core.workflow.nodes.base_node import BaseNode +from core.workflow.nodes.base import BaseNode class WorkflowNodeRunFailedError(Exception): diff --git a/api/core/workflow/graph_engine/__init__.py b/api/core/workflow/graph_engine/__init__.py index e69de29bb2..2fee3d7fad 100644 --- a/api/core/workflow/graph_engine/__init__.py +++ b/api/core/workflow/graph_engine/__init__.py @@ -0,0 +1,3 @@ +from .entities import Graph, GraphInitParams, GraphRuntimeState, RuntimeRouteState + +__all__ = ["Graph", "GraphInitParams", "GraphRuntimeState", "RuntimeRouteState"] diff --git a/api/core/workflow/graph_engine/condition_handlers/condition_handler.py b/api/core/workflow/graph_engine/condition_handlers/condition_handler.py index eda5fe079c..bc3a15bd00 100644 --- a/api/core/workflow/graph_engine/condition_handlers/condition_handler.py +++ b/api/core/workflow/graph_engine/condition_handlers/condition_handler.py @@ -18,11 +18,10 @@ class ConditionRunConditionHandlerHandler(RunConditionHandler): # 
process condition condition_processor = ConditionProcessor() - input_conditions, group_result = condition_processor.process_conditions( - variable_pool=graph_runtime_state.variable_pool, conditions=self.condition.conditions + _, _, final_result = condition_processor.process_conditions( + variable_pool=graph_runtime_state.variable_pool, + conditions=self.condition.conditions, + operator="and", ) - # Apply the logical operator for the current case - compare_result = all(group_result) - - return compare_result + return final_result diff --git a/api/core/workflow/graph_engine/entities/__init__.py b/api/core/workflow/graph_engine/entities/__init__.py index e69de29bb2..6331a0b723 100644 --- a/api/core/workflow/graph_engine/entities/__init__.py +++ b/api/core/workflow/graph_engine/entities/__init__.py @@ -0,0 +1,6 @@ +from .graph import Graph +from .graph_init_params import GraphInitParams +from .graph_runtime_state import GraphRuntimeState +from .runtime_route_state import RuntimeRouteState + +__all__ = ["Graph", "GraphInitParams", "GraphRuntimeState", "RuntimeRouteState"] diff --git a/api/core/workflow/graph_engine/entities/event.py b/api/core/workflow/graph_engine/entities/event.py index 06dc4cb8f4..86d89e0a32 100644 --- a/api/core/workflow/graph_engine/entities/event.py +++ b/api/core/workflow/graph_engine/entities/event.py @@ -3,9 +3,9 @@ from typing import Any, Optional from pydantic import BaseModel, Field -from core.workflow.entities.base_node_data_entities import BaseNodeData -from core.workflow.entities.node_entities import NodeType from core.workflow.graph_engine.entities.runtime_route_state import RouteNodeState +from core.workflow.nodes import NodeType +from core.workflow.nodes.base import BaseNodeData class GraphEngineEvent(BaseModel): diff --git a/api/core/workflow/graph_engine/entities/graph.py b/api/core/workflow/graph_engine/entities/graph.py index 1175f4af2a..d87c039409 100644 --- a/api/core/workflow/graph_engine/entities/graph.py +++ b/api/core/workflow/graph_engine/entities/graph.py @@ -4,8 +4,8 @@ from typing import Any, Optional, cast from pydantic import BaseModel, Field -from core.workflow.entities.node_entities import NodeType from core.workflow.graph_engine.entities.run_condition import RunCondition +from core.workflow.nodes import NodeType from core.workflow.nodes.answer.answer_stream_generate_router import AnswerStreamGeneratorRouter from core.workflow.nodes.answer.entities import AnswerStreamGenerateRoute from core.workflow.nodes.end.end_stream_generate_router import EndStreamGeneratorRouter diff --git a/api/core/workflow/graph_engine/entities/graph_init_params.py b/api/core/workflow/graph_engine/entities/graph_init_params.py index 1a403f3e49..a0ecd824f4 100644 --- a/api/core/workflow/graph_engine/entities/graph_init_params.py +++ b/api/core/workflow/graph_engine/entities/graph_init_params.py @@ -4,7 +4,7 @@ from typing import Any from pydantic import BaseModel, Field from core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities.node_entities import UserFrom +from models.enums import UserFrom from models.workflow import WorkflowType diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py index 8342dbd13d..8f58af00ef 100644 --- a/api/core/workflow/graph_engine/graph_engine.py +++ b/api/core/workflow/graph_engine/graph_engine.py @@ -10,11 +10,7 @@ from flask import Flask, current_app from core.app.apps.base_app_queue_manager import GenerateTaskStoppedError from 
core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities.node_entities import ( - NodeRunMetadataKey, - NodeType, - UserFrom, -) +from core.workflow.entities.node_entities import NodeRunMetadataKey from core.workflow.entities.variable_pool import VariablePool, VariableValue from core.workflow.graph_engine.condition_handlers.condition_manager import ConditionManager from core.workflow.graph_engine.entities.event import ( @@ -36,12 +32,14 @@ from core.workflow.graph_engine.entities.graph import Graph, GraphEdge from core.workflow.graph_engine.entities.graph_init_params import GraphInitParams from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState from core.workflow.graph_engine.entities.runtime_route_state import RouteNodeState +from core.workflow.nodes import NodeType from core.workflow.nodes.answer.answer_stream_processor import AnswerStreamProcessor -from core.workflow.nodes.base_node import BaseNode +from core.workflow.nodes.base import BaseNode from core.workflow.nodes.end.end_stream_processor import EndStreamProcessor from core.workflow.nodes.event import RunCompletedEvent, RunRetrieverResourceEvent, RunStreamChunkEvent -from core.workflow.nodes.node_mapping import node_classes +from core.workflow.nodes.node_mapping import node_type_classes_mapping from extensions.ext_database import db +from models.enums import UserFrom from models.workflow import WorkflowNodeExecutionStatus, WorkflowType logger = logging.getLogger(__name__) @@ -132,15 +130,14 @@ class GraphEngine: yield GraphRunStartedEvent() try: - stream_processor_cls: type[AnswerStreamProcessor | EndStreamProcessor] if self.init_params.workflow_type == WorkflowType.CHAT: - stream_processor_cls = AnswerStreamProcessor + stream_processor = AnswerStreamProcessor( + graph=self.graph, variable_pool=self.graph_runtime_state.variable_pool + ) else: - stream_processor_cls = EndStreamProcessor - - stream_processor = stream_processor_cls( - graph=self.graph, variable_pool=self.graph_runtime_state.variable_pool - ) + stream_processor = EndStreamProcessor( + graph=self.graph, variable_pool=self.graph_runtime_state.variable_pool + ) # run graph generator = stream_processor.process(self._run(start_node_id=self.graph.root_node_id)) @@ -229,10 +226,8 @@ class GraphEngine: raise GraphRunFailedError(f"Node {node_id} config not found.") # convert to specific node - node_type = NodeType.value_of(node_config.get("data", {}).get("type")) - node_cls = node_classes.get(node_type) - if not node_cls: - raise GraphRunFailedError(f"Node {node_id} type {node_type} not found.") + node_type = NodeType(node_config.get("data", {}).get("type")) + node_cls = node_type_classes_mapping[node_type] previous_node_id = previous_route_node_state.node_id if previous_route_node_state else None diff --git a/api/core/workflow/nodes/__init__.py b/api/core/workflow/nodes/__init__.py index e69de29bb2..6101fcf9af 100644 --- a/api/core/workflow/nodes/__init__.py +++ b/api/core/workflow/nodes/__init__.py @@ -0,0 +1,3 @@ +from .enums import NodeType + +__all__ = ["NodeType"] diff --git a/api/core/workflow/nodes/answer/__init__.py b/api/core/workflow/nodes/answer/__init__.py index e69de29bb2..7a10f47eed 100644 --- a/api/core/workflow/nodes/answer/__init__.py +++ b/api/core/workflow/nodes/answer/__init__.py @@ -0,0 +1,4 @@ +from .answer_node import AnswerNode +from .entities import AnswerStreamGenerateRoute + +__all__ = ["AnswerStreamGenerateRoute", "AnswerNode"] diff --git a/api/core/workflow/nodes/answer/answer_node.py 
b/api/core/workflow/nodes/answer/answer_node.py index deacbbbbb0..520cbdbb60 100644 --- a/api/core/workflow/nodes/answer/answer_node.py +++ b/api/core/workflow/nodes/answer/answer_node.py @@ -1,7 +1,8 @@ from collections.abc import Mapping, Sequence from typing import Any, cast -from core.workflow.entities.node_entities import NodeRunResult, NodeType +from core.variables import ArrayFileSegment, FileSegment +from core.workflow.entities.node_entities import NodeRunResult from core.workflow.nodes.answer.answer_stream_generate_router import AnswerStreamGeneratorRouter from core.workflow.nodes.answer.entities import ( AnswerNodeData, @@ -9,12 +10,13 @@ from core.workflow.nodes.answer.entities import ( TextGenerateRouteChunk, VarGenerateRouteChunk, ) -from core.workflow.nodes.base_node import BaseNode +from core.workflow.nodes.base import BaseNode +from core.workflow.nodes.enums import NodeType from core.workflow.utils.variable_template_parser import VariableTemplateParser from models.workflow import WorkflowNodeExecutionStatus -class AnswerNode(BaseNode): +class AnswerNode(BaseNode[AnswerNodeData]): _node_data_cls = AnswerNodeData _node_type: NodeType = NodeType.ANSWER @@ -23,30 +25,35 @@ class AnswerNode(BaseNode): Run node :return: """ - node_data = self.node_data - node_data = cast(AnswerNodeData, node_data) - # generate routes - generate_routes = AnswerStreamGeneratorRouter.extract_generate_route_from_node_data(node_data) + generate_routes = AnswerStreamGeneratorRouter.extract_generate_route_from_node_data(self.node_data) answer = "" + files = [] for part in generate_routes: if part.type == GenerateRouteChunk.ChunkType.VAR: part = cast(VarGenerateRouteChunk, part) value_selector = part.value_selector - value = self.graph_runtime_state.variable_pool.get(value_selector) - - if value: - answer += value.markdown + variable = self.graph_runtime_state.variable_pool.get(value_selector) + if variable: + if isinstance(variable, FileSegment): + files.append(variable.value) + elif isinstance(variable, ArrayFileSegment): + files.extend(variable.value) + answer += variable.markdown else: part = cast(TextGenerateRouteChunk, part) answer += part.text - return NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED, outputs={"answer": answer}) + return NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED, outputs={"answer": answer, "files": files}) @classmethod def _extract_variable_selector_to_variable_mapping( - cls, graph_config: Mapping[str, Any], node_id: str, node_data: AnswerNodeData + cls, + *, + graph_config: Mapping[str, Any], + node_id: str, + node_data: AnswerNodeData, ) -> Mapping[str, Sequence[str]]: """ Extract variable selector to variable mapping @@ -55,9 +62,6 @@ class AnswerNode(BaseNode): :param node_data: node data :return: """ - node_data = node_data - node_data = cast(AnswerNodeData, node_data) - variable_template_parser = VariableTemplateParser(template=node_data.answer) variable_selectors = variable_template_parser.extract_variable_selectors() diff --git a/api/core/workflow/nodes/answer/answer_stream_generate_router.py b/api/core/workflow/nodes/answer/answer_stream_generate_router.py index bbd1f88867..bc4b056148 100644 --- a/api/core/workflow/nodes/answer/answer_stream_generate_router.py +++ b/api/core/workflow/nodes/answer/answer_stream_generate_router.py @@ -1,5 +1,4 @@ from core.prompt.utils.prompt_template_parser import PromptTemplateParser -from core.workflow.entities.node_entities import NodeType from core.workflow.nodes.answer.entities import ( AnswerNodeData, 
AnswerStreamGenerateRoute, @@ -7,6 +6,7 @@ from core.workflow.nodes.answer.entities import ( TextGenerateRouteChunk, VarGenerateRouteChunk, ) +from core.workflow.nodes.enums import NodeType from core.workflow.utils.variable_template_parser import VariableTemplateParser @@ -149,10 +149,10 @@ class AnswerStreamGeneratorRouter: source_node_id = edge.source_node_id source_node_type = node_id_config_mapping[source_node_id].get("data", {}).get("type") if source_node_type in { - NodeType.ANSWER.value, - NodeType.IF_ELSE.value, - NodeType.QUESTION_CLASSIFIER.value, - NodeType.ITERATION.value, + NodeType.ANSWER, + NodeType.IF_ELSE, + NodeType.QUESTION_CLASSIFIER, + NodeType.ITERATION, }: answer_dependencies[answer_node_id].append(source_node_id) else: diff --git a/api/core/workflow/nodes/answer/answer_stream_processor.py b/api/core/workflow/nodes/answer/answer_stream_processor.py index 32dbf436ec..8a768088da 100644 --- a/api/core/workflow/nodes/answer/answer_stream_processor.py +++ b/api/core/workflow/nodes/answer/answer_stream_processor.py @@ -1,8 +1,8 @@ import logging from collections.abc import Generator -from typing import Optional, cast +from typing import cast -from core.file.file_obj import FileVar +from core.file import FILE_MODEL_IDENTITY, File from core.workflow.entities.variable_pool import VariablePool from core.workflow.graph_engine.entities.event import ( GraphEngineEvent, @@ -22,7 +22,7 @@ class AnswerStreamProcessor(StreamProcessor): super().__init__(graph, variable_pool) self.generate_routes = graph.answer_stream_generate_routes self.route_position = {} - for answer_node_id, route_chunks in self.generate_routes.answer_generate_route.items(): + for answer_node_id in self.generate_routes.answer_generate_route: self.route_position[answer_node_id] = 0 self.current_stream_chunk_generating_node_ids: dict[str, list[str]] = {} @@ -203,7 +203,7 @@ class AnswerStreamProcessor(StreamProcessor): return files @classmethod - def _get_file_var_from_value(cls, value: dict | list) -> Optional[dict]: + def _get_file_var_from_value(cls, value: dict | list): """ Get file var from value :param value: variable value @@ -213,9 +213,9 @@ class AnswerStreamProcessor(StreamProcessor): return None if isinstance(value, dict): - if "__variant" in value and value["__variant"] == FileVar.__name__: + if "dify_model_identity" in value and value["dify_model_identity"] == FILE_MODEL_IDENTITY: return value - elif isinstance(value, FileVar): + elif isinstance(value, File): return value.to_dict() return None diff --git a/api/core/workflow/nodes/answer/entities.py b/api/core/workflow/nodes/answer/entities.py index e356e7fd70..a05cc44c99 100644 --- a/api/core/workflow/nodes/answer/entities.py +++ b/api/core/workflow/nodes/answer/entities.py @@ -1,8 +1,9 @@ +from collections.abc import Sequence from enum import Enum from pydantic import BaseModel, Field -from core.workflow.entities.base_node_data_entities import BaseNodeData +from core.workflow.nodes.base import BaseNodeData class AnswerNodeData(BaseNodeData): @@ -32,7 +33,7 @@ class VarGenerateRouteChunk(GenerateRouteChunk): type: GenerateRouteChunk.ChunkType = GenerateRouteChunk.ChunkType.VAR """generate route chunk type""" - value_selector: list[str] = Field(..., description="value selector") + value_selector: Sequence[str] = Field(..., description="value selector") class TextGenerateRouteChunk(GenerateRouteChunk): diff --git a/api/core/workflow/nodes/base_node.py b/api/core/workflow/nodes/base_node.py deleted file mode 100644 index 7bfe45a13c..0000000000 --- 
a/api/core/workflow/nodes/base_node.py +++ /dev/null @@ -1,117 +0,0 @@ -from abc import ABC, abstractmethod -from collections.abc import Generator, Mapping, Sequence -from typing import Any, Optional - -from core.workflow.entities.base_node_data_entities import BaseNodeData -from core.workflow.entities.node_entities import NodeRunResult, NodeType -from core.workflow.graph_engine.entities.event import InNodeEvent -from core.workflow.graph_engine.entities.graph import Graph -from core.workflow.graph_engine.entities.graph_init_params import GraphInitParams -from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState -from core.workflow.nodes.event import RunCompletedEvent, RunEvent - - -class BaseNode(ABC): - _node_data_cls: type[BaseNodeData] - _node_type: NodeType - - def __init__( - self, - id: str, - config: Mapping[str, Any], - graph_init_params: GraphInitParams, - graph: Graph, - graph_runtime_state: GraphRuntimeState, - previous_node_id: Optional[str] = None, - thread_pool_id: Optional[str] = None, - ) -> None: - self.id = id - self.tenant_id = graph_init_params.tenant_id - self.app_id = graph_init_params.app_id - self.workflow_type = graph_init_params.workflow_type - self.workflow_id = graph_init_params.workflow_id - self.graph_config = graph_init_params.graph_config - self.user_id = graph_init_params.user_id - self.user_from = graph_init_params.user_from - self.invoke_from = graph_init_params.invoke_from - self.workflow_call_depth = graph_init_params.call_depth - self.graph = graph - self.graph_runtime_state = graph_runtime_state - self.previous_node_id = previous_node_id - self.thread_pool_id = thread_pool_id - - node_id = config.get("id") - if not node_id: - raise ValueError("Node ID is required.") - - self.node_id = node_id - self.node_data = self._node_data_cls(**config.get("data", {})) - - @abstractmethod - def _run(self) -> NodeRunResult | Generator[RunEvent | InNodeEvent, None, None]: - """ - Run node - :return: - """ - raise NotImplementedError - - def run(self) -> Generator[RunEvent | InNodeEvent, None, None]: - """ - Run node entry - :return: - """ - result = self._run() - - if isinstance(result, NodeRunResult): - yield RunCompletedEvent(run_result=result) - else: - yield from result - - @classmethod - def extract_variable_selector_to_variable_mapping( - cls, graph_config: Mapping[str, Any], config: dict - ) -> Mapping[str, Sequence[str]]: - """ - Extract variable selector to variable mapping - :param graph_config: graph config - :param config: node config - :return: - """ - node_id = config.get("id") - if not node_id: - raise ValueError("Node ID is required when extracting variable selector to variable mapping.") - - node_data = cls._node_data_cls(**config.get("data", {})) - return cls._extract_variable_selector_to_variable_mapping( - graph_config=graph_config, node_id=node_id, node_data=node_data - ) - - @classmethod - def _extract_variable_selector_to_variable_mapping( - cls, graph_config: Mapping[str, Any], node_id: str, node_data: BaseNodeData - ) -> Mapping[str, Sequence[str]]: - """ - Extract variable selector to variable mapping - :param graph_config: graph config - :param node_id: node id - :param node_data: node data - :return: - """ - return {} - - @classmethod - def get_default_config(cls, filters: Optional[dict] = None) -> dict: - """ - Get default config of node. - :param filters: filter by node config parameters. 
- :return: - """ - return {} - - @property - def node_type(self) -> NodeType: - """ - Get node type - :return: - """ - return self._node_type diff --git a/api/core/workflow/nodes/code/__init__.py b/api/core/workflow/nodes/code/__init__.py index e69de29bb2..8c6dcc7fcc 100644 --- a/api/core/workflow/nodes/code/__init__.py +++ b/api/core/workflow/nodes/code/__init__.py @@ -0,0 +1,3 @@ +from .code_node import CodeNode + +__all__ = ["CodeNode"] diff --git a/api/core/workflow/nodes/code/code_node.py b/api/core/workflow/nodes/code/code_node.py index 9da7ad99f3..9d7d9027c3 100644 --- a/api/core/workflow/nodes/code/code_node.py +++ b/api/core/workflow/nodes/code/code_node.py @@ -1,18 +1,19 @@ from collections.abc import Mapping, Sequence -from typing import Any, Optional, Union, cast +from typing import Any, Optional, Union from configs import dify_config from core.helper.code_executor.code_executor import CodeExecutionError, CodeExecutor, CodeLanguage from core.helper.code_executor.code_node_provider import CodeNodeProvider from core.helper.code_executor.javascript.javascript_code_provider import JavascriptCodeProvider from core.helper.code_executor.python3.python3_code_provider import Python3CodeProvider -from core.workflow.entities.node_entities import NodeRunResult, NodeType -from core.workflow.nodes.base_node import BaseNode +from core.workflow.entities.node_entities import NodeRunResult +from core.workflow.nodes.base import BaseNode from core.workflow.nodes.code.entities import CodeNodeData +from core.workflow.nodes.enums import NodeType from models.workflow import WorkflowNodeExecutionStatus -class CodeNode(BaseNode): +class CodeNode(BaseNode[CodeNodeData]): _node_data_cls = CodeNodeData _node_type = NodeType.CODE @@ -33,24 +34,22 @@ class CodeNode(BaseNode): return code_provider.get_default_config() def _run(self) -> NodeRunResult: - """ - Run code - :return: - """ - node_data = self.node_data - node_data = cast(CodeNodeData, node_data) - # Get code language - code_language = node_data.code_language - code = node_data.code + code_language = self.node_data.code_language + code = self.node_data.code # Get variables variables = {} - for variable_selector in node_data.variables: - variable = variable_selector.variable - value = self.graph_runtime_state.variable_pool.get_any(variable_selector.value_selector) - - variables[variable] = value + for variable_selector in self.node_data.variables: + variable_name = variable_selector.variable + variable = self.graph_runtime_state.variable_pool.get(variable_selector.value_selector) + if variable is None: + return NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + inputs=variables, + error=f"Variable `{variable_selector.value_selector}` not found", + ) + variables[variable_name] = variable.to_object() # Run code try: result = CodeExecutor.execute_workflow_code_template( @@ -60,7 +59,7 @@ class CodeNode(BaseNode): ) # Transform result - result = self._transform_result(result, node_data.outputs) + result = self._transform_result(result, self.node_data.outputs) except (CodeExecutionError, ValueError) as e: return NodeRunResult(status=WorkflowNodeExecutionStatus.FAILED, inputs=variables, error=str(e)) @@ -316,7 +315,11 @@ class CodeNode(BaseNode): @classmethod def _extract_variable_selector_to_variable_mapping( - cls, graph_config: Mapping[str, Any], node_id: str, node_data: CodeNodeData + cls, + *, + graph_config: Mapping[str, Any], + node_id: str, + node_data: CodeNodeData, ) -> Mapping[str, Sequence[str]]: """ Extract variable selector to 
variable mapping diff --git a/api/core/workflow/nodes/code/entities.py b/api/core/workflow/nodes/code/entities.py index 5eb0e0f63f..e78183baf1 100644 --- a/api/core/workflow/nodes/code/entities.py +++ b/api/core/workflow/nodes/code/entities.py @@ -3,8 +3,8 @@ from typing import Literal, Optional from pydantic import BaseModel from core.helper.code_executor.code_executor import CodeLanguage -from core.workflow.entities.base_node_data_entities import BaseNodeData from core.workflow.entities.variable_entities import VariableSelector +from core.workflow.nodes.base import BaseNodeData class CodeNodeData(BaseNodeData): diff --git a/api/core/workflow/nodes/document_extractor/node.py b/api/core/workflow/nodes/document_extractor/node.py index b4ffee1f13..2520caf713 100644 --- a/api/core/workflow/nodes/document_extractor/node.py +++ b/api/core/workflow/nodes/document_extractor/node.py @@ -1,5 +1,6 @@ import csv import io +import json import docx import pandas as pd @@ -77,34 +78,31 @@ def _extract_text_by_mime_type(*, file_content: bytes, mime_type: str) -> str: """Extract text from a file based on its MIME type.""" - if mime_type.startswith("text/plain") or mime_type in {"text/html", "text/htm", "text/markdown", "text/xml"}: - return _extract_text_from_plain_text(file_content) - elif mime_type == "application/pdf": - return _extract_text_from_pdf(file_content) - elif mime_type in { - "application/vnd.openxmlformats-officedocument.wordprocessingml.document", - "application/msword", - }: - return _extract_text_from_doc(file_content) - elif mime_type == "text/csv": - return _extract_text_from_csv(file_content) - elif mime_type in { - "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", - "application/vnd.ms-excel", - }: - return _extract_text_from_excel(file_content) - elif mime_type == "application/vnd.ms-powerpoint": - return _extract_text_from_ppt(file_content) - elif mime_type == "application/vnd.openxmlformats-officedocument.presentationml.presentation": - return _extract_text_from_pptx(file_content) - elif mime_type == "application/epub+zip": - return _extract_text_from_epub(file_content) - elif mime_type == "message/rfc822": - return _extract_text_from_eml(file_content) - elif mime_type == "application/vnd.ms-outlook": - return _extract_text_from_msg(file_content) - else: - raise UnsupportedFileTypeError(f"Unsupported MIME type: {mime_type}") + match mime_type: + case "text/plain" | "text/html" | "text/htm" | "text/markdown" | "text/xml": + return _extract_text_from_plain_text(file_content) + case "application/pdf": + return _extract_text_from_pdf(file_content) + case "application/vnd.openxmlformats-officedocument.wordprocessingml.document" | "application/msword": + return _extract_text_from_doc(file_content) + case "text/csv": + return _extract_text_from_csv(file_content) + case "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" | "application/vnd.ms-excel": + return _extract_text_from_excel(file_content) + case "application/vnd.ms-powerpoint": + return _extract_text_from_ppt(file_content) + case "application/vnd.openxmlformats-officedocument.presentationml.presentation": + return _extract_text_from_pptx(file_content) + case "application/epub+zip": + return _extract_text_from_epub(file_content) + case "message/rfc822": + return _extract_text_from_eml(file_content) + case "application/vnd.ms-outlook": + return _extract_text_from_msg(file_content) + case "application/json": + return _extract_text_from_json(file_content) + case _: + raise UnsupportedFileTypeError(f"Unsupported MIME type: {mime_type}") def _extract_text_by_file_extension(*, file_content: bytes, file_extension: str) -> str: @@ -112,6 +110,8 @@ def _extract_text_by_file_extension(file_content: bytes, file_extension: str) -> str: match file_extension: case ".txt" | ".markdown" | ".md" | ".html" | ".htm" | ".xml": return _extract_text_from_plain_text(file_content) + case ".json": + return _extract_text_from_json(file_content) case ".pdf": return _extract_text_from_pdf(file_content) case ".doc" | ".docx": @@ -141,6 +141,14 @@ def _extract_text_from_plain_text(file_content: bytes) -> str: raise TextExtractionError("Failed to decode plain text file") from e +def _extract_text_from_json(file_content: bytes) -> str: + try: + json_data = json.loads(file_content.decode("utf-8")) + return json.dumps(json_data, indent=2, ensure_ascii=False) + except (UnicodeDecodeError, json.JSONDecodeError) as e: + raise TextExtractionError(f"Failed to decode or parse JSON file: {e}") from e + + def _extract_text_from_pdf(file_content: bytes) -> str: @@ -177,19 +185,19 @@ def _download_file_content(file: File) -> bytes: elif file.transfer_method == FileTransferMethod.LOCAL_FILE: return file_manager.download(file) else: raise ValueError(f"Unsupported transfer method: {file.transfer_method}") except Exception as e: raise FileDownloadError(f"Error downloading file: {str(e)}") from e def _extract_text_from_file(file: File): - if file.mime_type is None: - raise UnsupportedFileTypeError("Unable to determine file type: MIME type is missing") file_content = _download_file_content(file) - if file.transfer_method == FileTransferMethod.REMOTE_URL: - extracted_text = _extract_text_by_mime_type(file_content=file_content, mime_type=file.mime_type) + if file.extension: + extracted_text = _extract_text_by_file_extension(file_content=file_content, file_extension=file.extension) + elif file.mime_type: + extracted_text = _extract_text_by_mime_type(file_content=file_content, mime_type=file.mime_type) else: - extracted_text = _extract_text_by_file_extension(file_content=file_content, file_extension=file.extension) + raise UnsupportedFileTypeError("Unable to determine file type: MIME type or file extension is missing") return extracted_text diff --git a/api/core/workflow/nodes/end/__init__.py b/api/core/workflow/nodes/end/__init__.py index e69de29bb2..adb381701c 100644 --- a/api/core/workflow/nodes/end/__init__.py +++ b/api/core/workflow/nodes/end/__init__.py @@ -0,0 +1,4 @@ +from .end_node import EndNode +from .entities import EndStreamParam + +__all__ = ["EndStreamParam", "EndNode"] diff --git a/api/core/workflow/nodes/end/end_node.py b/api/core/workflow/nodes/end/end_node.py index 7b78d67be8..2398e4e89d 100644 --- a/api/core/workflow/nodes/end/end_node.py +++ b/api/core/workflow/nodes/end/end_node.py @@ -1,13 +1,14 @@ from collections.abc import Mapping, Sequence -from typing import Any, cast +from typing import Any -from core.workflow.entities.node_entities import NodeRunResult, NodeType -from core.workflow.nodes.base_node import BaseNode +from core.workflow.entities.node_entities import NodeRunResult +from core.workflow.nodes.base import BaseNode from core.workflow.nodes.end.entities import EndNodeData +from core.workflow.nodes.enums import NodeType from models.workflow import WorkflowNodeExecutionStatus -class EndNode(BaseNode): +class EndNode(BaseNode[EndNodeData]): _node_data_cls = EndNodeData _node_type = NodeType.END @@ -16,20 +17,27 @@ class EndNode(BaseNode): Run node :return: """ - node_data = self.node_data - node_data = cast(EndNodeData, node_data) - output_variables = node_data.outputs + output_variables = self.node_data.outputs outputs = {} for variable_selector in output_variables: - value = self.graph_runtime_state.variable_pool.get_any(variable_selector.value_selector) + variable = self.graph_runtime_state.variable_pool.get(variable_selector.value_selector) + value = variable.to_object() if variable is not None else None outputs[variable_selector.variable] = value - return NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=outputs, outputs=outputs) + return NodeRunResult( + status=WorkflowNodeExecutionStatus.SUCCEEDED, + inputs=outputs, + outputs=outputs, + ) @classmethod def _extract_variable_selector_to_variable_mapping( - cls, graph_config: Mapping[str, Any], node_id: str, node_data: EndNodeData + cls, + *, + graph_config: Mapping[str, Any], + node_id: str, + node_data: EndNodeData, ) -> Mapping[str, Sequence[str]]: """ Extract variable selector to variable mapping diff --git a/api/core/workflow/nodes/end/end_stream_generate_router.py b/api/core/workflow/nodes/end/end_stream_generate_router.py index 9a7d2ecde3..ea8b6b5042 100644 --- a/api/core/workflow/nodes/end/end_stream_generate_router.py +++ b/api/core/workflow/nodes/end/end_stream_generate_router.py @@ -1,5 +1,5 @@ -from core.workflow.entities.node_entities import NodeType from core.workflow.nodes.end.entities import EndNodeData, EndStreamParam +from core.workflow.nodes.enums import NodeType class EndStreamGeneratorRouter: diff --git a/api/core/workflow/nodes/end/entities.py b/api/core/workflow/nodes/end/entities.py index c3270ac22a..c16e85b0eb 100644 --- a/api/core/workflow/nodes/end/entities.py +++ b/api/core/workflow/nodes/end/entities.py @@ -1,7 +1,7 @@ from pydantic
import BaseModel, Field -from core.workflow.entities.base_node_data_entities import BaseNodeData from core.workflow.entities.variable_entities import VariableSelector +from core.workflow.nodes.base import BaseNodeData class EndNodeData(BaseNodeData): diff --git a/api/core/workflow/nodes/event.py b/api/core/workflow/nodes/event.py deleted file mode 100644 index 276c13a6d4..0000000000 --- a/api/core/workflow/nodes/event.py +++ /dev/null @@ -1,20 +0,0 @@ -from pydantic import BaseModel, Field - -from core.workflow.entities.node_entities import NodeRunResult - - -class RunCompletedEvent(BaseModel): - run_result: NodeRunResult = Field(..., description="run result") - - -class RunStreamChunkEvent(BaseModel): - chunk_content: str = Field(..., description="chunk content") - from_variable_selector: list[str] = Field(..., description="from variable selector") - - -class RunRetrieverResourceEvent(BaseModel): - retriever_resources: list[dict] = Field(..., description="retriever resources") - context: str = Field(..., description="context") - - -RunEvent = RunCompletedEvent | RunStreamChunkEvent | RunRetrieverResourceEvent diff --git a/api/core/workflow/nodes/http_request/__init__.py b/api/core/workflow/nodes/http_request/__init__.py index e69de29bb2..9408c2dde0 100644 --- a/api/core/workflow/nodes/http_request/__init__.py +++ b/api/core/workflow/nodes/http_request/__init__.py @@ -0,0 +1,4 @@ +from .entities import BodyData, HttpRequestNodeAuthorization, HttpRequestNodeBody, HttpRequestNodeData +from .node import HttpRequestNode + +__all__ = ["HttpRequestNodeData", "HttpRequestNodeAuthorization", "HttpRequestNodeBody", "BodyData", "HttpRequestNode"] diff --git a/api/core/workflow/nodes/http_request/entities.py b/api/core/workflow/nodes/http_request/entities.py index 66dd1f2dc6..dec76a277e 100644 --- a/api/core/workflow/nodes/http_request/entities.py +++ b/api/core/workflow/nodes/http_request/entities.py @@ -1,15 +1,25 @@ -from typing import Literal, Optional, Union +from collections.abc import Sequence +from typing import Any, Literal, Optional -from pydantic import BaseModel, ValidationInfo, field_validator +import httpx +from pydantic import BaseModel, Field, ValidationInfo, field_validator from configs import dify_config -from core.workflow.entities.base_node_data_entities import BaseNodeData +from core.workflow.nodes.base import BaseNodeData + +NON_FILE_CONTENT_TYPES = ( + "application/json", + "application/xml", + "text/html", + "text/plain", + "application/x-www-form-urlencoded", +) class HttpRequestNodeAuthorizationConfig(BaseModel): - type: Literal[None, "basic", "bearer", "custom"] - api_key: Union[None, str] = None - header: Union[None, str] = None + type: Literal["basic", "bearer", "custom"] + api_key: str + header: str = "" class HttpRequestNodeAuthorization(BaseModel): @@ -31,9 +41,26 @@ class HttpRequestNodeAuthorization(BaseModel): return v +class BodyData(BaseModel): + key: str = "" + type: Literal["file", "text"] + value: str = "" + file: Sequence[str] = Field(default_factory=list) + + class HttpRequestNodeBody(BaseModel): - type: Literal["none", "form-data", "x-www-form-urlencoded", "raw-text", "json"] - data: Union[None, str] = None + type: Literal["none", "form-data", "x-www-form-urlencoded", "raw-text", "json", "binary"] + data: Sequence[BodyData] = Field(default_factory=list) + + @field_validator("data", mode="before") + @classmethod + def check_data(cls, v: Any): + """For compatibility, if body is not set, return empty list.""" + if not v: + return [] + if isinstance(v, str): + 
return [BodyData(key="", type="text", value=v)] + return v class HttpRequestNodeTimeout(BaseModel): @@ -54,3 +81,51 @@ class HttpRequestNodeData(BaseNodeData): params: str body: Optional[HttpRequestNodeBody] = None timeout: Optional[HttpRequestNodeTimeout] = None + + +class Response: + headers: dict[str, str] + response: httpx.Response + + def __init__(self, response: httpx.Response): + self.response = response + self.headers = dict(response.headers) + + @property + def is_file(self): + content_type = self.content_type + content_disposition = self.response.headers.get("Content-Disposition", "") + + return "attachment" in content_disposition or ( + not any(non_file in content_type for non_file in NON_FILE_CONTENT_TYPES) + and any(file_type in content_type for file_type in ("application/", "image/", "audio/", "video/")) + ) + + @property + def content_type(self) -> str: + return self.headers.get("Content-Type", "") + + @property + def text(self) -> str: + return self.response.text + + @property + def content(self) -> bytes: + return self.response.content + + @property + def status_code(self) -> int: + return self.response.status_code + + @property + def size(self) -> int: + return len(self.content) + + @property + def readable_size(self) -> str: + if self.size < 1024: + return f"{self.size} bytes" + elif self.size < 1024 * 1024: + return f"{(self.size / 1024):.2f} KB" + else: + return f"{(self.size / 1024 / 1024):.2f} MB" diff --git a/api/core/workflow/nodes/http_request/http_executor.py b/api/core/workflow/nodes/http_request/http_executor.py deleted file mode 100644 index f8ab4e3132..0000000000 --- a/api/core/workflow/nodes/http_request/http_executor.py +++ /dev/null @@ -1,343 +0,0 @@ -import json -from copy import deepcopy -from random import randint -from typing import Any, Optional, Union -from urllib.parse import urlencode - -import httpx - -from configs import dify_config -from core.helper import ssrf_proxy -from core.workflow.entities.variable_entities import VariableSelector -from core.workflow.entities.variable_pool import VariablePool -from core.workflow.nodes.http_request.entities import ( - HttpRequestNodeAuthorization, - HttpRequestNodeBody, - HttpRequestNodeData, - HttpRequestNodeTimeout, -) -from core.workflow.utils.variable_template_parser import VariableTemplateParser - - -class HttpExecutorResponse: - headers: dict[str, str] - response: httpx.Response - - def __init__(self, response: httpx.Response): - self.response = response - self.headers = dict(response.headers) if isinstance(self.response, httpx.Response) else {} - - @property - def is_file(self) -> bool: - """ - check if response is file - """ - content_type = self.get_content_type() - file_content_types = ["image", "audio", "video"] - - return any(v in content_type for v in file_content_types) - - def get_content_type(self) -> str: - return self.headers.get("content-type", "") - - def extract_file(self) -> tuple[str, bytes]: - """ - extract file from response if content type is file related - """ - if self.is_file: - return self.get_content_type(), self.body - - return "", b"" - - @property - def content(self) -> str: - if isinstance(self.response, httpx.Response): - return self.response.text - else: - raise ValueError(f"Invalid response type {type(self.response)}") - - @property - def body(self) -> bytes: - if isinstance(self.response, httpx.Response): - return self.response.content - else: - raise ValueError(f"Invalid response type {type(self.response)}") - - @property - def status_code(self) -> int: - if 
isinstance(self.response, httpx.Response): - return self.response.status_code - else: - raise ValueError(f"Invalid response type {type(self.response)}") - - @property - def size(self) -> int: - return len(self.body) - - @property - def readable_size(self) -> str: - if self.size < 1024: - return f"{self.size} bytes" - elif self.size < 1024 * 1024: - return f"{(self.size / 1024):.2f} KB" - else: - return f"{(self.size / 1024 / 1024):.2f} MB" - - -class HttpExecutor: - server_url: str - method: str - authorization: HttpRequestNodeAuthorization - params: dict[str, Any] - headers: dict[str, Any] - body: Union[None, str] - files: Union[None, dict[str, Any]] - boundary: str - variable_selectors: list[VariableSelector] - timeout: HttpRequestNodeTimeout - - def __init__( - self, - node_data: HttpRequestNodeData, - timeout: HttpRequestNodeTimeout, - variable_pool: Optional[VariablePool] = None, - ): - self.server_url = node_data.url - self.method = node_data.method - self.authorization = node_data.authorization - self.timeout = timeout - self.params = {} - self.headers = {} - self.body = None - self.files = None - - # init template - self.variable_selectors = [] - self._init_template(node_data, variable_pool) - - @staticmethod - def _is_json_body(body: HttpRequestNodeBody): - """ - check if body is json - """ - if body and body.type == "json" and body.data: - try: - json.loads(body.data) - return True - except: - return False - - return False - - @staticmethod - def _to_dict(convert_text: str): - """ - Convert the string like `aa:bb\n cc:dd` to dict `{aa:bb, cc:dd}` - """ - kv_paris = convert_text.split("\n") - result = {} - for kv in kv_paris: - if not kv.strip(): - continue - - kv = kv.split(":", maxsplit=1) - if len(kv) == 1: - k, v = kv[0], "" - else: - k, v = kv - result[k.strip()] = v - return result - - def _init_template(self, node_data: HttpRequestNodeData, variable_pool: Optional[VariablePool] = None): - # extract all template in url - self.server_url, server_url_variable_selectors = self._format_template(node_data.url, variable_pool) - - # extract all template in params - params, params_variable_selectors = self._format_template(node_data.params, variable_pool) - self.params = self._to_dict(params) - - # extract all template in headers - headers, headers_variable_selectors = self._format_template(node_data.headers, variable_pool) - self.headers = self._to_dict(headers) - - # extract all template in body - body_data_variable_selectors = [] - if node_data.body: - # check if it's a valid JSON - is_valid_json = self._is_json_body(node_data.body) - - body_data = node_data.body.data or "" - if body_data: - body_data, body_data_variable_selectors = self._format_template(body_data, variable_pool, is_valid_json) - - content_type_is_set = any(key.lower() == "content-type" for key in self.headers) - if node_data.body.type == "json" and not content_type_is_set: - self.headers["Content-Type"] = "application/json" - elif node_data.body.type == "x-www-form-urlencoded" and not content_type_is_set: - self.headers["Content-Type"] = "application/x-www-form-urlencoded" - - if node_data.body.type in {"form-data", "x-www-form-urlencoded"}: - body = self._to_dict(body_data) - - if node_data.body.type == "form-data": - self.files = {k: ("", v) for k, v in body.items()} - random_str = lambda n: "".join([chr(randint(97, 122)) for _ in range(n)]) - self.boundary = f"----WebKitFormBoundary{random_str(16)}" - - self.headers["Content-Type"] = f"multipart/form-data; boundary={self.boundary}" - else: - self.body = 
urlencode(body) - elif node_data.body.type in {"json", "raw-text"}: - self.body = body_data - elif node_data.body.type == "none": - self.body = "" - - self.variable_selectors = ( - server_url_variable_selectors - + params_variable_selectors - + headers_variable_selectors - + body_data_variable_selectors - ) - - def _assembling_headers(self) -> dict[str, Any]: - authorization = deepcopy(self.authorization) - headers = deepcopy(self.headers) or {} - if self.authorization.type == "api-key": - if self.authorization.config is None: - raise ValueError("self.authorization config is required") - if authorization.config is None: - raise ValueError("authorization config is required") - - if self.authorization.config.api_key is None: - raise ValueError("api_key is required") - - if not authorization.config.header: - authorization.config.header = "Authorization" - - if self.authorization.config.type == "bearer": - headers[authorization.config.header] = f"Bearer {authorization.config.api_key}" - elif self.authorization.config.type == "basic": - headers[authorization.config.header] = f"Basic {authorization.config.api_key}" - elif self.authorization.config.type == "custom": - headers[authorization.config.header] = authorization.config.api_key - - return headers - - def _validate_and_parse_response(self, response: httpx.Response) -> HttpExecutorResponse: - """ - validate the response - """ - if isinstance(response, httpx.Response): - executor_response = HttpExecutorResponse(response) - else: - raise ValueError(f"Invalid response type {type(response)}") - - threshold_size = ( - dify_config.HTTP_REQUEST_NODE_MAX_BINARY_SIZE - if executor_response.is_file - else dify_config.HTTP_REQUEST_NODE_MAX_TEXT_SIZE - ) - if executor_response.size > threshold_size: - raise ValueError( - f'{"File" if executor_response.is_file else "Text"} size is too large,' - f' max size is {threshold_size / 1024 / 1024:.2f} MB,' - f' but current size is {executor_response.readable_size}.' 
- ) - - return executor_response - - def _do_http_request(self, headers: dict[str, Any]) -> httpx.Response: - """ - do http request depending on api bundle - """ - kwargs = { - "url": self.server_url, - "headers": headers, - "params": self.params, - "timeout": (self.timeout.connect, self.timeout.read, self.timeout.write), - "follow_redirects": True, - } - - if self.method in {"get", "head", "post", "put", "delete", "patch"}: - response = getattr(ssrf_proxy, self.method)(data=self.body, files=self.files, **kwargs) - else: - raise ValueError(f"Invalid http method {self.method}") - return response - - def invoke(self) -> HttpExecutorResponse: - """ - invoke http request - """ - # assemble headers - headers = self._assembling_headers() - - # do http request - response = self._do_http_request(headers) - - # validate response - return self._validate_and_parse_response(response) - - def to_raw_request(self) -> str: - """ - convert to raw request - """ - server_url = self.server_url - if self.params: - server_url += f"?{urlencode(self.params)}" - - raw_request = f"{self.method.upper()} {server_url} HTTP/1.1\n" - - headers = self._assembling_headers() - for k, v in headers.items(): - # get authorization header - if self.authorization.type == "api-key": - authorization_header = "Authorization" - if self.authorization.config and self.authorization.config.header: - authorization_header = self.authorization.config.header - - if k.lower() == authorization_header.lower(): - raw_request += f'{k}: {"*" * len(v)}\n' - continue - - raw_request += f"{k}: {v}\n" - - raw_request += "\n" - - # if files, use multipart/form-data with boundary - if self.files: - boundary = self.boundary - raw_request += f"--{boundary}" - for k, v in self.files.items(): - raw_request += f'\nContent-Disposition: form-data; name="{k}"\n\n' - raw_request += f"{v[1]}\n" - raw_request += f"--{boundary}" - raw_request += "--" - else: - raw_request += self.body or "" - - return raw_request - - def _format_template( - self, template: str, variable_pool: Optional[VariablePool], escape_quotes: bool = False - ) -> tuple[str, list[VariableSelector]]: - """ - format template - """ - variable_template_parser = VariableTemplateParser(template=template) - variable_selectors = variable_template_parser.extract_variable_selectors() - - if variable_pool: - variable_value_mapping = {} - for variable_selector in variable_selectors: - variable = variable_pool.get_any(variable_selector.value_selector) - if variable is None: - raise ValueError(f"Variable {variable_selector.variable} not found") - if escape_quotes and isinstance(variable, str): - value = variable.replace('"', '\\"').replace("\n", "\\n") - else: - value = variable - variable_value_mapping[variable_selector.variable] = value - - return variable_template_parser.format(variable_value_mapping), variable_selectors - else: - return template, variable_selectors diff --git a/api/core/workflow/nodes/http_request/http_request_node.py b/api/core/workflow/nodes/http_request/http_request_node.py deleted file mode 100644 index cd40819126..0000000000 --- a/api/core/workflow/nodes/http_request/http_request_node.py +++ /dev/null @@ -1,165 +0,0 @@ -import logging -from collections.abc import Mapping, Sequence -from mimetypes import guess_extension -from os import path -from typing import Any, cast - -from configs import dify_config -from core.app.segments import parser -from core.file.file_obj import FileTransferMethod, FileType, FileVar -from core.tools.tool_file_manager import ToolFileManager -from 
core.workflow.entities.node_entities import NodeRunResult, NodeType -from core.workflow.nodes.base_node import BaseNode -from core.workflow.nodes.http_request.entities import ( - HttpRequestNodeData, - HttpRequestNodeTimeout, -) -from core.workflow.nodes.http_request.http_executor import HttpExecutor, HttpExecutorResponse -from models.workflow import WorkflowNodeExecutionStatus - -HTTP_REQUEST_DEFAULT_TIMEOUT = HttpRequestNodeTimeout( - connect=dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT, - read=dify_config.HTTP_REQUEST_MAX_READ_TIMEOUT, - write=dify_config.HTTP_REQUEST_MAX_WRITE_TIMEOUT, -) - - -class HttpRequestNode(BaseNode): - _node_data_cls = HttpRequestNodeData - _node_type = NodeType.HTTP_REQUEST - - @classmethod - def get_default_config(cls, filters: dict | None = None) -> dict: - return { - "type": "http-request", - "config": { - "method": "get", - "authorization": { - "type": "no-auth", - }, - "body": {"type": "none"}, - "timeout": { - **HTTP_REQUEST_DEFAULT_TIMEOUT.model_dump(), - "max_connect_timeout": dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT, - "max_read_timeout": dify_config.HTTP_REQUEST_MAX_READ_TIMEOUT, - "max_write_timeout": dify_config.HTTP_REQUEST_MAX_WRITE_TIMEOUT, - }, - }, - } - - def _run(self) -> NodeRunResult: - node_data: HttpRequestNodeData = cast(HttpRequestNodeData, self.node_data) - # TODO: Switch to use segment directly - if node_data.authorization.config and node_data.authorization.config.api_key: - node_data.authorization.config.api_key = parser.convert_template( - template=node_data.authorization.config.api_key, variable_pool=self.graph_runtime_state.variable_pool - ).text - - # init http executor - http_executor = None - try: - http_executor = HttpExecutor( - node_data=node_data, - timeout=self._get_request_timeout(node_data), - variable_pool=self.graph_runtime_state.variable_pool, - ) - - # invoke http executor - response = http_executor.invoke() - except Exception as e: - process_data = {} - if http_executor: - process_data = { - "request": http_executor.to_raw_request(), - } - return NodeRunResult( - status=WorkflowNodeExecutionStatus.FAILED, - error=str(e), - process_data=process_data, - ) - - files = self.extract_files(http_executor.server_url, response) - - return NodeRunResult( - status=WorkflowNodeExecutionStatus.SUCCEEDED, - outputs={ - "status_code": response.status_code, - "body": response.content if not files else "", - "headers": response.headers, - "files": files, - }, - process_data={ - "request": http_executor.to_raw_request(), - }, - ) - - @staticmethod - def _get_request_timeout(node_data: HttpRequestNodeData) -> HttpRequestNodeTimeout: - timeout = node_data.timeout - if timeout is None: - return HTTP_REQUEST_DEFAULT_TIMEOUT - - timeout.connect = timeout.connect or HTTP_REQUEST_DEFAULT_TIMEOUT.connect - timeout.read = timeout.read or HTTP_REQUEST_DEFAULT_TIMEOUT.read - timeout.write = timeout.write or HTTP_REQUEST_DEFAULT_TIMEOUT.write - return timeout - - @classmethod - def _extract_variable_selector_to_variable_mapping( - cls, graph_config: Mapping[str, Any], node_id: str, node_data: HttpRequestNodeData - ) -> Mapping[str, Sequence[str]]: - """ - Extract variable selector to variable mapping - :param graph_config: graph config - :param node_id: node id - :param node_data: node data - :return: - """ - try: - http_executor = HttpExecutor(node_data=node_data, timeout=HTTP_REQUEST_DEFAULT_TIMEOUT) - - variable_selectors = http_executor.variable_selectors - - variable_mapping = {} - for variable_selector in variable_selectors: - 
variable_mapping[node_id + "." + variable_selector.variable] = variable_selector.value_selector - - return variable_mapping - except Exception as e: - logging.exception(f"Failed to extract variable selector to variable mapping: {e}") - return {} - - def extract_files(self, url: str, response: HttpExecutorResponse) -> list[FileVar]: - """ - Extract files from response - """ - files = [] - mimetype, file_binary = response.extract_file() - - if mimetype: - # extract filename from url - filename = path.basename(url) - # extract extension if possible - extension = guess_extension(mimetype) or ".bin" - - tool_file = ToolFileManager.create_file_by_raw( - user_id=self.user_id, - tenant_id=self.tenant_id, - conversation_id=None, - file_binary=file_binary, - mimetype=mimetype, - ) - - files.append( - FileVar( - tenant_id=self.tenant_id, - type=FileType.IMAGE, - transfer_method=FileTransferMethod.TOOL_FILE, - related_id=tool_file.id, - filename=filename, - extension=extension, - mime_type=mimetype, - ) - ) - - return files diff --git a/api/core/workflow/nodes/if_else/__init__.py b/api/core/workflow/nodes/if_else/__init__.py index e69de29bb2..afa0e8112c 100644 --- a/api/core/workflow/nodes/if_else/__init__.py +++ b/api/core/workflow/nodes/if_else/__init__.py @@ -0,0 +1,3 @@ +from .if_else_node import IfElseNode + +__all__ = ["IfElseNode"] diff --git a/api/core/workflow/nodes/if_else/entities.py b/api/core/workflow/nodes/if_else/entities.py index 54c1081fd3..23f5d2cc31 100644 --- a/api/core/workflow/nodes/if_else/entities.py +++ b/api/core/workflow/nodes/if_else/entities.py @@ -1,8 +1,8 @@ from typing import Literal, Optional -from pydantic import BaseModel +from pydantic import BaseModel, Field -from core.workflow.entities.base_node_data_entities import BaseNodeData +from core.workflow.nodes.base import BaseNodeData from core.workflow.utils.condition.entities import Condition @@ -21,6 +21,6 @@ class IfElseNodeData(BaseNodeData): conditions: list[Condition] logical_operator: Optional[Literal["and", "or"]] = "and" - conditions: Optional[list[Condition]] = None + conditions: Optional[list[Condition]] = Field(default=None, deprecated=True) cases: Optional[list[Case]] = None diff --git a/api/core/workflow/nodes/if_else/if_else_node.py b/api/core/workflow/nodes/if_else/if_else_node.py index 37384202d8..6960fc045a 100644 --- a/api/core/workflow/nodes/if_else/if_else_node.py +++ b/api/core/workflow/nodes/if_else/if_else_node.py @@ -1,14 +1,19 @@ from collections.abc import Mapping, Sequence -from typing import Any, cast +from typing import Any, Literal -from core.workflow.entities.node_entities import NodeRunResult, NodeType -from core.workflow.nodes.base_node import BaseNode +from typing_extensions import deprecated + +from core.workflow.entities.node_entities import NodeRunResult +from core.workflow.entities.variable_pool import VariablePool +from core.workflow.nodes.base import BaseNode +from core.workflow.nodes.enums import NodeType from core.workflow.nodes.if_else.entities import IfElseNodeData +from core.workflow.utils.condition.entities import Condition from core.workflow.utils.condition.processor import ConditionProcessor from models.workflow import WorkflowNodeExecutionStatus -class IfElseNode(BaseNode): +class IfElseNode(BaseNode[IfElseNodeData]): _node_data_cls = IfElseNodeData _node_type = NodeType.IF_ELSE @@ -17,9 +22,6 @@ class IfElseNode(BaseNode): Run node :return: """ - node_data = self.node_data - node_data = cast(IfElseNodeData, node_data) - node_inputs: dict[str, list] = {"conditions": []} 
process_datas: dict[str, list] = {"condition_results": []} @@ -30,15 +32,14 @@ class IfElseNode(BaseNode): condition_processor = ConditionProcessor() try: # Check if the new cases structure is used - if node_data.cases: - for case in node_data.cases: - input_conditions, group_result = condition_processor.process_conditions( - variable_pool=self.graph_runtime_state.variable_pool, conditions=case.conditions + if self.node_data.cases: + for case in self.node_data.cases: + input_conditions, group_result, final_result = condition_processor.process_conditions( + variable_pool=self.graph_runtime_state.variable_pool, + conditions=case.conditions, + operator=case.logical_operator, ) - # Apply the logical operator for the current case - final_result = all(group_result) if case.logical_operator == "and" else any(group_result) - process_datas["condition_results"].append( { "group": case.model_dump(), @@ -53,13 +54,15 @@ class IfElseNode(BaseNode): break else: + # TODO: Update database then remove this # Fallback to old structure if cases are not defined - input_conditions, group_result = condition_processor.process_conditions( - variable_pool=self.graph_runtime_state.variable_pool, conditions=node_data.conditions + input_conditions, group_result, final_result = _should_not_use_old_function( + condition_processor=condition_processor, + variable_pool=self.graph_runtime_state.variable_pool, + conditions=self.node_data.conditions or [], + operator=self.node_data.logical_operator or "and", ) - final_result = all(group_result) if node_data.logical_operator == "and" else any(group_result) - selected_case_id = "true" if final_result else "false" process_datas["condition_results"].append( @@ -87,7 +90,11 @@ class IfElseNode(BaseNode): @classmethod def _extract_variable_selector_to_variable_mapping( - cls, graph_config: Mapping[str, Any], node_id: str, node_data: IfElseNodeData + cls, + *, + graph_config: Mapping[str, Any], + node_id: str, + node_data: IfElseNodeData, ) -> Mapping[str, Sequence[str]]: """ Extract variable selector to variable mapping @@ -97,3 +104,18 @@ class IfElseNode(BaseNode): :return: """ return {} + + +@deprecated("This function is deprecated. 
You should use the new cases structure.") +def _should_not_use_old_function( + *, + condition_processor: ConditionProcessor, + variable_pool: VariablePool, + conditions: list[Condition], + operator: Literal["and", "or"], +): + return condition_processor.process_conditions( + variable_pool=variable_pool, + conditions=conditions, + operator=operator, + ) diff --git a/api/core/workflow/nodes/iteration/__init__.py b/api/core/workflow/nodes/iteration/__init__.py index e69de29bb2..5bb87aaffa 100644 --- a/api/core/workflow/nodes/iteration/__init__.py +++ b/api/core/workflow/nodes/iteration/__init__.py @@ -0,0 +1,5 @@ +from .entities import IterationNodeData +from .iteration_node import IterationNode +from .iteration_start_node import IterationStartNode + +__all__ = ["IterationNode", "IterationNodeData", "IterationStartNode"] diff --git a/api/core/workflow/nodes/iteration/entities.py b/api/core/workflow/nodes/iteration/entities.py index 3c2c189159..4afc870e50 100644 --- a/api/core/workflow/nodes/iteration/entities.py +++ b/api/core/workflow/nodes/iteration/entities.py @@ -1,6 +1,8 @@ from typing import Any, Optional -from core.workflow.entities.base_node_data_entities import BaseIterationNodeData, BaseIterationState, BaseNodeData +from pydantic import Field + +from core.workflow.nodes.base import BaseIterationNodeData, BaseIterationState, BaseNodeData class IterationNodeData(BaseIterationNodeData): @@ -26,7 +28,7 @@ class IterationState(BaseIterationState): Iteration State. """ - outputs: list[Any] = None + outputs: list[Any] = Field(default_factory=list) current_output: Optional[Any] = None class MetaData(BaseIterationState.MetaData): diff --git a/api/core/workflow/nodes/iteration/iteration_node.py b/api/core/workflow/nodes/iteration/iteration_node.py index 01bb4e9076..af79da9215 100644 --- a/api/core/workflow/nodes/iteration/iteration_node.py +++ b/api/core/workflow/nodes/iteration/iteration_node.py @@ -5,7 +5,8 @@ from typing import Any, cast from configs import dify_config from core.model_runtime.utils.encoders import jsonable_encoder -from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeRunResult, NodeType +from core.variables import IntegerSegment +from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeRunResult from core.workflow.graph_engine.entities.event import ( BaseGraphEvent, BaseNodeEvent, @@ -20,15 +21,16 @@ from core.workflow.graph_engine.entities.event import ( NodeRunSucceededEvent, ) from core.workflow.graph_engine.entities.graph import Graph -from core.workflow.nodes.base_node import BaseNode -from core.workflow.nodes.event import RunCompletedEvent, RunEvent +from core.workflow.nodes.base import BaseNode +from core.workflow.nodes.enums import NodeType +from core.workflow.nodes.event import NodeEvent, RunCompletedEvent from core.workflow.nodes.iteration.entities import IterationNodeData from models.workflow import WorkflowNodeExecutionStatus logger = logging.getLogger(__name__) -class IterationNode(BaseNode): +class IterationNode(BaseNode[IterationNodeData]): """ Iteration Node. """ @@ -36,16 +38,24 @@ class IterationNode(BaseNode): _node_data_cls = IterationNodeData _node_type = NodeType.ITERATION - def _run(self) -> Generator[RunEvent | InNodeEvent, None, None]: + def _run(self) -> Generator[NodeEvent | InNodeEvent, None, None]: """ Run the node. 
""" - self.node_data = cast(IterationNodeData, self.node_data) iterator_list_segment = self.graph_runtime_state.variable_pool.get(self.node_data.iterator_selector) if not iterator_list_segment: raise ValueError(f"Iterator variable {self.node_data.iterator_selector} not found") + if len(iterator_list_segment.value) == 0: + yield RunCompletedEvent( + run_result=NodeRunResult( + status=WorkflowNodeExecutionStatus.SUCCEEDED, + outputs={"output": []}, + ) + ) + return + iterator_list_value = iterator_list_segment.to_object() if not isinstance(iterator_list_value, list): @@ -138,9 +148,16 @@ class IterationNode(BaseNode): if NodeRunMetadataKey.ITERATION_ID not in metadata: metadata[NodeRunMetadataKey.ITERATION_ID] = self.node_id - metadata[NodeRunMetadataKey.ITERATION_INDEX] = variable_pool.get_any( - [self.node_id, "index"] - ) + index_variable = variable_pool.get([self.node_id, "index"]) + if not isinstance(index_variable, IntegerSegment): + yield RunCompletedEvent( + run_result=NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + error=f"Invalid index variable type: {type(index_variable)}", + ) + ) + return + metadata[NodeRunMetadataKey.ITERATION_INDEX] = index_variable.value event.route_node_state.node_run_result.metadata = metadata yield event @@ -172,19 +189,28 @@ class IterationNode(BaseNode): yield event # append to iteration output variable list - current_iteration_output = variable_pool.get_any(self.node_data.output_selector) + current_iteration_output_variable = variable_pool.get(self.node_data.output_selector) + if current_iteration_output_variable is None: + yield RunCompletedEvent( + run_result=NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + error=f"Iteration output variable {self.node_data.output_selector} not found", + ) + ) + return + current_iteration_output = current_iteration_output_variable.to_object() outputs.append(current_iteration_output) # remove all nodes outputs from variable pool for node_id in iteration_graph.node_ids: - variable_pool.remove_node(node_id) + variable_pool.remove([node_id]) # move to next iteration - current_index = variable_pool.get([self.node_id, "index"]) - if current_index is None: + current_index_variable = variable_pool.get([self.node_id, "index"]) + if not isinstance(current_index_variable, IntegerSegment): raise ValueError(f"iteration {self.node_id} current index not found") - next_index = int(current_index.to_object()) + 1 + next_index = current_index_variable.value + 1 variable_pool.add([self.node_id, "index"], next_index) if next_index < len(iterator_list_value): @@ -196,9 +222,7 @@ class IterationNode(BaseNode): iteration_node_type=self.node_type, iteration_node_data=self.node_data, index=next_index, - pre_iteration_output=jsonable_encoder(current_iteration_output) - if current_iteration_output - else None, + pre_iteration_output=jsonable_encoder(current_iteration_output), ) yield IterationRunSucceededEvent( @@ -247,7 +271,11 @@ class IterationNode(BaseNode): @classmethod def _extract_variable_selector_to_variable_mapping( - cls, graph_config: Mapping[str, Any], node_id: str, node_data: IterationNodeData + cls, + *, + graph_config: Mapping[str, Any], + node_id: str, + node_data: IterationNodeData, ) -> Mapping[str, Sequence[str]]: """ Extract variable selector to variable mapping @@ -273,15 +301,13 @@ class IterationNode(BaseNode): # variable selector to variable mapping try: # Get node class - from core.workflow.nodes.node_mapping import node_classes + from core.workflow.nodes.node_mapping import 
node_type_classes_mapping - node_type = NodeType.value_of(sub_node_config.get("data", {}).get("type")) - node_cls = node_classes.get(node_type) + node_type = NodeType(sub_node_config.get("data", {}).get("type")) + node_cls = node_type_classes_mapping.get(node_type) if not node_cls: continue - node_cls = cast(BaseNode, node_cls) - sub_node_variable_mapping = node_cls.extract_variable_selector_to_variable_mapping( graph_config=graph_config, config=sub_node_config ) diff --git a/api/core/workflow/nodes/iteration/iteration_start_node.py b/api/core/workflow/nodes/iteration/iteration_start_node.py index 88b9665ac6..6ab7c30106 100644 --- a/api/core/workflow/nodes/iteration/iteration_start_node.py +++ b/api/core/workflow/nodes/iteration/iteration_start_node.py @@ -1,8 +1,9 @@ from collections.abc import Mapping, Sequence from typing import Any -from core.workflow.entities.node_entities import NodeRunResult, NodeType -from core.workflow.nodes.base_node import BaseNode +from core.workflow.entities.node_entities import NodeRunResult +from core.workflow.nodes.base import BaseNode +from core.workflow.nodes.enums import NodeType from core.workflow.nodes.iteration.entities import IterationNodeData, IterationStartNodeData from models.workflow import WorkflowNodeExecutionStatus diff --git a/api/core/workflow/nodes/knowledge_retrieval/__init__.py b/api/core/workflow/nodes/knowledge_retrieval/__init__.py index e69de29bb2..4d4a4cbd9f 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/__init__.py +++ b/api/core/workflow/nodes/knowledge_retrieval/__init__.py @@ -0,0 +1,3 @@ +from .knowledge_retrieval_node import KnowledgeRetrievalNode + +__all__ = ["KnowledgeRetrievalNode"] diff --git a/api/core/workflow/nodes/knowledge_retrieval/entities.py b/api/core/workflow/nodes/knowledge_retrieval/entities.py index 1cd88039b1..e8972d1381 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/entities.py +++ b/api/core/workflow/nodes/knowledge_retrieval/entities.py @@ -2,7 +2,7 @@ from typing import Any, Literal, Optional from pydantic import BaseModel -from core.workflow.entities.base_node_data_entities import BaseNodeData +from core.workflow.nodes.base import BaseNodeData class RerankingModelConfig(BaseModel): diff --git a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py index 8cd208d7fc..2a5795a3ed 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py +++ b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py @@ -14,8 +14,10 @@ from core.model_runtime.entities.model_entities import ModelFeature, ModelType from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel from core.rag.retrieval.dataset_retrieval import DatasetRetrieval from core.rag.retrieval.retrieval_methods import RetrievalMethod -from core.workflow.entities.node_entities import NodeRunResult, NodeType -from core.workflow.nodes.base_node import BaseNode +from core.variables import StringSegment +from core.workflow.entities.node_entities import NodeRunResult +from core.workflow.nodes.base import BaseNode +from core.workflow.nodes.enums import NodeType from core.workflow.nodes.knowledge_retrieval.entities import KnowledgeRetrievalNodeData from extensions.ext_database import db from models.dataset import Dataset, Document, DocumentSegment @@ -32,16 +34,20 @@ default_retrieval_model = { } -class KnowledgeRetrievalNode(BaseNode): +class 
KnowledgeRetrievalNode(BaseNode[KnowledgeRetrievalNodeData]): _node_data_cls = KnowledgeRetrievalNodeData - node_type = NodeType.KNOWLEDGE_RETRIEVAL + _node_type = NodeType.KNOWLEDGE_RETRIEVAL def _run(self) -> NodeRunResult: - node_data = cast(KnowledgeRetrievalNodeData, self.node_data) - # extract variables - variable = self.graph_runtime_state.variable_pool.get_any(node_data.query_variable_selector) - query = variable + variable = self.graph_runtime_state.variable_pool.get(self.node_data.query_variable_selector) + if not isinstance(variable, StringSegment): + return NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + inputs={}, + error="Query variable is not string type.", + ) + query = variable.value variables = {"query": query} if not query: return NodeRunResult( @@ -49,7 +55,7 @@ class KnowledgeRetrievalNode(BaseNode): ) # retrieve knowledge try: - results = self._fetch_dataset_retriever(node_data=node_data, query=query) + results = self._fetch_dataset_retriever(node_data=self.node_data, query=query) outputs = {"result": results} return NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=variables, process_data=None, outputs=outputs @@ -244,7 +250,11 @@ class KnowledgeRetrievalNode(BaseNode): @classmethod def _extract_variable_selector_to_variable_mapping( - cls, graph_config: Mapping[str, Any], node_id: str, node_data: KnowledgeRetrievalNodeData + cls, + *, + graph_config: Mapping[str, Any], + node_id: str, + node_data: KnowledgeRetrievalNodeData, ) -> Mapping[str, Sequence[str]]: """ Extract variable selector to variable mapping diff --git a/api/core/workflow/nodes/llm/__init__.py b/api/core/workflow/nodes/llm/__init__.py index e69de29bb2..f7bc713f63 100644 --- a/api/core/workflow/nodes/llm/__init__.py +++ b/api/core/workflow/nodes/llm/__init__.py @@ -0,0 +1,17 @@ +from .entities import ( + LLMNodeChatModelMessage, + LLMNodeCompletionModelPromptTemplate, + LLMNodeData, + ModelConfig, + VisionConfig, +) +from .node import LLMNode + +__all__ = [ + "LLMNode", + "LLMNodeChatModelMessage", + "LLMNodeCompletionModelPromptTemplate", + "LLMNodeData", + "ModelConfig", + "VisionConfig", +] diff --git a/api/core/workflow/nodes/llm/entities.py b/api/core/workflow/nodes/llm/entities.py index 93ee0ac250..a25d563fe0 100644 --- a/api/core/workflow/nodes/llm/entities.py +++ b/api/core/workflow/nodes/llm/entities.py @@ -1,17 +1,15 @@ -from typing import Any, Literal, Optional, Union +from collections.abc import Sequence +from typing import Any, Optional -from pydantic import BaseModel +from pydantic import BaseModel, Field, field_validator +from core.model_runtime.entities import ImagePromptMessageContent from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, CompletionModelPromptTemplate, MemoryConfig -from core.workflow.entities.base_node_data_entities import BaseNodeData from core.workflow.entities.variable_entities import VariableSelector +from core.workflow.nodes.base import BaseNodeData class ModelConfig(BaseModel): - """ - Model Config. - """ - provider: str name: str mode: str @@ -19,62 +17,43 @@ class ModelConfig(BaseModel): class ContextConfig(BaseModel): - """ - Context Config. - """ - enabled: bool variable_selector: Optional[list[str]] = None +class VisionConfigOptions(BaseModel): + variable_selector: Sequence[str] = Field(default_factory=lambda: ["sys", "files"]) + detail: ImagePromptMessageContent.DETAIL = ImagePromptMessageContent.DETAIL.HIGH + + class VisionConfig(BaseModel): - """ - Vision Config. 
- """ + enabled: bool = False + configs: VisionConfigOptions = Field(default_factory=VisionConfigOptions) - class Configs(BaseModel): - """ - Configs. - """ - - detail: Literal["low", "high"] - - enabled: bool - configs: Optional[Configs] = None + @field_validator("configs", mode="before") + @classmethod + def convert_none_configs(cls, v: Any): + if v is None: + return VisionConfigOptions() + return v class PromptConfig(BaseModel): - """ - Prompt Config. - """ - jinja2_variables: Optional[list[VariableSelector]] = None class LLMNodeChatModelMessage(ChatModelMessage): - """ - LLM Node Chat Model Message. - """ - jinja2_text: Optional[str] = None class LLMNodeCompletionModelPromptTemplate(CompletionModelPromptTemplate): - """ - LLM Node Chat Model Prompt Template. - """ - jinja2_text: Optional[str] = None class LLMNodeData(BaseNodeData): - """ - LLM Node Data. - """ - model: ModelConfig - prompt_template: Union[list[LLMNodeChatModelMessage], LLMNodeCompletionModelPromptTemplate] + prompt_template: Sequence[LLMNodeChatModelMessage] | LLMNodeCompletionModelPromptTemplate prompt_config: Optional[PromptConfig] = None memory: Optional[MemoryConfig] = None context: ContextConfig - vision: VisionConfig + vision: VisionConfig = Field(default_factory=VisionConfig) diff --git a/api/core/workflow/nodes/llm/llm_node.py b/api/core/workflow/nodes/llm/llm_node.py deleted file mode 100644 index 3d336b0b0b..0000000000 --- a/api/core/workflow/nodes/llm/llm_node.py +++ /dev/null @@ -1,774 +0,0 @@ -import json -from collections.abc import Generator, Mapping, Sequence -from copy import deepcopy -from typing import TYPE_CHECKING, Any, Optional, cast - -from pydantic import BaseModel - -from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity -from core.entities.model_entities import ModelStatus -from core.entities.provider_entities import QuotaUnit -from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError -from core.memory.token_buffer_memory import TokenBufferMemory -from core.model_manager import ModelInstance, ModelManager -from core.model_runtime.entities.llm_entities import LLMResult, LLMUsage -from core.model_runtime.entities.message_entities import ( - ImagePromptMessageContent, - PromptMessage, - PromptMessageContentType, -) -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.model_runtime.utils.encoders import jsonable_encoder -from core.prompt.advanced_prompt_transform import AdvancedPromptTransform -from core.prompt.entities.advanced_prompt_entities import CompletionModelPromptTemplate, MemoryConfig -from core.prompt.utils.prompt_message_util import PromptMessageUtil -from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeRunResult, NodeType -from core.workflow.entities.variable_pool import VariablePool -from core.workflow.enums import SystemVariableKey -from core.workflow.graph_engine.entities.event import InNodeEvent -from core.workflow.nodes.base_node import BaseNode -from core.workflow.nodes.event import RunCompletedEvent, RunEvent, RunRetrieverResourceEvent, RunStreamChunkEvent -from core.workflow.nodes.llm.entities import ( - LLMNodeChatModelMessage, - LLMNodeCompletionModelPromptTemplate, - LLMNodeData, - ModelConfig, -) -from core.workflow.utils.variable_template_parser import VariableTemplateParser -from extensions.ext_database import db -from models.model import Conversation -from 
models.provider import Provider, ProviderType -from models.workflow import WorkflowNodeExecutionStatus - -if TYPE_CHECKING: - from core.file.file_obj import FileVar - - -class ModelInvokeCompleted(BaseModel): - """ - Model invoke completed - """ - - text: str - usage: LLMUsage - finish_reason: Optional[str] = None - - -class LLMNode(BaseNode): - _node_data_cls = LLMNodeData - _node_type = NodeType.LLM - - def _run(self) -> Generator[RunEvent | InNodeEvent, None, None]: - """ - Run node - :return: - """ - node_data = cast(LLMNodeData, deepcopy(self.node_data)) - variable_pool = self.graph_runtime_state.variable_pool - - node_inputs = None - process_data = None - - try: - # init messages template - node_data.prompt_template = self._transform_chat_messages(node_data.prompt_template) - - # fetch variables and fetch values from variable pool - inputs = self._fetch_inputs(node_data, variable_pool) - - # fetch jinja2 inputs - jinja_inputs = self._fetch_jinja_inputs(node_data, variable_pool) - - # merge inputs - inputs.update(jinja_inputs) - - node_inputs = {} - - # fetch files - files = self._fetch_files(node_data, variable_pool) - - if files: - node_inputs["#files#"] = [file.to_dict() for file in files] - - # fetch context value - generator = self._fetch_context(node_data, variable_pool) - context = None - for event in generator: - if isinstance(event, RunRetrieverResourceEvent): - context = event.context - yield event - - if context: - node_inputs["#context#"] = context # type: ignore - - # fetch model config - model_instance, model_config = self._fetch_model_config(node_data.model) - - # fetch memory - memory = self._fetch_memory(node_data.memory, variable_pool, model_instance) - - # fetch prompt messages - prompt_messages, stop = self._fetch_prompt_messages( - node_data=node_data, - query=variable_pool.get_any(["sys", SystemVariableKey.QUERY.value]) if node_data.memory else None, - query_prompt_template=node_data.memory.query_prompt_template if node_data.memory else None, - inputs=inputs, - files=files, - context=context, - memory=memory, - model_config=model_config, - ) - - process_data = { - "model_mode": model_config.mode, - "prompts": PromptMessageUtil.prompt_messages_to_prompt_for_saving( - model_mode=model_config.mode, prompt_messages=prompt_messages - ), - "model_provider": model_config.provider, - "model_name": model_config.model, - } - - # handle invoke result - generator = self._invoke_llm( - node_data_model=node_data.model, - model_instance=model_instance, - prompt_messages=prompt_messages, - stop=stop, - ) - - result_text = "" - usage = LLMUsage.empty_usage() - finish_reason = None - for event in generator: - if isinstance(event, RunStreamChunkEvent): - yield event - elif isinstance(event, ModelInvokeCompleted): - result_text = event.text - usage = event.usage - finish_reason = event.finish_reason - break - except Exception as e: - yield RunCompletedEvent( - run_result=NodeRunResult( - status=WorkflowNodeExecutionStatus.FAILED, - error=str(e), - inputs=node_inputs, - process_data=process_data, - ) - ) - return - - outputs = {"text": result_text, "usage": jsonable_encoder(usage), "finish_reason": finish_reason} - - yield RunCompletedEvent( - run_result=NodeRunResult( - status=WorkflowNodeExecutionStatus.SUCCEEDED, - inputs=node_inputs, - process_data=process_data, - outputs=outputs, - metadata={ - NodeRunMetadataKey.TOTAL_TOKENS: usage.total_tokens, - NodeRunMetadataKey.TOTAL_PRICE: usage.total_price, - NodeRunMetadataKey.CURRENCY: usage.currency, - }, - llm_usage=usage, - ) - ) - 
- def _invoke_llm( - self, - node_data_model: ModelConfig, - model_instance: ModelInstance, - prompt_messages: list[PromptMessage], - stop: Optional[list[str]] = None, - ) -> Generator[RunEvent | ModelInvokeCompleted, None, None]: - """ - Invoke large language model - :param node_data_model: node data model - :param model_instance: model instance - :param prompt_messages: prompt messages - :param stop: stop - :return: - """ - db.session.close() - - invoke_result = model_instance.invoke_llm( - prompt_messages=prompt_messages, - model_parameters=node_data_model.completion_params, - stop=stop, - stream=True, - user=self.user_id, - ) - - # handle invoke result - generator = self._handle_invoke_result(invoke_result=invoke_result) - - usage = LLMUsage.empty_usage() - for event in generator: - yield event - if isinstance(event, ModelInvokeCompleted): - usage = event.usage - - # deduct quota - self.deduct_llm_quota(tenant_id=self.tenant_id, model_instance=model_instance, usage=usage) - - def _handle_invoke_result( - self, invoke_result: LLMResult | Generator - ) -> Generator[RunEvent | ModelInvokeCompleted, None, None]: - """ - Handle invoke result - :param invoke_result: invoke result - :return: - """ - if isinstance(invoke_result, LLMResult): - return - - model = None - prompt_messages: list[PromptMessage] = [] - full_text = "" - usage = None - finish_reason = None - for result in invoke_result: - text = result.delta.message.content - full_text += text - - yield RunStreamChunkEvent(chunk_content=text, from_variable_selector=[self.node_id, "text"]) - - if not model: - model = result.model - - if not prompt_messages: - prompt_messages = result.prompt_messages - - if not usage and result.delta.usage: - usage = result.delta.usage - - if not finish_reason and result.delta.finish_reason: - finish_reason = result.delta.finish_reason - - if not usage: - usage = LLMUsage.empty_usage() - - yield ModelInvokeCompleted(text=full_text, usage=usage, finish_reason=finish_reason) - - def _transform_chat_messages( - self, messages: list[LLMNodeChatModelMessage] | LLMNodeCompletionModelPromptTemplate - ) -> list[LLMNodeChatModelMessage] | LLMNodeCompletionModelPromptTemplate: - """ - Transform chat messages - - :param messages: chat messages - :return: - """ - - if isinstance(messages, LLMNodeCompletionModelPromptTemplate): - if messages.edition_type == "jinja2" and messages.jinja2_text: - messages.text = messages.jinja2_text - - return messages - - for message in messages: - if message.edition_type == "jinja2" and message.jinja2_text: - message.text = message.jinja2_text - - return messages - - def _fetch_jinja_inputs(self, node_data: LLMNodeData, variable_pool: VariablePool) -> dict[str, str]: - """ - Fetch jinja inputs - :param node_data: node data - :param variable_pool: variable pool - :return: - """ - variables = {} - - if not node_data.prompt_config: - return variables - - for variable_selector in node_data.prompt_config.jinja2_variables or []: - variable = variable_selector.variable - value = variable_pool.get_any(variable_selector.value_selector) - - def parse_dict(d: dict) -> str: - """ - Parse dict into string - """ - # check if it's a context structure - if "metadata" in d and "_source" in d["metadata"] and "content" in d: - return d["content"] - - # else, parse the dict - try: - return json.dumps(d, ensure_ascii=False) - except Exception: - return str(d) - - if isinstance(value, str): - value = value - elif isinstance(value, list): - result = "" - for item in value: - if isinstance(item, dict): - 
result += parse_dict(item) - elif isinstance(item, str): - result += item - elif isinstance(item, int | float): - result += str(item) - else: - result += str(item) - result += "\n" - value = result.strip() - elif isinstance(value, dict): - value = parse_dict(value) - elif isinstance(value, int | float): - value = str(value) - else: - value = str(value) - - variables[variable] = value - - return variables - - def _fetch_inputs(self, node_data: LLMNodeData, variable_pool: VariablePool) -> dict[str, str]: - """ - Fetch inputs - :param node_data: node data - :param variable_pool: variable pool - :return: - """ - inputs = {} - prompt_template = node_data.prompt_template - - variable_selectors = [] - if isinstance(prompt_template, list): - for prompt in prompt_template: - variable_template_parser = VariableTemplateParser(template=prompt.text) - variable_selectors.extend(variable_template_parser.extract_variable_selectors()) - elif isinstance(prompt_template, CompletionModelPromptTemplate): - variable_template_parser = VariableTemplateParser(template=prompt_template.text) - variable_selectors = variable_template_parser.extract_variable_selectors() - - for variable_selector in variable_selectors: - variable_value = variable_pool.get_any(variable_selector.value_selector) - if variable_value is None: - raise ValueError(f"Variable {variable_selector.variable} not found") - - inputs[variable_selector.variable] = variable_value - - memory = node_data.memory - if memory and memory.query_prompt_template: - query_variable_selectors = VariableTemplateParser( - template=memory.query_prompt_template - ).extract_variable_selectors() - for variable_selector in query_variable_selectors: - variable_value = variable_pool.get_any(variable_selector.value_selector) - if variable_value is None: - raise ValueError(f"Variable {variable_selector.variable} not found") - - inputs[variable_selector.variable] = variable_value - - return inputs - - def _fetch_files(self, node_data: LLMNodeData, variable_pool: VariablePool) -> list["FileVar"]: - """ - Fetch files - :param node_data: node data - :param variable_pool: variable pool - :return: - """ - if not node_data.vision.enabled: - return [] - - files = variable_pool.get_any(["sys", SystemVariableKey.FILES.value]) - if not files: - return [] - - return files - - def _fetch_context(self, node_data: LLMNodeData, variable_pool: VariablePool) -> Generator[RunEvent, None, None]: - """ - Fetch context - :param node_data: node data - :param variable_pool: variable pool - :return: - """ - if not node_data.context.enabled: - return - - if not node_data.context.variable_selector: - return - - context_value = variable_pool.get_any(node_data.context.variable_selector) - if context_value: - if isinstance(context_value, str): - yield RunRetrieverResourceEvent(retriever_resources=[], context=context_value) - elif isinstance(context_value, list): - context_str = "" - original_retriever_resource = [] - for item in context_value: - if isinstance(item, str): - context_str += item + "\n" - else: - if "content" not in item: - raise ValueError(f"Invalid context structure: {item}") - - context_str += item["content"] + "\n" - - retriever_resource = self._convert_to_original_retriever_resource(item) - if retriever_resource: - original_retriever_resource.append(retriever_resource) - - yield RunRetrieverResourceEvent( - retriever_resources=original_retriever_resource, context=context_str.strip() - ) - - def _convert_to_original_retriever_resource(self, context_dict: dict) -> Optional[dict]: - """ - 
Convert to original retriever resource, temp. - :param context_dict: context dict - :return: - """ - if ( - "metadata" in context_dict - and "_source" in context_dict["metadata"] - and context_dict["metadata"]["_source"] == "knowledge" - ): - metadata = context_dict.get("metadata", {}) - - source = { - "position": metadata.get("position"), - "dataset_id": metadata.get("dataset_id"), - "dataset_name": metadata.get("dataset_name"), - "document_id": metadata.get("document_id"), - "document_name": metadata.get("document_name"), - "data_source_type": metadata.get("document_data_source_type"), - "segment_id": metadata.get("segment_id"), - "retriever_from": metadata.get("retriever_from"), - "score": metadata.get("score"), - "hit_count": metadata.get("segment_hit_count"), - "word_count": metadata.get("segment_word_count"), - "segment_position": metadata.get("segment_position"), - "index_node_hash": metadata.get("segment_index_node_hash"), - "content": context_dict.get("content"), - } - - return source - - return None - - def _fetch_model_config( - self, node_data_model: ModelConfig - ) -> tuple[ModelInstance, ModelConfigWithCredentialsEntity]: - """ - Fetch model config - :param node_data_model: node data model - :return: - """ - model_name = node_data_model.name - provider_name = node_data_model.provider - - model_manager = ModelManager() - model_instance = model_manager.get_model_instance( - tenant_id=self.tenant_id, model_type=ModelType.LLM, provider=provider_name, model=model_name - ) - - provider_model_bundle = model_instance.provider_model_bundle - model_type_instance = model_instance.model_type_instance - model_type_instance = cast(LargeLanguageModel, model_type_instance) - - model_credentials = model_instance.credentials - - # check model - provider_model = provider_model_bundle.configuration.get_provider_model( - model=model_name, model_type=ModelType.LLM - ) - - if provider_model is None: - raise ValueError(f"Model {model_name} not exist.") - - if provider_model.status == ModelStatus.NO_CONFIGURE: - raise ProviderTokenNotInitError(f"Model {model_name} credentials is not initialized.") - elif provider_model.status == ModelStatus.NO_PERMISSION: - raise ModelCurrentlyNotSupportError(f"Dify Hosted OpenAI {model_name} currently not support.") - elif provider_model.status == ModelStatus.QUOTA_EXCEEDED: - raise QuotaExceededError(f"Model provider {provider_name} quota exceeded.") - - # model config - completion_params = node_data_model.completion_params - stop = [] - if "stop" in completion_params: - stop = completion_params["stop"] - del completion_params["stop"] - - # get model mode - model_mode = node_data_model.mode - if not model_mode: - raise ValueError("LLM mode is required.") - - model_schema = model_type_instance.get_model_schema(model_name, model_credentials) - - if not model_schema: - raise ValueError(f"Model {model_name} not exist.") - - return model_instance, ModelConfigWithCredentialsEntity( - provider=provider_name, - model=model_name, - model_schema=model_schema, - mode=model_mode, - provider_model_bundle=provider_model_bundle, - credentials=model_credentials, - parameters=completion_params, - stop=stop, - ) - - def _fetch_memory( - self, node_data_memory: Optional[MemoryConfig], variable_pool: VariablePool, model_instance: ModelInstance - ) -> Optional[TokenBufferMemory]: - """ - Fetch memory - :param node_data_memory: node data memory - :param variable_pool: variable pool - :return: - """ - if not node_data_memory: - return None - - # get conversation id - conversation_id = 
variable_pool.get_any(["sys", SystemVariableKey.CONVERSATION_ID.value]) - if conversation_id is None: - return None - - # get conversation - conversation = ( - db.session.query(Conversation) - .filter(Conversation.app_id == self.app_id, Conversation.id == conversation_id) - .first() - ) - - if not conversation: - return None - - memory = TokenBufferMemory(conversation=conversation, model_instance=model_instance) - - return memory - - def _fetch_prompt_messages( - self, - node_data: LLMNodeData, - query: Optional[str], - query_prompt_template: Optional[str], - inputs: dict[str, str], - files: list["FileVar"], - context: Optional[str], - memory: Optional[TokenBufferMemory], - model_config: ModelConfigWithCredentialsEntity, - ) -> tuple[list[PromptMessage], Optional[list[str]]]: - """ - Fetch prompt messages - :param node_data: node data - :param query: query - :param query_prompt_template: query prompt template - :param inputs: inputs - :param files: files - :param context: context - :param memory: memory - :param model_config: model config - :return: - """ - prompt_transform = AdvancedPromptTransform(with_variable_tmpl=True) - prompt_messages = prompt_transform.get_prompt( - prompt_template=node_data.prompt_template, - inputs=inputs, - query=query or "", - files=files, - context=context, - memory_config=node_data.memory, - memory=memory, - model_config=model_config, - query_prompt_template=query_prompt_template, - ) - stop = model_config.stop - - vision_enabled = node_data.vision.enabled - vision_detail = node_data.vision.configs.detail if node_data.vision.configs else None - filtered_prompt_messages = [] - for prompt_message in prompt_messages: - if prompt_message.is_empty(): - continue - - if not isinstance(prompt_message.content, str): - prompt_message_content = [] - for content_item in prompt_message.content: - if ( - vision_enabled - and content_item.type == PromptMessageContentType.IMAGE - and isinstance(content_item, ImagePromptMessageContent) - ): - # Override vision config if LLM node has vision config - if vision_detail: - content_item.detail = ImagePromptMessageContent.DETAIL(vision_detail) - prompt_message_content.append(content_item) - elif content_item.type == PromptMessageContentType.TEXT: - prompt_message_content.append(content_item) - - if len(prompt_message_content) > 1: - prompt_message.content = prompt_message_content - elif ( - len(prompt_message_content) == 1 and prompt_message_content[0].type == PromptMessageContentType.TEXT - ): - prompt_message.content = prompt_message_content[0].data - - filtered_prompt_messages.append(prompt_message) - - if not filtered_prompt_messages: - raise ValueError( - "No prompt found in the LLM configuration. " - "Please ensure a prompt is properly configured before proceeding." 
- ) - - return filtered_prompt_messages, stop - - @classmethod - def deduct_llm_quota(cls, tenant_id: str, model_instance: ModelInstance, usage: LLMUsage) -> None: - """ - Deduct LLM quota - :param tenant_id: tenant id - :param model_instance: model instance - :param usage: usage - :return: - """ - provider_model_bundle = model_instance.provider_model_bundle - provider_configuration = provider_model_bundle.configuration - - if provider_configuration.using_provider_type != ProviderType.SYSTEM: - return - - system_configuration = provider_configuration.system_configuration - - quota_unit = None - for quota_configuration in system_configuration.quota_configurations: - if quota_configuration.quota_type == system_configuration.current_quota_type: - quota_unit = quota_configuration.quota_unit - - if quota_configuration.quota_limit == -1: - return - - break - - used_quota = None - if quota_unit: - if quota_unit == QuotaUnit.TOKENS: - used_quota = usage.total_tokens - elif quota_unit == QuotaUnit.CREDITS: - used_quota = 1 - - if "gpt-4" in model_instance.model: - used_quota = 20 - else: - used_quota = 1 - - if used_quota is not None: - db.session.query(Provider).filter( - Provider.tenant_id == tenant_id, - Provider.provider_name == model_instance.provider, - Provider.provider_type == ProviderType.SYSTEM.value, - Provider.quota_type == system_configuration.current_quota_type.value, - Provider.quota_limit > Provider.quota_used, - ).update({"quota_used": Provider.quota_used + used_quota}) - db.session.commit() - - @classmethod - def _extract_variable_selector_to_variable_mapping( - cls, graph_config: Mapping[str, Any], node_id: str, node_data: LLMNodeData - ) -> Mapping[str, Sequence[str]]: - """ - Extract variable selector to variable mapping - :param graph_config: graph config - :param node_id: node id - :param node_data: node data - :return: - """ - prompt_template = node_data.prompt_template - - variable_selectors = [] - if isinstance(prompt_template, list): - for prompt in prompt_template: - if prompt.edition_type != "jinja2": - variable_template_parser = VariableTemplateParser(template=prompt.text) - variable_selectors.extend(variable_template_parser.extract_variable_selectors()) - else: - if prompt_template.edition_type != "jinja2": - variable_template_parser = VariableTemplateParser(template=prompt_template.text) - variable_selectors = variable_template_parser.extract_variable_selectors() - - variable_mapping = {} - for variable_selector in variable_selectors: - variable_mapping[variable_selector.variable] = variable_selector.value_selector - - memory = node_data.memory - if memory and memory.query_prompt_template: - query_variable_selectors = VariableTemplateParser( - template=memory.query_prompt_template - ).extract_variable_selectors() - for variable_selector in query_variable_selectors: - variable_mapping[variable_selector.variable] = variable_selector.value_selector - - if node_data.context.enabled: - variable_mapping["#context#"] = node_data.context.variable_selector - - if node_data.vision.enabled: - variable_mapping["#files#"] = ["sys", SystemVariableKey.FILES.value] - - if node_data.memory: - variable_mapping["#sys.query#"] = ["sys", SystemVariableKey.QUERY.value] - - if node_data.prompt_config: - enable_jinja = False - - if isinstance(prompt_template, list): - for prompt in prompt_template: - if prompt.edition_type == "jinja2": - enable_jinja = True - break - else: - if prompt_template.edition_type == "jinja2": - enable_jinja = True - - if enable_jinja: - for variable_selector in 
node_data.prompt_config.jinja2_variables or []: - variable_mapping[variable_selector.variable] = variable_selector.value_selector - - variable_mapping = {node_id + "." + key: value for key, value in variable_mapping.items()} - - return variable_mapping - - @classmethod - def get_default_config(cls, filters: Optional[dict] = None) -> dict: - """ - Get default config of node. - :param filters: filter by node config parameters. - :return: - """ - return { - "type": "llm", - "config": { - "prompt_templates": { - "chat_model": { - "prompts": [ - {"role": "system", "text": "You are a helpful AI assistant.", "edition_type": "basic"} - ] - }, - "completion_model": { - "conversation_histories_role": {"user_prefix": "Human", "assistant_prefix": "Assistant"}, - "prompt": { - "text": "Here is the chat histories between human and assistant, inside " - "<histories></histories> XML tags.\n\n<histories>\n{{" - "#histories#}}\n</histories>\n\n\nHuman: {{#sys.query#}}\n\nAssistant:", - "edition_type": "basic", - }, - "stop": ["Human:"], - }, - } - }, - } diff --git a/api/core/workflow/nodes/loop/entities.py b/api/core/workflow/nodes/loop/entities.py index a8a0debe64..b7cd7a948e 100644 --- a/api/core/workflow/nodes/loop/entities.py +++ b/api/core/workflow/nodes/loop/entities.py @@ -1,4 +1,4 @@ -from core.workflow.entities.base_node_data_entities import BaseIterationNodeData, BaseIterationState +from core.workflow.nodes.base import BaseIterationNodeData, BaseIterationState class LoopNodeData(BaseIterationNodeData): diff --git a/api/core/workflow/nodes/loop/loop_node.py b/api/core/workflow/nodes/loop/loop_node.py index fbc68b79cb..6fdff96602 100644 --- a/api/core/workflow/nodes/loop/loop_node.py +++ b/api/core/workflow/nodes/loop/loop_node.py @@ -1,12 +1,12 @@ from typing import Any -from core.workflow.entities.node_entities import NodeType -from core.workflow.nodes.base_node import BaseNode +from core.workflow.nodes.base import BaseNode +from core.workflow.nodes.enums import NodeType from core.workflow.nodes.loop.entities import LoopNodeData, LoopState from core.workflow.utils.condition.entities import Condition -class LoopNode(BaseNode): +class LoopNode(BaseNode[LoopNodeData]): """ Loop Node.
""" diff --git a/api/core/workflow/nodes/node_mapping.py b/api/core/workflow/nodes/node_mapping.py index b98525e86e..c13b5ff76f 100644 --- a/api/core/workflow/nodes/node_mapping.py +++ b/api/core/workflow/nodes/node_mapping.py @@ -1,22 +1,24 @@ -from core.workflow.entities.node_entities import NodeType -from core.workflow.nodes.answer.answer_node import AnswerNode -from core.workflow.nodes.code.code_node import CodeNode -from core.workflow.nodes.end.end_node import EndNode -from core.workflow.nodes.http_request.http_request_node import HttpRequestNode -from core.workflow.nodes.if_else.if_else_node import IfElseNode -from core.workflow.nodes.iteration.iteration_node import IterationNode -from core.workflow.nodes.iteration.iteration_start_node import IterationStartNode -from core.workflow.nodes.knowledge_retrieval.knowledge_retrieval_node import KnowledgeRetrievalNode -from core.workflow.nodes.llm.llm_node import LLMNode -from core.workflow.nodes.parameter_extractor.parameter_extractor_node import ParameterExtractorNode -from core.workflow.nodes.question_classifier.question_classifier_node import QuestionClassifierNode -from core.workflow.nodes.start.start_node import StartNode -from core.workflow.nodes.template_transform.template_transform_node import TemplateTransformNode -from core.workflow.nodes.tool.tool_node import ToolNode -from core.workflow.nodes.variable_aggregator.variable_aggregator_node import VariableAggregatorNode +from core.workflow.nodes.answer import AnswerNode +from core.workflow.nodes.base import BaseNode +from core.workflow.nodes.code import CodeNode +from core.workflow.nodes.document_extractor import DocumentExtractorNode +from core.workflow.nodes.end import EndNode +from core.workflow.nodes.enums import NodeType +from core.workflow.nodes.http_request import HttpRequestNode +from core.workflow.nodes.if_else import IfElseNode +from core.workflow.nodes.iteration import IterationNode, IterationStartNode +from core.workflow.nodes.knowledge_retrieval import KnowledgeRetrievalNode +from core.workflow.nodes.list_operator import ListOperatorNode +from core.workflow.nodes.llm import LLMNode +from core.workflow.nodes.parameter_extractor import ParameterExtractorNode +from core.workflow.nodes.question_classifier import QuestionClassifierNode +from core.workflow.nodes.start import StartNode +from core.workflow.nodes.template_transform import TemplateTransformNode +from core.workflow.nodes.tool import ToolNode +from core.workflow.nodes.variable_aggregator import VariableAggregatorNode from core.workflow.nodes.variable_assigner import VariableAssignerNode -node_classes = { +node_type_classes_mapping: dict[NodeType, type[BaseNode]] = { NodeType.START: StartNode, NodeType.END: EndNode, NodeType.ANSWER: AnswerNode, @@ -34,4 +36,6 @@ node_classes = { NodeType.ITERATION_START: IterationStartNode, NodeType.PARAMETER_EXTRACTOR: ParameterExtractorNode, NodeType.CONVERSATION_VARIABLE_ASSIGNER: VariableAssignerNode, + NodeType.DOCUMENT_EXTRACTOR: DocumentExtractorNode, + NodeType.LIST_OPERATOR: ListOperatorNode, } diff --git a/api/core/workflow/nodes/parameter_extractor/__init__.py b/api/core/workflow/nodes/parameter_extractor/__init__.py index e69de29bb2..bdbf19a7d3 100644 --- a/api/core/workflow/nodes/parameter_extractor/__init__.py +++ b/api/core/workflow/nodes/parameter_extractor/__init__.py @@ -0,0 +1,3 @@ +from .parameter_extractor_node import ParameterExtractorNode + +__all__ = ["ParameterExtractorNode"] diff --git a/api/core/workflow/nodes/parameter_extractor/entities.py 
b/api/core/workflow/nodes/parameter_extractor/entities.py index 5697d7c049..a001b44dc7 100644 --- a/api/core/workflow/nodes/parameter_extractor/entities.py +++ b/api/core/workflow/nodes/parameter_extractor/entities.py @@ -1,20 +1,10 @@ from typing import Any, Literal, Optional -from pydantic import BaseModel, field_validator +from pydantic import BaseModel, Field, field_validator from core.prompt.entities.advanced_prompt_entities import MemoryConfig -from core.workflow.entities.base_node_data_entities import BaseNodeData - - -class ModelConfig(BaseModel): - """ - Model Config. - """ - - provider: str - name: str - mode: str - completion_params: dict[str, Any] = {} +from core.workflow.nodes.base import BaseNodeData +from core.workflow.nodes.llm import ModelConfig, VisionConfig class ParameterConfig(BaseModel): @@ -49,6 +39,7 @@ class ParameterExtractorNodeData(BaseNodeData): instruction: Optional[str] = None memory: Optional[MemoryConfig] = None reasoning_mode: Literal["function_call", "prompt"] + vision: VisionConfig = Field(default_factory=VisionConfig) @field_validator("reasoning_mode", mode="before") @classmethod @@ -64,7 +55,7 @@ class ParameterExtractorNodeData(BaseNodeData): parameters = {"type": "object", "properties": {}, "required": []} for parameter in self.parameters: - parameter_schema = {"description": parameter.description} + parameter_schema: dict[str, Any] = {"description": parameter.description} if parameter.type in {"string", "select"}: parameter_schema["type"] = "string" diff --git a/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py b/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py index a6454bd1cd..49546e9356 100644 --- a/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py +++ b/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py @@ -4,6 +4,7 @@ from collections.abc import Mapping, Sequence from typing import Any, Optional, cast from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity +from core.file import File from core.memory.token_buffer_memory import TokenBufferMemory from core.model_manager import ModelInstance from core.model_runtime.entities.llm_entities import LLMResult, LLMUsage @@ -22,12 +23,16 @@ from core.prompt.advanced_prompt_transform import AdvancedPromptTransform from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, CompletionModelPromptTemplate from core.prompt.simple_prompt_transform import ModelMode from core.prompt.utils.prompt_message_util import PromptMessageUtil -from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeRunResult, NodeType +from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeRunResult from core.workflow.entities.variable_pool import VariablePool -from core.workflow.nodes.llm.entities import ModelConfig -from core.workflow.nodes.llm.llm_node import LLMNode -from core.workflow.nodes.parameter_extractor.entities import ParameterExtractorNodeData -from core.workflow.nodes.parameter_extractor.prompts import ( +from core.workflow.nodes.enums import NodeType +from core.workflow.nodes.llm import LLMNode, ModelConfig +from core.workflow.utils import variable_template_parser +from extensions.ext_database import db +from models.workflow import WorkflowNodeExecutionStatus + +from .entities import ParameterExtractorNodeData +from .prompts import ( CHAT_EXAMPLE, CHAT_GENERATE_JSON_USER_MESSAGE_TEMPLATE, COMPLETION_GENERATE_JSON_PROMPT, @@ -36,9 +41,6 @@ from 
core.workflow.nodes.parameter_extractor.prompts import ( FUNCTION_CALLING_EXTRACTOR_SYSTEM_PROMPT, FUNCTION_CALLING_EXTRACTOR_USER_TEMPLATE, ) -from core.workflow.utils.variable_template_parser import VariableTemplateParser -from extensions.ext_database import db -from models.workflow import WorkflowNodeExecutionStatus class ParameterExtractorNode(LLMNode): @@ -65,33 +67,39 @@ class ParameterExtractorNode(LLMNode): } } - def _run(self) -> NodeRunResult: + def _run(self): """ Run the node. """ node_data = cast(ParameterExtractorNodeData, self.node_data) - variable = self.graph_runtime_state.variable_pool.get_any(node_data.query) - if not variable: - raise ValueError("Input variable content not found or is empty") - query = variable + variable = self.graph_runtime_state.variable_pool.get(node_data.query) + query = variable.text if variable else "" - inputs = { - "query": query, - "parameters": jsonable_encoder(node_data.parameters), - "instruction": jsonable_encoder(node_data.instruction), - } + files = ( + self._fetch_files( + selector=node_data.vision.configs.variable_selector, + ) + if node_data.vision.enabled + else [] + ) model_instance, model_config = self._fetch_model_config(node_data.model) if not isinstance(model_instance.model_type_instance, LargeLanguageModel): raise ValueError("Model is not a Large Language Model") llm_model = model_instance.model_type_instance - model_schema = llm_model.get_model_schema(model_config.model, model_config.credentials) + model_schema = llm_model.get_model_schema( + model=model_config.model, + credentials=model_config.credentials, + ) if not model_schema: raise ValueError("Model schema not found") # fetch memory - memory = self._fetch_memory(node_data.memory, self.graph_runtime_state.variable_pool, model_instance) + memory = self._fetch_memory( + node_data_memory=node_data.memory, + model_instance=model_instance, + ) if ( set(model_schema.features or []) & {ModelFeature.TOOL_CALL, ModelFeature.MULTI_TOOL_CALL} @@ -99,15 +107,33 @@ class ParameterExtractorNode(LLMNode): ): # use function call prompt_messages, prompt_message_tools = self._generate_function_call_prompt( - node_data, query, self.graph_runtime_state.variable_pool, model_config, memory + node_data=node_data, + query=query, + variable_pool=self.graph_runtime_state.variable_pool, + model_config=model_config, + memory=memory, + files=files, ) else: # use prompt engineering prompt_messages = self._generate_prompt_engineering_prompt( - node_data, query, self.graph_runtime_state.variable_pool, model_config, memory + data=node_data, + query=query, + variable_pool=self.graph_runtime_state.variable_pool, + model_config=model_config, + memory=memory, + files=files, ) + prompt_message_tools = [] + inputs = { + "query": query, + "files": [f.to_dict() for f in files], + "parameters": jsonable_encoder(node_data.parameters), + "instruction": jsonable_encoder(node_data.instruction), + } + process_data = { "model_mode": model_config.mode, "prompts": PromptMessageUtil.prompt_messages_to_prompt_for_saving( @@ -119,7 +145,7 @@ class ParameterExtractorNode(LLMNode): } try: - text, usage, tool_call = self._invoke_llm( + text, usage, tool_call = self._invoke( node_data_model=node_data.model, model_instance=model_instance, prompt_messages=prompt_messages, @@ -150,12 +176,12 @@ class ParameterExtractorNode(LLMNode): error = "Failed to extract result from function call or text response, using empty result." 
try: - result = self._validate_result(node_data, result) + result = self._validate_result(data=node_data, result=result or {}) except Exception as e: error = str(e) # transform result into standard format - result = self._transform_result(node_data, result) + result = self._transform_result(data=node_data, result=result or {}) return NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, @@ -170,7 +196,7 @@ class ParameterExtractorNode(LLMNode): llm_usage=usage, ) - def _invoke_llm( + def _invoke( self, node_data_model: ModelConfig, model_instance: ModelInstance, @@ -178,14 +204,6 @@ class ParameterExtractorNode(LLMNode): tools: list[PromptMessageTool], stop: list[str], ) -> tuple[str, LLMUsage, Optional[AssistantPromptMessage.ToolCall]]: - """ - Invoke large language model - :param node_data_model: node data model - :param model_instance: model instance - :param prompt_messages: prompt messages - :param stop: stop - :return: - """ db.session.close() invoke_result = model_instance.invoke_llm( @@ -202,6 +220,9 @@ class ParameterExtractorNode(LLMNode): raise ValueError(f"Invalid invoke result: {invoke_result}") text = invoke_result.message.content + if not isinstance(text, str): + raise ValueError(f"Invalid text content type: {type(text)}. Expected str.") + usage = invoke_result.usage tool_call = invoke_result.message.tool_calls[0] if invoke_result.message.tool_calls else None @@ -217,6 +238,7 @@ class ParameterExtractorNode(LLMNode): variable_pool: VariablePool, model_config: ModelConfigWithCredentialsEntity, memory: Optional[TokenBufferMemory], + files: Sequence[File], ) -> tuple[list[PromptMessage], list[PromptMessageTool]]: """ Generate function call prompt. @@ -234,7 +256,7 @@ class ParameterExtractorNode(LLMNode): prompt_template=prompt_template, inputs={}, query="", - files=[], + files=files, context="", memory_config=node_data.memory, memory=None, @@ -296,6 +318,7 @@ class ParameterExtractorNode(LLMNode): variable_pool: VariablePool, model_config: ModelConfigWithCredentialsEntity, memory: Optional[TokenBufferMemory], + files: Sequence[File], ) -> list[PromptMessage]: """ Generate prompt engineering prompt. @@ -303,9 +326,23 @@ class ParameterExtractorNode(LLMNode): model_mode = ModelMode.value_of(data.model.mode) if model_mode == ModelMode.COMPLETION: - return self._generate_prompt_engineering_completion_prompt(data, query, variable_pool, model_config, memory) + return self._generate_prompt_engineering_completion_prompt( + node_data=data, + query=query, + variable_pool=variable_pool, + model_config=model_config, + memory=memory, + files=files, + ) elif model_mode == ModelMode.CHAT: - return self._generate_prompt_engineering_chat_prompt(data, query, variable_pool, model_config, memory) + return self._generate_prompt_engineering_chat_prompt( + node_data=data, + query=query, + variable_pool=variable_pool, + model_config=model_config, + memory=memory, + files=files, + ) else: raise ValueError(f"Invalid model mode: {model_mode}") @@ -316,20 +353,23 @@ class ParameterExtractorNode(LLMNode): variable_pool: VariablePool, model_config: ModelConfigWithCredentialsEntity, memory: Optional[TokenBufferMemory], + files: Sequence[File], ) -> list[PromptMessage]: """ Generate completion prompt. 
""" prompt_transform = AdvancedPromptTransform(with_variable_tmpl=True) - rest_token = self._calculate_rest_token(node_data, query, variable_pool, model_config, "") + rest_token = self._calculate_rest_token( + node_data=node_data, query=query, variable_pool=variable_pool, model_config=model_config, context="" + ) prompt_template = self._get_prompt_engineering_prompt_template( - node_data, query, variable_pool, memory, rest_token + node_data=node_data, query=query, variable_pool=variable_pool, memory=memory, max_token_limit=rest_token ) prompt_messages = prompt_transform.get_prompt( prompt_template=prompt_template, inputs={"structure": json.dumps(node_data.get_parameter_json_schema())}, query="", - files=[], + files=files, context="", memory_config=node_data.memory, memory=memory, @@ -345,27 +385,30 @@ class ParameterExtractorNode(LLMNode): variable_pool: VariablePool, model_config: ModelConfigWithCredentialsEntity, memory: Optional[TokenBufferMemory], + files: Sequence[File], ) -> list[PromptMessage]: """ Generate chat prompt. """ prompt_transform = AdvancedPromptTransform(with_variable_tmpl=True) - rest_token = self._calculate_rest_token(node_data, query, variable_pool, model_config, "") + rest_token = self._calculate_rest_token( + node_data=node_data, query=query, variable_pool=variable_pool, model_config=model_config, context="" + ) prompt_template = self._get_prompt_engineering_prompt_template( - node_data, - CHAT_GENERATE_JSON_USER_MESSAGE_TEMPLATE.format( + node_data=node_data, + query=CHAT_GENERATE_JSON_USER_MESSAGE_TEMPLATE.format( structure=json.dumps(node_data.get_parameter_json_schema()), text=query ), - variable_pool, - memory, - rest_token, + variable_pool=variable_pool, + memory=memory, + max_token_limit=rest_token, ) prompt_messages = prompt_transform.get_prompt( prompt_template=prompt_template, inputs={}, query="", - files=[], + files=files, context="", memory_config=node_data.memory, memory=None, @@ -425,10 +468,11 @@ class ParameterExtractorNode(LLMNode): raise ValueError(f"Invalid `string` value for parameter {parameter.name}") if parameter.type.startswith("array"): - if not isinstance(result.get(parameter.name), list): + parameters = result.get(parameter.name) + if not isinstance(parameters, list): raise ValueError(f"Invalid `array` value for parameter {parameter.name}") nested_type = parameter.type[6:-1] - for item in result.get(parameter.name): + for item in parameters: if nested_type == "number" and not isinstance(item, int | float): raise ValueError(f"Invalid `array[number]` value for parameter {parameter.name}") if nested_type == "string" and not isinstance(item, str): @@ -565,18 +609,6 @@ class ParameterExtractorNode(LLMNode): return result - def _render_instruction(self, instruction: str, variable_pool: VariablePool) -> str: - """ - Render instruction. 
- """ - variable_template_parser = VariableTemplateParser(instruction) - inputs = {} - for selector in variable_template_parser.extract_variable_selectors(): - variable = variable_pool.get_any(selector.value_selector) - inputs[selector.variable] = variable - - return variable_template_parser.format(inputs) - def _get_function_calling_prompt_template( self, node_data: ParameterExtractorNodeData, @@ -588,9 +620,9 @@ class ParameterExtractorNode(LLMNode): model_mode = ModelMode.value_of(node_data.model.mode) input_text = query memory_str = "" - instruction = self._render_instruction(node_data.instruction or "", variable_pool) + instruction = variable_pool.convert_template(node_data.instruction or "").text - if memory: + if memory and node_data.memory and node_data.memory.window: memory_str = memory.get_history_prompt_text( max_token_limit=max_token_limit, message_limit=node_data.memory.window.size ) @@ -611,13 +643,13 @@ class ParameterExtractorNode(LLMNode): variable_pool: VariablePool, memory: Optional[TokenBufferMemory], max_token_limit: int = 2000, - ) -> list[ChatModelMessage]: + ): model_mode = ModelMode.value_of(node_data.model.mode) input_text = query memory_str = "" - instruction = self._render_instruction(node_data.instruction or "", variable_pool) + instruction = variable_pool.convert_template(node_data.instruction or "").text - if memory: + if memory and node_data.memory and node_data.memory.window: memory_str = memory.get_history_prompt_text( max_token_limit=max_token_limit, message_limit=node_data.memory.window.size ) @@ -691,7 +723,7 @@ class ParameterExtractorNode(LLMNode): ): max_tokens = ( model_config.parameters.get(parameter_rule.name) - or model_config.parameters.get(parameter_rule.use_template) + or model_config.parameters.get(parameter_rule.use_template or "") ) or 0 rest_tokens = model_context_tokens - max_tokens - curr_message_tokens @@ -712,7 +744,11 @@ class ParameterExtractorNode(LLMNode): @classmethod def _extract_variable_selector_to_variable_mapping( - cls, graph_config: Mapping[str, Any], node_id: str, node_data: ParameterExtractorNodeData + cls, + *, + graph_config: Mapping[str, Any], + node_id: str, + node_data: ParameterExtractorNodeData, ) -> Mapping[str, Sequence[str]]: """ Extract variable selector to variable mapping @@ -721,11 +757,11 @@ class ParameterExtractorNode(LLMNode): :param node_data: node data :return: """ - variable_mapping = {"query": node_data.query} + variable_mapping: dict[str, Sequence[str]] = {"query": node_data.query} if node_data.instruction: - variable_template_parser = VariableTemplateParser(template=node_data.instruction) - for selector in variable_template_parser.extract_variable_selectors(): + selectors = variable_template_parser.extract_selectors_from_template(node_data.instruction) + for selector in selectors: variable_mapping[selector.variable] = selector.value_selector variable_mapping = {node_id + "." 
+ key: value for key, value in variable_mapping.items()} diff --git a/api/core/workflow/nodes/question_classifier/__init__.py b/api/core/workflow/nodes/question_classifier/__init__.py index e69de29bb2..70414c4199 100644 --- a/api/core/workflow/nodes/question_classifier/__init__.py +++ b/api/core/workflow/nodes/question_classifier/__init__.py @@ -0,0 +1,4 @@ +from .entities import QuestionClassifierNodeData +from .question_classifier_node import QuestionClassifierNode + +__all__ = ["QuestionClassifierNodeData", "QuestionClassifierNode"] diff --git a/api/core/workflow/nodes/question_classifier/entities.py b/api/core/workflow/nodes/question_classifier/entities.py index 40f7ce7582..5219f11d26 100644 --- a/api/core/workflow/nodes/question_classifier/entities.py +++ b/api/core/workflow/nodes/question_classifier/entities.py @@ -1,39 +1,21 @@ -from typing import Any, Optional +from typing import Optional -from pydantic import BaseModel +from pydantic import BaseModel, Field from core.prompt.entities.advanced_prompt_entities import MemoryConfig -from core.workflow.entities.base_node_data_entities import BaseNodeData - - -class ModelConfig(BaseModel): - """ - Model Config. - """ - - provider: str - name: str - mode: str - completion_params: dict[str, Any] = {} +from core.workflow.nodes.base import BaseNodeData +from core.workflow.nodes.llm import ModelConfig, VisionConfig class ClassConfig(BaseModel): - """ - Class Config. - """ - id: str name: str class QuestionClassifierNodeData(BaseNodeData): - """ - Knowledge retrieval Node Data. - """ - query_variable_selector: list[str] - type: str = "question-classifier" model: ModelConfig classes: list[ClassConfig] instruction: Optional[str] = None memory: Optional[MemoryConfig] = None + vision: VisionConfig = Field(default_factory=VisionConfig) diff --git a/api/core/workflow/nodes/question_classifier/question_classifier_node.py b/api/core/workflow/nodes/question_classifier/question_classifier_node.py index 2ae58bc5f7..ee160e7c69 100644 --- a/api/core/workflow/nodes/question_classifier/question_classifier_node.py +++ b/api/core/workflow/nodes/question_classifier/question_classifier_node.py @@ -1,25 +1,30 @@ import json import logging from collections.abc import Mapping, Sequence -from typing import Any, Optional, Union, cast +from typing import TYPE_CHECKING, Any, Optional, cast from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity from core.memory.token_buffer_memory import TokenBufferMemory from core.model_manager import ModelInstance -from core.model_runtime.entities.llm_entities import LLMUsage -from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageRole -from core.model_runtime.entities.model_entities import ModelPropertyKey +from core.model_runtime.entities import LLMUsage, ModelPropertyKey, PromptMessageRole from core.model_runtime.utils.encoders import jsonable_encoder from core.prompt.advanced_prompt_transform import AdvancedPromptTransform -from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, CompletionModelPromptTemplate from core.prompt.simple_prompt_transform import ModelMode from core.prompt.utils.prompt_message_util import PromptMessageUtil -from core.prompt.utils.prompt_template_parser import PromptTemplateParser -from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeRunResult, NodeType -from core.workflow.entities.variable_pool import VariablePool -from core.workflow.nodes.llm.llm_node import LLMNode, ModelInvokeCompleted -from 
core.workflow.nodes.question_classifier.entities import QuestionClassifierNodeData -from core.workflow.nodes.question_classifier.template_prompts import ( +from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeRunResult +from core.workflow.nodes.enums import NodeType +from core.workflow.nodes.event import ModelInvokeCompletedEvent +from core.workflow.nodes.llm import ( + LLMNode, + LLMNodeChatModelMessage, + LLMNodeCompletionModelPromptTemplate, +) +from core.workflow.utils.variable_template_parser import VariableTemplateParser +from libs.json_in_md_parser import parse_and_check_json_markdown +from models.workflow import WorkflowNodeExecutionStatus + +from .entities import QuestionClassifierNodeData +from .template_prompts import ( QUESTION_CLASSIFIER_ASSISTANT_PROMPT_1, QUESTION_CLASSIFIER_ASSISTANT_PROMPT_2, QUESTION_CLASSIFIER_COMPLETION_PROMPT, @@ -28,46 +33,78 @@ from core.workflow.nodes.question_classifier.template_prompts import ( QUESTION_CLASSIFIER_USER_PROMPT_2, QUESTION_CLASSIFIER_USER_PROMPT_3, ) -from core.workflow.utils.variable_template_parser import VariableTemplateParser -from libs.json_in_md_parser import parse_and_check_json_markdown -from models.workflow import WorkflowNodeExecutionStatus + +if TYPE_CHECKING: + from core.file import File class QuestionClassifierNode(LLMNode): _node_data_cls = QuestionClassifierNodeData - node_type = NodeType.QUESTION_CLASSIFIER + _node_type = NodeType.QUESTION_CLASSIFIER - def _run(self) -> NodeRunResult: - node_data: QuestionClassifierNodeData = cast(self._node_data_cls, self.node_data) - node_data = cast(QuestionClassifierNodeData, node_data) + def _run(self): + node_data = cast(QuestionClassifierNodeData, self.node_data) variable_pool = self.graph_runtime_state.variable_pool # extract variables - variable = variable_pool.get(node_data.query_variable_selector) + variable = variable_pool.get(node_data.query_variable_selector) if node_data.query_variable_selector else None query = variable.value if variable else None variables = {"query": query} # fetch model config model_instance, model_config = self._fetch_model_config(node_data.model) # fetch memory - memory = self._fetch_memory(node_data.memory, variable_pool, model_instance) + memory = self._fetch_memory( + node_data_memory=node_data.memory, + model_instance=model_instance, + ) # fetch instruction - instruction = self._format_instruction(node_data.instruction, variable_pool) if node_data.instruction else "" - node_data.instruction = instruction + node_data.instruction = node_data.instruction or "" + node_data.instruction = variable_pool.convert_template(node_data.instruction).text + + files: Sequence[File] = ( + self._fetch_files( + selector=node_data.vision.configs.variable_selector, + ) + if node_data.vision.enabled + else [] + ) + # fetch prompt messages - prompt_messages, stop = self._fetch_prompt( - node_data=node_data, context="", query=query, memory=memory, model_config=model_config + rest_token = self._calculate_rest_token( + node_data=node_data, + query=query or "", + model_config=model_config, + context="", + ) + prompt_template = self._get_prompt_template( + node_data=node_data, + query=query or "", + memory=memory, + max_token_limit=rest_token, + ) + prompt_messages, stop = self._fetch_prompt_messages( + prompt_template=prompt_template, + system_query=query, + memory=memory, + model_config=model_config, + files=files, + vision_enabled=node_data.vision.enabled, + vision_detail=node_data.vision.configs.detail, ) # handle invoke result generator = 
self._invoke_llm( - node_data_model=node_data.model, model_instance=model_instance, prompt_messages=prompt_messages, stop=stop + node_data_model=node_data.model, + model_instance=model_instance, + prompt_messages=prompt_messages, + stop=stop, ) result_text = "" usage = LLMUsage.empty_usage() finish_reason = None for event in generator: - if isinstance(event, ModelInvokeCompleted): + if isinstance(event, ModelInvokeCompletedEvent): result_text = event.text usage = event.usage finish_reason = event.finish_reason @@ -129,7 +166,11 @@ class QuestionClassifierNode(LLMNode): @classmethod def _extract_variable_selector_to_variable_mapping( - cls, graph_config: Mapping[str, Any], node_id: str, node_data: QuestionClassifierNodeData + cls, + *, + graph_config: Mapping[str, Any], + node_id: str, + node_data: QuestionClassifierNodeData, ) -> Mapping[str, Sequence[str]]: """ Extract variable selector to variable mapping @@ -159,40 +200,6 @@ class QuestionClassifierNode(LLMNode): """ return {"type": "question-classifier", "config": {"instructions": ""}} - def _fetch_prompt( - self, - node_data: QuestionClassifierNodeData, - query: str, - context: Optional[str], - memory: Optional[TokenBufferMemory], - model_config: ModelConfigWithCredentialsEntity, - ) -> tuple[list[PromptMessage], Optional[list[str]]]: - """ - Fetch prompt - :param node_data: node data - :param query: inputs - :param context: context - :param memory: memory - :param model_config: model config - :return: - """ - prompt_transform = AdvancedPromptTransform(with_variable_tmpl=True) - rest_token = self._calculate_rest_token(node_data, query, model_config, context) - prompt_template = self._get_prompt_template(node_data, query, memory, rest_token) - prompt_messages = prompt_transform.get_prompt( - prompt_template=prompt_template, - inputs={}, - query="", - files=[], - context=context, - memory_config=node_data.memory, - memory=None, - model_config=model_config, - ) - stop = model_config.stop - - return prompt_messages, stop - def _calculate_rest_token( self, node_data: QuestionClassifierNodeData, @@ -229,7 +236,7 @@ class QuestionClassifierNode(LLMNode): ): max_tokens = ( model_config.parameters.get(parameter_rule.name) - or model_config.parameters.get(parameter_rule.use_template) + or model_config.parameters.get(parameter_rule.use_template or "") ) or 0 rest_tokens = model_context_tokens - max_tokens - curr_message_tokens @@ -243,7 +250,7 @@ class QuestionClassifierNode(LLMNode): query: str, memory: Optional[TokenBufferMemory], max_token_limit: int = 2000, - ) -> Union[list[ChatModelMessage], CompletionModelPromptTemplate]: + ): model_mode = ModelMode.value_of(node_data.model.mode) classes = node_data.classes categories = [] @@ -255,31 +262,32 @@ class QuestionClassifierNode(LLMNode): memory_str = "" if memory: memory_str = memory.get_history_prompt_text( - max_token_limit=max_token_limit, message_limit=node_data.memory.window.size + max_token_limit=max_token_limit, + message_limit=node_data.memory.window.size if node_data.memory and node_data.memory.window else None, ) - prompt_messages = [] + prompt_messages: list[LLMNodeChatModelMessage] = [] if model_mode == ModelMode.CHAT: - system_prompt_messages = ChatModelMessage( + system_prompt_messages = LLMNodeChatModelMessage( role=PromptMessageRole.SYSTEM, text=QUESTION_CLASSIFIER_SYSTEM_PROMPT.format(histories=memory_str) ) prompt_messages.append(system_prompt_messages) - user_prompt_message_1 = ChatModelMessage( + user_prompt_message_1 = LLMNodeChatModelMessage( role=PromptMessageRole.USER, 
text=QUESTION_CLASSIFIER_USER_PROMPT_1 ) prompt_messages.append(user_prompt_message_1) - assistant_prompt_message_1 = ChatModelMessage( + assistant_prompt_message_1 = LLMNodeChatModelMessage( role=PromptMessageRole.ASSISTANT, text=QUESTION_CLASSIFIER_ASSISTANT_PROMPT_1 ) prompt_messages.append(assistant_prompt_message_1) - user_prompt_message_2 = ChatModelMessage( + user_prompt_message_2 = LLMNodeChatModelMessage( role=PromptMessageRole.USER, text=QUESTION_CLASSIFIER_USER_PROMPT_2 ) prompt_messages.append(user_prompt_message_2) - assistant_prompt_message_2 = ChatModelMessage( + assistant_prompt_message_2 = LLMNodeChatModelMessage( role=PromptMessageRole.ASSISTANT, text=QUESTION_CLASSIFIER_ASSISTANT_PROMPT_2 ) prompt_messages.append(assistant_prompt_message_2) - user_prompt_message_3 = ChatModelMessage( + user_prompt_message_3 = LLMNodeChatModelMessage( role=PromptMessageRole.USER, text=QUESTION_CLASSIFIER_USER_PROMPT_3.format( input_text=input_text, @@ -290,7 +298,7 @@ class QuestionClassifierNode(LLMNode): prompt_messages.append(user_prompt_message_3) return prompt_messages elif model_mode == ModelMode.COMPLETION: - return CompletionModelPromptTemplate( + return LLMNodeCompletionModelPromptTemplate( text=QUESTION_CLASSIFIER_COMPLETION_PROMPT.format( histories=memory_str, input_text=input_text, @@ -302,23 +310,3 @@ class QuestionClassifierNode(LLMNode): else: raise ValueError(f"Model mode {model_mode} not support.") - - def _format_instruction(self, instruction: str, variable_pool: VariablePool) -> str: - inputs = {} - - variable_selectors = [] - variable_template_parser = VariableTemplateParser(template=instruction) - variable_selectors.extend(variable_template_parser.extract_variable_selectors()) - for variable_selector in variable_selectors: - variable = variable_pool.get(variable_selector.value_selector) - variable_value = variable.value if variable else None - if variable_value is None: - raise ValueError(f"Variable {variable_selector.variable} not found") - - inputs[variable_selector.variable] = variable_value - - prompt_template = PromptTemplateParser(template=instruction, with_variable_tmpl=True) - prompt_inputs = {k: inputs[k] for k in prompt_template.variable_keys if k in inputs} - - instruction = prompt_template.format(prompt_inputs) - return instruction diff --git a/api/core/workflow/nodes/start/__init__.py b/api/core/workflow/nodes/start/__init__.py index e69de29bb2..5411780423 100644 --- a/api/core/workflow/nodes/start/__init__.py +++ b/api/core/workflow/nodes/start/__init__.py @@ -0,0 +1,3 @@ +from .start_node import StartNode + +__all__ = ["StartNode"] diff --git a/api/core/workflow/nodes/start/entities.py b/api/core/workflow/nodes/start/entities.py index 11d2ebe5dd..594d1b7bab 100644 --- a/api/core/workflow/nodes/start/entities.py +++ b/api/core/workflow/nodes/start/entities.py @@ -3,7 +3,7 @@ from collections.abc import Sequence from pydantic import Field from core.app.app_config.entities import VariableEntity -from core.workflow.entities.base_node_data_entities import BaseNodeData +from core.workflow.nodes.base import BaseNodeData class StartNodeData(BaseNodeData): diff --git a/api/core/workflow/nodes/start/start_node.py b/api/core/workflow/nodes/start/start_node.py index 96c887c58d..a7b91e82bb 100644 --- a/api/core/workflow/nodes/start/start_node.py +++ b/api/core/workflow/nodes/start/start_node.py @@ -1,25 +1,24 @@ from collections.abc import Mapping, Sequence from typing import Any -from core.workflow.entities.node_entities import NodeRunResult, NodeType -from 
core.workflow.entities.variable_pool import SYSTEM_VARIABLE_NODE_ID -from core.workflow.nodes.base_node import BaseNode +from core.workflow.constants import SYSTEM_VARIABLE_NODE_ID +from core.workflow.entities.node_entities import NodeRunResult +from core.workflow.nodes.base import BaseNode +from core.workflow.nodes.enums import NodeType from core.workflow.nodes.start.entities import StartNodeData from models.workflow import WorkflowNodeExecutionStatus -class StartNode(BaseNode): +class StartNode(BaseNode[StartNodeData]): _node_data_cls = StartNodeData _node_type = NodeType.START def _run(self) -> NodeRunResult: - """ - Run node - :return: - """ node_inputs = dict(self.graph_runtime_state.variable_pool.user_inputs) system_inputs = self.graph_runtime_state.variable_pool.system_variables + # TODO: System variables should be directly accessible, no need for special handling + # Set system variables as node outputs. for var in system_inputs: node_inputs[SYSTEM_VARIABLE_NODE_ID + "." + var] = system_inputs[var] @@ -27,13 +26,10 @@ class StartNode(BaseNode): @classmethod def _extract_variable_selector_to_variable_mapping( - cls, graph_config: Mapping[str, Any], node_id: str, node_data: StartNodeData + cls, + *, + graph_config: Mapping[str, Any], + node_id: str, + node_data: StartNodeData, ) -> Mapping[str, Sequence[str]]: - """ - Extract variable selector to variable mapping - :param graph_config: graph config - :param node_id: node id - :param node_data: node data - :return: - """ return {} diff --git a/api/core/workflow/nodes/template_transform/__init__.py b/api/core/workflow/nodes/template_transform/__init__.py index e69de29bb2..43863b9d59 100644 --- a/api/core/workflow/nodes/template_transform/__init__.py +++ b/api/core/workflow/nodes/template_transform/__init__.py @@ -0,0 +1,3 @@ +from .template_transform_node import TemplateTransformNode + +__all__ = ["TemplateTransformNode"] diff --git a/api/core/workflow/nodes/template_transform/entities.py b/api/core/workflow/nodes/template_transform/entities.py index e934d69fa3..96adff6ffa 100644 --- a/api/core/workflow/nodes/template_transform/entities.py +++ b/api/core/workflow/nodes/template_transform/entities.py @@ -1,5 +1,5 @@ -from core.workflow.entities.base_node_data_entities import BaseNodeData from core.workflow.entities.variable_entities import VariableSelector +from core.workflow.nodes.base import BaseNodeData class TemplateTransformNodeData(BaseNodeData): diff --git a/api/core/workflow/nodes/template_transform/template_transform_node.py b/api/core/workflow/nodes/template_transform/template_transform_node.py index 32c99e0d1c..0ee66784c5 100644 --- a/api/core/workflow/nodes/template_transform/template_transform_node.py +++ b/api/core/workflow/nodes/template_transform/template_transform_node.py @@ -1,17 +1,18 @@ import os from collections.abc import Mapping, Sequence -from typing import Any, Optional, cast +from typing import Any, Optional from core.helper.code_executor.code_executor import CodeExecutionError, CodeExecutor, CodeLanguage -from core.workflow.entities.node_entities import NodeRunResult, NodeType -from core.workflow.nodes.base_node import BaseNode +from core.workflow.entities.node_entities import NodeRunResult +from core.workflow.nodes.base import BaseNode +from core.workflow.nodes.enums import NodeType from core.workflow.nodes.template_transform.entities import TemplateTransformNodeData from models.workflow import WorkflowNodeExecutionStatus MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = 
int(os.environ.get("TEMPLATE_TRANSFORM_MAX_LENGTH", "80000")) -class TemplateTransformNode(BaseNode): +class TemplateTransformNode(BaseNode[TemplateTransformNodeData]): _node_data_cls = TemplateTransformNodeData _node_type = NodeType.TEMPLATE_TRANSFORM @@ -28,22 +29,21 @@ class TemplateTransformNode(BaseNode): } def _run(self) -> NodeRunResult: - """ - Run node - """ - node_data = self.node_data - node_data: TemplateTransformNodeData = cast(self._node_data_cls, node_data) - # Get variables variables = {} - for variable_selector in node_data.variables: + for variable_selector in self.node_data.variables: variable_name = variable_selector.variable - value = self.graph_runtime_state.variable_pool.get_any(variable_selector.value_selector) - variables[variable_name] = value + value = self.graph_runtime_state.variable_pool.get(variable_selector.value_selector) + if value is None: + return NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + error=f"Variable {variable_name} not found in variable pool", + ) + variables[variable_name] = value.to_object() # Run code try: result = CodeExecutor.execute_workflow_code_template( - language=CodeLanguage.JINJA2, code=node_data.template, inputs=variables + language=CodeLanguage.JINJA2, code=self.node_data.template, inputs=variables ) except CodeExecutionError as e: return NodeRunResult(inputs=variables, status=WorkflowNodeExecutionStatus.FAILED, error=str(e)) @@ -61,7 +61,7 @@ class TemplateTransformNode(BaseNode): @classmethod def _extract_variable_selector_to_variable_mapping( - cls, graph_config: Mapping[str, Any], node_id: str, node_data: TemplateTransformNodeData + cls, *, graph_config: Mapping[str, Any], node_id: str, node_data: TemplateTransformNodeData ) -> Mapping[str, Sequence[str]]: """ Extract variable selector to variable mapping diff --git a/api/core/workflow/nodes/tool/__init__.py b/api/core/workflow/nodes/tool/__init__.py index e69de29bb2..f4982e655d 100644 --- a/api/core/workflow/nodes/tool/__init__.py +++ b/api/core/workflow/nodes/tool/__init__.py @@ -0,0 +1,3 @@ +from .tool_node import ToolNode + +__all__ = ["ToolNode"] diff --git a/api/core/workflow/nodes/tool/entities.py b/api/core/workflow/nodes/tool/entities.py index 9d222b10b9..9e29791481 100644 --- a/api/core/workflow/nodes/tool/entities.py +++ b/api/core/workflow/nodes/tool/entities.py @@ -3,7 +3,7 @@ from typing import Any, Literal, Union from pydantic import BaseModel, field_validator from pydantic_core.core_schema import ValidationInfo -from core.workflow.entities.base_node_data_entities import BaseNodeData +from core.workflow.nodes.base import BaseNodeData class ToolEntity(BaseModel): @@ -51,7 +51,4 @@ class ToolNodeData(BaseNodeData, ToolEntity): raise ValueError("value must be a string, int, float, or bool") return typ - """ - Tool Node Schema - """ tool_parameters: dict[str, ToolInput] diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index 3b86b29cf8..df22130d69 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -1,24 +1,28 @@ from collections.abc import Mapping, Sequence from os import path -from typing import Any, cast +from typing import Any + +from sqlalchemy import select +from sqlalchemy.orm import Session -from core.app.segments import ArrayAnySegment, ArrayAnyVariable, parser from core.callback_handler.workflow_tool_callback_handler import DifyWorkflowCallbackHandler -from core.file.file_obj import FileTransferMethod, FileType, FileVar +from 
core.file.models import File, FileTransferMethod, FileType from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter from core.tools.tool_engine import ToolEngine from core.tools.tool_manager import ToolManager from core.tools.utils.message_transformer import ToolFileMessageTransformer -from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeRunResult, NodeType +from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeRunResult from core.workflow.entities.variable_pool import VariablePool -from core.workflow.enums import SystemVariableKey -from core.workflow.nodes.base_node import BaseNode +from core.workflow.nodes.base import BaseNode +from core.workflow.nodes.enums import NodeType from core.workflow.nodes.tool.entities import ToolNodeData from core.workflow.utils.variable_template_parser import VariableTemplateParser -from models import WorkflowNodeExecutionStatus +from extensions.ext_database import db +from models import ToolFile +from models.workflow import WorkflowNodeExecutionStatus -class ToolNode(BaseNode): +class ToolNode(BaseNode[ToolNodeData]): """ Tool Node """ @@ -27,37 +31,38 @@ class ToolNode(BaseNode): _node_type = NodeType.TOOL def _run(self) -> NodeRunResult: - """ - Run the tool node - """ - - node_data = cast(ToolNodeData, self.node_data) - # fetch tool icon - tool_info = {"provider_type": node_data.provider_type, "provider_id": node_data.provider_id} + tool_info = { + "provider_type": self.node_data.provider_type, + "provider_id": self.node_data.provider_id, + } # get tool runtime try: tool_runtime = ToolManager.get_workflow_tool_runtime( - self.tenant_id, self.app_id, self.node_id, node_data, self.invoke_from + self.tenant_id, self.app_id, self.node_id, self.node_data, self.invoke_from ) except Exception as e: return NodeRunResult( status=WorkflowNodeExecutionStatus.FAILED, inputs={}, - metadata={NodeRunMetadataKey.TOOL_INFO: tool_info}, + metadata={ + NodeRunMetadataKey.TOOL_INFO: tool_info, + }, error=f"Failed to get tool runtime: {str(e)}", ) # get parameters tool_parameters = tool_runtime.get_runtime_parameters() or [] parameters = self._generate_parameters( - tool_parameters=tool_parameters, variable_pool=self.graph_runtime_state.variable_pool, node_data=node_data + tool_parameters=tool_parameters, + variable_pool=self.graph_runtime_state.variable_pool, + node_data=self.node_data, ) parameters_for_log = self._generate_parameters( tool_parameters=tool_parameters, variable_pool=self.graph_runtime_state.variable_pool, - node_data=node_data, + node_data=self.node_data, for_log=True, ) @@ -74,7 +79,9 @@ class ToolNode(BaseNode): return NodeRunResult( status=WorkflowNodeExecutionStatus.FAILED, inputs=parameters_for_log, - metadata={NodeRunMetadataKey.TOOL_INFO: tool_info}, + metadata={ + NodeRunMetadataKey.TOOL_INFO: tool_info, + }, error=f"Failed to invoke tool: {str(e)}", ) @@ -83,8 +90,14 @@ class ToolNode(BaseNode): return NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, - outputs={"text": plain_text, "files": files, "json": json}, - metadata={NodeRunMetadataKey.TOOL_INFO: tool_info}, + outputs={ + "text": plain_text, + "files": files, + "json": json, + }, + metadata={ + NodeRunMetadataKey.TOOL_INFO: tool_info, + }, inputs=parameters_for_log, ) @@ -116,29 +129,25 @@ class ToolNode(BaseNode): if not parameter: result[parameter_name] = None continue - if parameter.type == ToolParameter.ToolParameterType.FILE: - result[parameter_name] = [v.to_dict() for v in self._fetch_files(variable_pool)] + 
tool_input = node_data.tool_parameters[parameter_name] + if tool_input.type == "variable": + variable = variable_pool.get(tool_input.value) + if variable is None: + raise ValueError(f"variable {tool_input.value} not exists") + parameter_value = variable.value + elif tool_input.type in {"mixed", "constant"}: + segment_group = variable_pool.convert_template(str(tool_input.value)) + parameter_value = segment_group.log if for_log else segment_group.text else: - tool_input = node_data.tool_parameters[parameter_name] - if tool_input.type == "variable": - # TODO: check if the variable exists in the variable pool - parameter_value = variable_pool.get(tool_input.value).value - else: - segment_group = parser.convert_template( - template=str(tool_input.value), - variable_pool=variable_pool, - ) - parameter_value = segment_group.log if for_log else segment_group.text - result[parameter_name] = parameter_value + raise ValueError(f"unknown tool input type '{tool_input.type}'") + result[parameter_name] = parameter_value return result - def _fetch_files(self, variable_pool: VariablePool) -> list[FileVar]: - variable = variable_pool.get(["sys", SystemVariableKey.FILES.value]) - assert isinstance(variable, ArrayAnyVariable | ArrayAnySegment) - return list(variable.value) if variable else [] - - def _convert_tool_messages(self, messages: list[ToolInvokeMessage]) -> tuple[str, list[FileVar], list[dict]]: + def _convert_tool_messages( + self, + messages: list[ToolInvokeMessage], + ): """ Convert ToolInvokeMessages into tuple[plain_text, files] """ @@ -156,50 +165,86 @@ class ToolNode(BaseNode): return plain_text, files, json - def _extract_tool_response_binary(self, tool_response: list[ToolInvokeMessage]) -> list[FileVar]: + def _extract_tool_response_binary(self, tool_response: list[ToolInvokeMessage]) -> list[File]: """ Extract tool response binary """ result = [] - for response in tool_response: if response.type in {ToolInvokeMessage.MessageType.IMAGE_LINK, ToolInvokeMessage.MessageType.IMAGE}: - url = response.message - ext = path.splitext(url)[1] - mimetype = response.meta.get("mime_type", "image/jpeg") - filename = response.save_as or url.split("/")[-1] + url = str(response.message) if response.message else None + ext = path.splitext(url)[1] if url else ".bin" + tool_file_id = str(url).split("/")[-1].split(".")[0] transfer_method = response.meta.get("transfer_method", FileTransferMethod.TOOL_FILE) - # get tool file id - tool_file_id = url.split("/")[-1].split(".")[0] + with Session(db.engine) as session: + stmt = select(ToolFile).where(ToolFile.id == tool_file_id) + tool_file = session.scalar(stmt) + if tool_file is None: + raise ValueError(f"tool file {tool_file_id} not exists") + result.append( - FileVar( + File( tenant_id=self.tenant_id, type=FileType.IMAGE, transfer_method=transfer_method, - url=url, - related_id=tool_file_id, - filename=filename, + remote_url=url, + related_id=tool_file.id, + filename=tool_file.name, extension=ext, - mime_type=mimetype, + mime_type=tool_file.mimetype, + size=tool_file.size, ) ) elif response.type == ToolInvokeMessage.MessageType.BLOB: # get tool file id - tool_file_id = response.message.split("/")[-1].split(".")[0] + tool_file_id = str(response.message).split("/")[-1].split(".")[0] + with Session(db.engine) as session: + stmt = select(ToolFile).where(ToolFile.id == tool_file_id) + tool_file = session.scalar(stmt) + if tool_file is None: + raise ValueError(f"tool file {tool_file_id} not exists") result.append( - FileVar( + File( tenant_id=self.tenant_id, 
type=FileType.IMAGE, transfer_method=FileTransferMethod.TOOL_FILE, - related_id=tool_file_id, - filename=response.save_as, + related_id=tool_file.id, + filename=tool_file.name, extension=path.splitext(response.save_as)[1], - mime_type=response.meta.get("mime_type", "application/octet-stream"), + mime_type=tool_file.mimetype, + size=tool_file.size, ) ) elif response.type == ToolInvokeMessage.MessageType.LINK: - pass # TODO: + url = str(response.message) + transfer_method = FileTransferMethod.TOOL_FILE + tool_file_id = url.split("/")[-1].split(".")[0] + with Session(db.engine) as session: + stmt = select(ToolFile).where(ToolFile.id == tool_file_id) + tool_file = session.scalar(stmt) + if tool_file is None: + raise ValueError(f"tool file {tool_file_id} not exists") + if "." in url: + extension = "." + url.split("/")[-1].split(".")[1] + else: + extension = ".bin" + file = File( + tenant_id=self.tenant_id, + type=FileType(response.save_as), + transfer_method=transfer_method, + remote_url=url, + filename=tool_file.name, + related_id=tool_file.id, + extension=extension, + mime_type=tool_file.mimetype, + size=tool_file.size, + ) + result.append(file) + + elif response.type == ToolInvokeMessage.MessageType.FILE: + assert response.meta is not None + result.append(response.meta["file"]) return result @@ -218,12 +263,16 @@ class ToolNode(BaseNode): ] ) - def _extract_tool_response_json(self, tool_response: list[ToolInvokeMessage]) -> list[dict]: + def _extract_tool_response_json(self, tool_response: list[ToolInvokeMessage]): return [message.message for message in tool_response if message.type == ToolInvokeMessage.MessageType.JSON] @classmethod def _extract_variable_selector_to_variable_mapping( - cls, graph_config: Mapping[str, Any], node_id: str, node_data: ToolNodeData + cls, + *, + graph_config: Mapping[str, Any], + node_id: str, + node_data: ToolNodeData, ) -> Mapping[str, Sequence[str]]: """ Extract variable selector to variable mapping @@ -236,7 +285,7 @@ class ToolNode(BaseNode): for parameter_name in node_data.tool_parameters: input = node_data.tool_parameters[parameter_name] if input.type == "mixed": - selectors = VariableTemplateParser(input.value).extract_variable_selectors() + selectors = VariableTemplateParser(str(input.value)).extract_variable_selectors() for selector in selectors: result[selector.variable] = selector.value_selector elif input.type == "variable": diff --git a/api/core/workflow/nodes/variable_aggregator/__init__.py b/api/core/workflow/nodes/variable_aggregator/__init__.py index e69de29bb2..0b6bf2a5b6 100644 --- a/api/core/workflow/nodes/variable_aggregator/__init__.py +++ b/api/core/workflow/nodes/variable_aggregator/__init__.py @@ -0,0 +1,3 @@ +from .variable_aggregator_node import VariableAggregatorNode + +__all__ = ["VariableAggregatorNode"] diff --git a/api/core/workflow/nodes/variable_aggregator/entities.py b/api/core/workflow/nodes/variable_aggregator/entities.py index eb893a04e3..71a930e6b0 100644 --- a/api/core/workflow/nodes/variable_aggregator/entities.py +++ b/api/core/workflow/nodes/variable_aggregator/entities.py @@ -2,7 +2,7 @@ from typing import Literal, Optional from pydantic import BaseModel -from core.workflow.entities.base_node_data_entities import BaseNodeData +from core.workflow.nodes.base import BaseNodeData class AdvancedSettings(BaseModel): diff --git a/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py b/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py index f03eae257a..031a7b8309 100644 --- 
a/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py +++ b/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py @@ -1,45 +1,45 @@ from collections.abc import Mapping, Sequence -from typing import Any, cast +from typing import Any -from core.workflow.entities.node_entities import NodeRunResult, NodeType -from core.workflow.nodes.base_node import BaseNode +from core.workflow.entities.node_entities import NodeRunResult +from core.workflow.nodes.base import BaseNode +from core.workflow.nodes.enums import NodeType from core.workflow.nodes.variable_aggregator.entities import VariableAssignerNodeData from models.workflow import WorkflowNodeExecutionStatus -class VariableAggregatorNode(BaseNode): +class VariableAggregatorNode(BaseNode[VariableAssignerNodeData]): _node_data_cls = VariableAssignerNodeData _node_type = NodeType.VARIABLE_AGGREGATOR def _run(self) -> NodeRunResult: - node_data = cast(VariableAssignerNodeData, self.node_data) # Get variables outputs = {} inputs = {} - if not node_data.advanced_settings or not node_data.advanced_settings.group_enabled: - for selector in node_data.variables: - variable = self.graph_runtime_state.variable_pool.get_any(selector) + if not self.node_data.advanced_settings or not self.node_data.advanced_settings.group_enabled: + for selector in self.node_data.variables: + variable = self.graph_runtime_state.variable_pool.get(selector) if variable is not None: - outputs = {"output": variable} + outputs = {"output": variable.to_object()} - inputs = {".".join(selector[1:]): variable} + inputs = {".".join(selector[1:]): variable.to_object()} break else: - for group in node_data.advanced_settings.groups: + for group in self.node_data.advanced_settings.groups: for selector in group.variables: - variable = self.graph_runtime_state.variable_pool.get_any(selector) + variable = self.graph_runtime_state.variable_pool.get(selector) if variable is not None: - outputs[group.group_name] = {"output": variable} - inputs[".".join(selector[1:])] = variable + outputs[group.group_name] = {"output": variable.to_object()} + inputs[".".join(selector[1:])] = variable.to_object() break return NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED, outputs=outputs, inputs=inputs) @classmethod def _extract_variable_selector_to_variable_mapping( - cls, graph_config: Mapping[str, Any], node_id: str, node_data: VariableAssignerNodeData + cls, *, graph_config: Mapping[str, Any], node_id: str, node_data: VariableAssignerNodeData ) -> Mapping[str, Sequence[str]]: """ Extract variable selector to variable mapping diff --git a/api/core/workflow/nodes/variable_assigner/node.py b/api/core/workflow/nodes/variable_assigner/node.py index 3969299795..4e66f640df 100644 --- a/api/core/workflow/nodes/variable_assigner/node.py +++ b/api/core/workflow/nodes/variable_assigner/node.py @@ -1,40 +1,38 @@ -from typing import cast - from sqlalchemy import select from sqlalchemy.orm import Session -from core.app.segments import SegmentType, Variable, factory -from core.workflow.entities.base_node_data_entities import BaseNodeData -from core.workflow.entities.node_entities import NodeRunResult, NodeType -from core.workflow.nodes.base_node import BaseNode +from core.variables import SegmentType, Variable +from core.workflow.entities.node_entities import NodeRunResult +from core.workflow.nodes.base import BaseNode, BaseNodeData +from core.workflow.nodes.enums import NodeType from extensions.ext_database import db -from models import ConversationVariable, 
WorkflowNodeExecutionStatus +from factories import variable_factory +from models import ConversationVariable +from models.workflow import WorkflowNodeExecutionStatus from .exc import VariableAssignerNodeError from .node_data import VariableAssignerData, WriteMode -class VariableAssignerNode(BaseNode): +class VariableAssignerNode(BaseNode[VariableAssignerData]): _node_data_cls: type[BaseNodeData] = VariableAssignerData _node_type: NodeType = NodeType.CONVERSATION_VARIABLE_ASSIGNER def _run(self) -> NodeRunResult: - data = cast(VariableAssignerData, self.node_data) - # Should be String, Number, Object, ArrayString, ArrayNumber, ArrayObject - original_variable = self.graph_runtime_state.variable_pool.get(data.assigned_variable_selector) + original_variable = self.graph_runtime_state.variable_pool.get(self.node_data.assigned_variable_selector) if not isinstance(original_variable, Variable): raise VariableAssignerNodeError("assigned variable not found") - match data.write_mode: + match self.node_data.write_mode: case WriteMode.OVER_WRITE: - income_value = self.graph_runtime_state.variable_pool.get(data.input_variable_selector) + income_value = self.graph_runtime_state.variable_pool.get(self.node_data.input_variable_selector) if not income_value: raise VariableAssignerNodeError("input value not found") updated_variable = original_variable.model_copy(update={"value": income_value.value}) case WriteMode.APPEND: - income_value = self.graph_runtime_state.variable_pool.get(data.input_variable_selector) + income_value = self.graph_runtime_state.variable_pool.get(self.node_data.input_variable_selector) if not income_value: raise VariableAssignerNodeError("input value not found") updated_value = original_variable.value + [income_value.value] @@ -45,10 +43,10 @@ class VariableAssignerNode(BaseNode): updated_variable = original_variable.model_copy(update={"value": income_value.to_object()}) case _: - raise VariableAssignerNodeError(f"unsupported write mode: {data.write_mode}") + raise VariableAssignerNodeError(f"unsupported write mode: {self.node_data.write_mode}") # Over write the variable. - self.graph_runtime_state.variable_pool.add(data.assigned_variable_selector, updated_variable) + self.graph_runtime_state.variable_pool.add(self.node_data.assigned_variable_selector, updated_variable) # TODO: Move database operation to the pipeline. # Update conversation variable. 
@@ -80,12 +78,12 @@ def update_conversation_variable(conversation_id: str, variable: Variable): def get_zero_value(t: SegmentType): match t: case SegmentType.ARRAY_OBJECT | SegmentType.ARRAY_STRING | SegmentType.ARRAY_NUMBER: - return factory.build_segment([]) + return variable_factory.build_segment([]) case SegmentType.OBJECT: - return factory.build_segment({}) + return variable_factory.build_segment({}) case SegmentType.STRING: - return factory.build_segment("") + return variable_factory.build_segment("") case SegmentType.NUMBER: - return factory.build_segment(0) + return variable_factory.build_segment(0) case _: raise VariableAssignerNodeError(f"unsupported variable type: {t}") diff --git a/api/core/workflow/nodes/variable_assigner/node_data.py b/api/core/workflow/nodes/variable_assigner/node_data.py index 8ac8eadf7c..70ae29d45f 100644 --- a/api/core/workflow/nodes/variable_assigner/node_data.py +++ b/api/core/workflow/nodes/variable_assigner/node_data.py @@ -2,7 +2,7 @@ from collections.abc import Sequence from enum import Enum from typing import Optional -from core.workflow.entities.base_node_data_entities import BaseNodeData +from core.workflow.nodes.base import BaseNodeData class WriteMode(str, Enum): diff --git a/api/core/workflow/utils/condition/entities.py b/api/core/workflow/utils/condition/entities.py index b8e8b881a5..799c735f54 100644 --- a/api/core/workflow/utils/condition/entities.py +++ b/api/core/workflow/utils/condition/entities.py @@ -1,32 +1,49 @@ -from typing import Literal, Optional +from collections.abc import Sequence +from typing import Literal -from pydantic import BaseModel +from pydantic import BaseModel, Field + +SupportedComparisonOperator = Literal[ + # for string or array + "contains", + "not contains", + "start with", + "end with", + "is", + "is not", + "empty", + "not empty", + "in", + "not in", + "all of", + # for number + "=", + "≠", + ">", + "<", + "≥", + "≤", + "null", + "not null", + # for file + "exists", + "not exists", +] + + +class SubCondition(BaseModel): + key: str + comparison_operator: SupportedComparisonOperator + value: str | Sequence[str] | None = None + + +class SubVariableCondition(BaseModel): + logical_operator: Literal["and", "or"] + conditions: list[SubCondition] = Field(default=list) class Condition(BaseModel): - """ - Condition entity - """ - variable_selector: list[str] - comparison_operator: Literal[ - # for string or array - "contains", - "not contains", - "start with", - "end with", - "is", - "is not", - "empty", - "not empty", - # for number - "=", - "≠", - ">", - "<", - "≥", - "≤", - "null", - "not null", - ] - value: Optional[str] = None + comparison_operator: SupportedComparisonOperator + value: str | Sequence[str] | None = None + sub_variable_condition: SubVariableCondition | None = None diff --git a/api/core/workflow/utils/condition/processor.py b/api/core/workflow/utils/condition/processor.py index 395ee82478..19473f39d2 100644 --- a/api/core/workflow/utils/condition/processor.py +++ b/api/core/workflow/utils/condition/processor.py @@ -1,381 +1,385 @@ from collections.abc import Sequence -from typing import Any, Optional +from typing import Any, Literal -from core.file.file_obj import FileVar +from core.file import FileAttribute, file_manager +from core.variables import ArrayFileSegment from core.workflow.entities.variable_pool import VariablePool -from core.workflow.utils.condition.entities import Condition -from core.workflow.utils.variable_template_parser import VariableTemplateParser + +from .entities import 
Condition, SubCondition, SupportedComparisonOperator class ConditionProcessor: - def process_conditions(self, variable_pool: VariablePool, conditions: Sequence[Condition]): - input_conditions = [] - group_result = [] - - index = 0 - for condition in conditions: - index += 1 - actual_value = variable_pool.get_any(condition.variable_selector) - - expected_value = None - if condition.value is not None: - variable_template_parser = VariableTemplateParser(template=condition.value) - variable_selectors = variable_template_parser.extract_variable_selectors() - if variable_selectors: - for variable_selector in variable_selectors: - value = variable_pool.get_any(variable_selector.value_selector) - expected_value = variable_template_parser.format({variable_selector.variable: value}) - - if expected_value is None: - expected_value = condition.value - else: - expected_value = condition.value - - comparison_operator = condition.comparison_operator - input_conditions.append( - { - "actual_value": actual_value, - "expected_value": expected_value, - "comparison_operator": comparison_operator, - } - ) - - result = self.evaluate_condition(actual_value, comparison_operator, expected_value) - group_result.append(result) - - return input_conditions, group_result - - def evaluate_condition( + def process_conditions( self, - actual_value: Optional[str | int | float | dict[Any, Any] | list[Any] | FileVar | None], - comparison_operator: str, - expected_value: Optional[str] = None, - ) -> bool: - """ - Evaluate condition - :param actual_value: actual value - :param expected_value: expected value - :param comparison_operator: comparison operator + *, + variable_pool: VariablePool, + conditions: Sequence[Condition], + operator: Literal["and", "or"], + ): + input_conditions = [] + group_results = [] - :return: bool - """ - if comparison_operator == "contains": - return self._assert_contains(actual_value, expected_value) - elif comparison_operator == "not contains": - return self._assert_not_contains(actual_value, expected_value) - elif comparison_operator == "start with": - return self._assert_start_with(actual_value, expected_value) - elif comparison_operator == "end with": - return self._assert_end_with(actual_value, expected_value) - elif comparison_operator == "is": - return self._assert_is(actual_value, expected_value) - elif comparison_operator == "is not": - return self._assert_is_not(actual_value, expected_value) - elif comparison_operator == "empty": - return self._assert_empty(actual_value) - elif comparison_operator == "not empty": - return self._assert_not_empty(actual_value) - elif comparison_operator == "=": - return self._assert_equal(actual_value, expected_value) - elif comparison_operator == "≠": - return self._assert_not_equal(actual_value, expected_value) - elif comparison_operator == ">": - return self._assert_greater_than(actual_value, expected_value) - elif comparison_operator == "<": - return self._assert_less_than(actual_value, expected_value) - elif comparison_operator == "≥": - return self._assert_greater_than_or_equal(actual_value, expected_value) - elif comparison_operator == "≤": - return self._assert_less_than_or_equal(actual_value, expected_value) - elif comparison_operator == "null": - return self._assert_null(actual_value) - elif comparison_operator == "not null": - return self._assert_not_null(actual_value) - else: - raise ValueError(f"Invalid comparison operator: {comparison_operator}") + for condition in conditions: + variable = variable_pool.get(condition.variable_selector) + if 
variable is None: + raise ValueError(f"Variable {condition.variable_selector} not found") - def _assert_contains(self, actual_value: Optional[str | list], expected_value: str) -> bool: - """ - Assert contains - :param actual_value: actual value - :param expected_value: expected value - :return: - """ - if not actual_value: - return False + if isinstance(variable, ArrayFileSegment) and condition.comparison_operator in { + "contains", + "not contains", + "all of", + }: + # check sub conditions + if not condition.sub_variable_condition: + raise ValueError("Sub variable is required") + result = _process_sub_conditions( + variable=variable, + sub_conditions=condition.sub_variable_condition.conditions, + operator=condition.sub_variable_condition.logical_operator, + ) + elif condition.comparison_operator in { + "exists", + "not exists", + }: + result = _evaluate_condition( + value=variable.value, + operator=condition.comparison_operator, + expected=None, + ) + else: + actual_value = variable.value if variable else None + expected_value = condition.value + if isinstance(expected_value, str): + expected_value = variable_pool.convert_template(expected_value).text + input_conditions.append( + { + "actual_value": actual_value, + "expected_value": expected_value, + "comparison_operator": condition.comparison_operator, + } + ) + result = _evaluate_condition( + value=actual_value, + operator=condition.comparison_operator, + expected=expected_value, + ) + group_results.append(result) - if not isinstance(actual_value, str | list): - raise ValueError("Invalid actual value type: string or array") + final_result = all(group_results) if operator == "and" else any(group_results) + return input_conditions, group_results, final_result - if expected_value not in actual_value: - return False - return True - def _assert_not_contains(self, actual_value: Optional[str | list], expected_value: str) -> bool: - """ - Assert not contains - :param actual_value: actual value - :param expected_value: expected value - :return: - """ - if not actual_value: - return True +def _evaluate_condition( + *, + operator: SupportedComparisonOperator, + value: Any, + expected: str | Sequence[str] | None, +) -> bool: + match operator: + case "contains": + return _assert_contains(value=value, expected=expected) + case "not contains": + return _assert_not_contains(value=value, expected=expected) + case "start with": + return _assert_start_with(value=value, expected=expected) + case "end with": + return _assert_end_with(value=value, expected=expected) + case "is": + return _assert_is(value=value, expected=expected) + case "is not": + return _assert_is_not(value=value, expected=expected) + case "empty": + return _assert_empty(value=value) + case "not empty": + return _assert_not_empty(value=value) + case "=": + return _assert_equal(value=value, expected=expected) + case "≠": + return _assert_not_equal(value=value, expected=expected) + case ">": + return _assert_greater_than(value=value, expected=expected) + case "<": + return _assert_less_than(value=value, expected=expected) + case "≥": + return _assert_greater_than_or_equal(value=value, expected=expected) + case "≤": + return _assert_less_than_or_equal(value=value, expected=expected) + case "null": + return _assert_null(value=value) + case "not null": + return _assert_not_null(value=value) + case "in": + return _assert_in(value=value, expected=expected) + case "not in": + return _assert_not_in(value=value, expected=expected) + case "all of" if isinstance(expected, list): + return 
_assert_all_of(value=value, expected=expected) + case "exists": + return _assert_exists(value=value) + case "not exists": + return _assert_not_exists(value=value) + case _: + raise ValueError(f"Unsupported operator: {operator}") - if not isinstance(actual_value, str | list): - raise ValueError("Invalid actual value type: string or array") - if expected_value in actual_value: - return False - return True - - def _assert_start_with(self, actual_value: Optional[str], expected_value: str) -> bool: - """ - Assert start with - :param actual_value: actual value - :param expected_value: expected value - :return: - """ - if not actual_value: - return False - - if not isinstance(actual_value, str): - raise ValueError("Invalid actual value type: string") - - if not actual_value.startswith(expected_value): - return False - return True - - def _assert_end_with(self, actual_value: Optional[str], expected_value: str) -> bool: - """ - Assert end with - :param actual_value: actual value - :param expected_value: expected value - :return: - """ - if not actual_value: - return False - - if not isinstance(actual_value, str): - raise ValueError("Invalid actual value type: string") - - if not actual_value.endswith(expected_value): - return False - return True - - def _assert_is(self, actual_value: Optional[str], expected_value: str) -> bool: - """ - Assert is - :param actual_value: actual value - :param expected_value: expected value - :return: - """ - if actual_value is None: - return False - - if not isinstance(actual_value, str): - raise ValueError("Invalid actual value type: string") - - if actual_value != expected_value: - return False - return True - - def _assert_is_not(self, actual_value: Optional[str], expected_value: str) -> bool: - """ - Assert is not - :param actual_value: actual value - :param expected_value: expected value - :return: - """ - if actual_value is None: - return False - - if not isinstance(actual_value, str): - raise ValueError("Invalid actual value type: string") - - if actual_value == expected_value: - return False - return True - - def _assert_empty(self, actual_value: Optional[str]) -> bool: - """ - Assert empty - :param actual_value: actual value - :return: - """ - if not actual_value: - return True +def _assert_contains(*, value: Any, expected: Any) -> bool: + if not value: return False - def _assert_not_empty(self, actual_value: Optional[str]) -> bool: - """ - Assert not empty - :param actual_value: actual value - :return: - """ - if actual_value: - return True + if not isinstance(value, str | list): + raise ValueError("Invalid actual value type: string or array") + + if expected not in value: + return False + return True + + +def _assert_not_contains(*, value: Any, expected: Any) -> bool: + if not value: + return True + + if not isinstance(value, str | list): + raise ValueError("Invalid actual value type: string or array") + + if expected in value: + return False + return True + + +def _assert_start_with(*, value: Any, expected: Any) -> bool: + if not value: return False - def _assert_equal(self, actual_value: Optional[int | float], expected_value: str | int | float) -> bool: - """ - Assert equal - :param actual_value: actual value - :param expected_value: expected value - :return: - """ - if actual_value is None: - return False + if not isinstance(value, str): + raise ValueError("Invalid actual value type: string") - if not isinstance(actual_value, int | float): - raise ValueError("Invalid actual value type: number") + if not value.startswith(expected): + return False + return 
True - if isinstance(actual_value, int): - expected_value = int(expected_value) - else: - expected_value = float(expected_value) - if actual_value != expected_value: - return False - return True - - def _assert_not_equal(self, actual_value: Optional[int | float], expected_value: str | int | float) -> bool: - """ - Assert not equal - :param actual_value: actual value - :param expected_value: expected value - :return: - """ - if actual_value is None: - return False - - if not isinstance(actual_value, int | float): - raise ValueError("Invalid actual value type: number") - - if isinstance(actual_value, int): - expected_value = int(expected_value) - else: - expected_value = float(expected_value) - - if actual_value == expected_value: - return False - return True - - def _assert_greater_than(self, actual_value: Optional[int | float], expected_value: str | int | float) -> bool: - """ - Assert greater than - :param actual_value: actual value - :param expected_value: expected value - :return: - """ - if actual_value is None: - return False - - if not isinstance(actual_value, int | float): - raise ValueError("Invalid actual value type: number") - - if isinstance(actual_value, int): - expected_value = int(expected_value) - else: - expected_value = float(expected_value) - - if actual_value <= expected_value: - return False - return True - - def _assert_less_than(self, actual_value: Optional[int | float], expected_value: str | int | float) -> bool: - """ - Assert less than - :param actual_value: actual value - :param expected_value: expected value - :return: - """ - if actual_value is None: - return False - - if not isinstance(actual_value, int | float): - raise ValueError("Invalid actual value type: number") - - if isinstance(actual_value, int): - expected_value = int(expected_value) - else: - expected_value = float(expected_value) - - if actual_value >= expected_value: - return False - return True - - def _assert_greater_than_or_equal( - self, actual_value: Optional[int | float], expected_value: str | int | float - ) -> bool: - """ - Assert greater than or equal - :param actual_value: actual value - :param expected_value: expected value - :return: - """ - if actual_value is None: - return False - - if not isinstance(actual_value, int | float): - raise ValueError("Invalid actual value type: number") - - if isinstance(actual_value, int): - expected_value = int(expected_value) - else: - expected_value = float(expected_value) - - if actual_value < expected_value: - return False - return True - - def _assert_less_than_or_equal( - self, actual_value: Optional[int | float], expected_value: str | int | float - ) -> bool: - """ - Assert less than or equal - :param actual_value: actual value - :param expected_value: expected value - :return: - """ - if actual_value is None: - return False - - if not isinstance(actual_value, int | float): - raise ValueError("Invalid actual value type: number") - - if isinstance(actual_value, int): - expected_value = int(expected_value) - else: - expected_value = float(expected_value) - - if actual_value > expected_value: - return False - return True - - def _assert_null(self, actual_value: Optional[int | float]) -> bool: - """ - Assert null - :param actual_value: actual value - :return: - """ - if actual_value is None: - return True +def _assert_end_with(*, value: Any, expected: Any) -> bool: + if not value: return False - def _assert_not_null(self, actual_value: Optional[int | float]) -> bool: - """ - Assert not null - :param actual_value: actual value - :return: - """ - if 
actual_value is not None: - return True + if not isinstance(value, str): + raise ValueError("Invalid actual value type: string") + + if not value.endswith(expected): + return False + return True + + +def _assert_is(*, value: Any, expected: Any) -> bool: + if value is None: return False + if not isinstance(value, str): + raise ValueError("Invalid actual value type: string") -class ConditionAssertionError(Exception): - def __init__(self, message: str, conditions: list[dict], sub_condition_compare_results: list[dict]) -> None: - self.message = message - self.conditions = conditions - self.sub_condition_compare_results = sub_condition_compare_results - super().__init__(self.message) + if value != expected: + return False + return True + + +def _assert_is_not(*, value: Any, expected: Any) -> bool: + if value is None: + return False + + if not isinstance(value, str): + raise ValueError("Invalid actual value type: string") + + if value == expected: + return False + return True + + +def _assert_empty(*, value: Any) -> bool: + if not value: + return True + return False + + +def _assert_not_empty(*, value: Any) -> bool: + if value: + return True + return False + + +def _assert_equal(*, value: Any, expected: Any) -> bool: + if value is None: + return False + + if not isinstance(value, int | float): + raise ValueError("Invalid actual value type: number") + + if isinstance(value, int): + expected = int(expected) + else: + expected = float(expected) + + if value != expected: + return False + return True + + +def _assert_not_equal(*, value: Any, expected: Any) -> bool: + if value is None: + return False + + if not isinstance(value, int | float): + raise ValueError("Invalid actual value type: number") + + if isinstance(value, int): + expected = int(expected) + else: + expected = float(expected) + + if value == expected: + return False + return True + + +def _assert_greater_than(*, value: Any, expected: Any) -> bool: + if value is None: + return False + + if not isinstance(value, int | float): + raise ValueError("Invalid actual value type: number") + + if isinstance(value, int): + expected = int(expected) + else: + expected = float(expected) + + if value <= expected: + return False + return True + + +def _assert_less_than(*, value: Any, expected: Any) -> bool: + if value is None: + return False + + if not isinstance(value, int | float): + raise ValueError("Invalid actual value type: number") + + if isinstance(value, int): + expected = int(expected) + else: + expected = float(expected) + + if value >= expected: + return False + return True + + +def _assert_greater_than_or_equal(*, value: Any, expected: Any) -> bool: + if value is None: + return False + + if not isinstance(value, int | float): + raise ValueError("Invalid actual value type: number") + + if isinstance(value, int): + expected = int(expected) + else: + expected = float(expected) + + if value < expected: + return False + return True + + +def _assert_less_than_or_equal(*, value: Any, expected: Any) -> bool: + if value is None: + return False + + if not isinstance(value, int | float): + raise ValueError("Invalid actual value type: number") + + if isinstance(value, int): + expected = int(expected) + else: + expected = float(expected) + + if value > expected: + return False + return True + + +def _assert_null(*, value: Any) -> bool: + if value is None: + return True + return False + + +def _assert_not_null(*, value: Any) -> bool: + if value is not None: + return True + return False + + +def _assert_in(*, value: Any, expected: Any) -> bool: + if not 
value: + return False + + if not isinstance(expected, list): + raise ValueError("Invalid expected value type: array") + + if value not in expected: + return False + return True + + +def _assert_not_in(*, value: Any, expected: Any) -> bool: + if not value: + return True + + if not isinstance(expected, list): + raise ValueError("Invalid expected value type: array") + + if value in expected: + return False + return True + + +def _assert_all_of(*, value: Any, expected: Sequence[str]) -> bool: + if not value: + return False + + if not all(item in value for item in expected): + return False + return True + + +def _assert_exists(*, value: Any) -> bool: + return value is not None + + +def _assert_not_exists(*, value: Any) -> bool: + return value is None + + +def _process_sub_conditions( + variable: ArrayFileSegment, + sub_conditions: Sequence[SubCondition], + operator: Literal["and", "or"], +) -> bool: + files = variable.value + group_results = [] + for condition in sub_conditions: + key = FileAttribute(condition.key) + values = [file_manager.get_attr(file=file, attr=key) for file in files] + sub_group_results = [ + _evaluate_condition( + value=value, + operator=condition.comparison_operator, + expected=condition.value, + ) + for value in values + ] + # Determine the result based on the presence of "not" in the comparison operator + result = all(sub_group_results) if "not" in condition.comparison_operator else any(sub_group_results) + group_results.append(result) + return all(group_results) if operator == "and" else any(group_results) diff --git a/api/core/workflow/utils/variable_template_parser.py b/api/core/workflow/utils/variable_template_parser.py index fd0e48b862..1d8fb38ebf 100644 --- a/api/core/workflow/utils/variable_template_parser.py +++ b/api/core/workflow/utils/variable_template_parser.py @@ -1,42 +1,21 @@ import re -from collections.abc import Mapping +from collections.abc import Mapping, Sequence from typing import Any from core.workflow.entities.variable_entities import VariableSelector -from core.workflow.entities.variable_pool import VariablePool REGEX = re.compile(r"\{\{(#[a-zA-Z0-9_]{1,50}(\.[a-zA-Z_][a-zA-Z0-9_]{0,29}){1,10}#)\}\}") +SELECTOR_PATTERN = re.compile(r"\{\{(#[a-zA-Z0-9_]{1,50}(?:\.[a-zA-Z_][a-zA-Z0-9_]{0,29}){1,10}#)\}\}") -def parse_mixed_template(*, template: str, variable_pool: VariablePool) -> str: - """ - This is an alternative to the VariableTemplateParser class, - offering the same functionality but with better readability and ease of use. - """ - variable_keys = [match[0] for match in re.findall(REGEX, template)] - variable_keys = list(set(variable_keys)) - # This key_selector is a tuple of (key, selector) where selector is a list of keys - # e.g. 
('#node_id.query.name#', ['node_id', 'query', 'name']) - key_selectors = filter( - lambda t: len(t[1]) >= 2, - ((key, selector.replace("#", "").split(".")) for key, selector in zip(variable_keys, variable_keys)), - ) - inputs = {key: variable_pool.get_any(selector) for key, selector in key_selectors} - - def replacer(match): - key = match.group(1) - # return original matched string if key not found - value = inputs.get(key, match.group(0)) - if value is None: - value = "" - value = str(value) - # remove template variables if required - return re.sub(REGEX, r"{\1}", value) - - result = re.sub(REGEX, replacer, template) - result = re.sub(r"<\|.*?\|>", "", result) - return result +def extract_selectors_from_template(template: str, /) -> Sequence[VariableSelector]: + parts = SELECTOR_PATTERN.split(template) + selectors = [] + for part in filter(lambda x: x, parts): + if "." in part and part[0] == "#" and part[-1] == "#": + selectors.append(VariableSelector(variable=f"{part}", value_selector=part[1:-1].split("."))) + return selectors class VariableTemplateParser: diff --git a/api/core/workflow/workflow_entry.py b/api/core/workflow/workflow_entry.py index 74a598ada5..eb812bad21 100644 --- a/api/core/workflow/workflow_entry.py +++ b/api/core/workflow/workflow_entry.py @@ -8,10 +8,8 @@ from configs import dify_config from core.app.app_config.entities import FileExtraConfig from core.app.apps.base_app_queue_manager import GenerateTaskStoppedError from core.app.entities.app_invoke_entities import InvokeFrom -from core.file.file_obj import FileTransferMethod, FileType, FileVar -from core.workflow.callbacks.base_workflow_callback import WorkflowCallback -from core.workflow.entities.base_node_data_entities import BaseNodeData -from core.workflow.entities.node_entities import NodeType, UserFrom +from core.file.models import File, FileTransferMethod, FileType, ImageConfig +from core.workflow.callbacks import WorkflowCallback from core.workflow.entities.variable_pool import VariablePool from core.workflow.errors import WorkflowNodeRunFailedError from core.workflow.graph_engine.entities.event import GraphEngineEvent, GraphRunFailedEvent, InNodeEvent @@ -19,10 +17,12 @@ from core.workflow.graph_engine.entities.graph import Graph from core.workflow.graph_engine.entities.graph_init_params import GraphInitParams from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState from core.workflow.graph_engine.graph_engine import GraphEngine -from core.workflow.nodes.base_node import BaseNode -from core.workflow.nodes.event import RunEvent -from core.workflow.nodes.llm.entities import LLMNodeData -from core.workflow.nodes.node_mapping import node_classes +from core.workflow.nodes import NodeType +from core.workflow.nodes.base import BaseNode, BaseNodeData +from core.workflow.nodes.event import NodeEvent +from core.workflow.nodes.llm import LLMNodeData +from core.workflow.nodes.node_mapping import node_type_classes_mapping +from models.enums import UserFrom from models.workflow import ( Workflow, WorkflowType, @@ -115,7 +115,7 @@ class WorkflowEntry: @classmethod def single_step_run( cls, workflow: Workflow, node_id: str, user_id: str, user_inputs: dict - ) -> tuple[BaseNode, Generator[RunEvent | InNodeEvent, None, None]]: + ) -> tuple[BaseNode, Generator[NodeEvent | InNodeEvent, None, None]]: """ Single step run workflow node :param workflow: Workflow instance @@ -144,8 +144,8 @@ class WorkflowEntry: raise ValueError("node id not found in workflow graph") # Get node class - node_type = 
NodeType.value_of(node_config.get("data", {}).get("type")) - node_cls = node_classes.get(node_type) + node_type = NodeType(node_config.get("data", {}).get("type")) + node_cls = node_type_classes_mapping.get(node_type) node_cls = cast(type[BaseNode], node_cls) if not node_cls: @@ -162,7 +162,7 @@ class WorkflowEntry: graph = Graph.init(graph_config=workflow.graph_dict) # init workflow run state - node_instance: BaseNode = node_cls( + node_instance = node_cls( id=str(uuid.uuid4()), config=node_config, graph_init_params=GraphInitParams( @@ -205,32 +205,27 @@ class WorkflowEntry: except Exception as e: raise WorkflowNodeRunFailedError(node_instance=node_instance, error=str(e)) - @classmethod - def handle_special_values(cls, value: Optional[Mapping[str, Any]]) -> Optional[dict]: - """ - Handle special values - :param value: value - :return: - """ - if not value: - return None + @staticmethod + def handle_special_values(value: Optional[Mapping[str, Any]]) -> Mapping[str, Any] | None: + return WorkflowEntry._handle_special_values(value) - new_value = dict(value) if value else {} - if isinstance(new_value, dict): - for key, val in new_value.items(): - if isinstance(val, FileVar): - new_value[key] = val.to_dict() - elif isinstance(val, list): - new_val = [] - for v in val: - if isinstance(v, FileVar): - new_val.append(v.to_dict()) - else: - new_val.append(v) - - new_value[key] = new_val - - return new_value + @staticmethod + def _handle_special_values(value: Any) -> Any: + if value is None: + return value + if isinstance(value, dict): + res = {} + for k, v in value.items(): + res[k] = WorkflowEntry._handle_special_values(v) + return res + if isinstance(value, list): + res = [] + for item in value: + res.append(WorkflowEntry._handle_special_values(item)) + return res + if isinstance(value, File): + return value.to_dict() + return value @classmethod def mapping_user_inputs_to_variable_pool( @@ -276,20 +271,24 @@ class WorkflowEntry: for item in input_value: if isinstance(item, dict) and "type" in item and item["type"] == "image": transfer_method = FileTransferMethod.value_of(item.get("transfer_method")) - file = FileVar( + file = File( tenant_id=tenant_id, type=FileType.IMAGE, transfer_method=transfer_method, - url=item.get("url") if transfer_method == FileTransferMethod.REMOTE_URL else None, + remote_url=item.get("url") + if transfer_method == FileTransferMethod.REMOTE_URL + else None, related_id=item.get("upload_file_id") if transfer_method == FileTransferMethod.LOCAL_FILE else None, - extra_config=FileExtraConfig(image_config={"detail": detail} if detail else None), + _extra_config=FileExtraConfig( + image_config=ImageConfig(detail=detail) if detail else None + ), ) new_value.append(file) if new_value: - value = new_value + input_value = new_value # append variable and value to variable pool variable_pool.add([variable_node_id] + variable_key_list, input_value) diff --git a/api/events/event_handlers/delete_tool_parameters_cache_when_sync_draft_workflow.py b/api/events/event_handlers/delete_tool_parameters_cache_when_sync_draft_workflow.py index f96bb5ef74..9c5955c8c5 100644 --- a/api/events/event_handlers/delete_tool_parameters_cache_when_sync_draft_workflow.py +++ b/api/events/event_handlers/delete_tool_parameters_cache_when_sync_draft_workflow.py @@ -1,6 +1,6 @@ from core.tools.tool_manager import ToolManager from core.tools.utils.configuration import ToolParameterConfigurationManager -from core.workflow.entities.node_entities import NodeType +from core.workflow.nodes import NodeType from 
core.workflow.nodes.tool.entities import ToolEntity from events.app_event import app_draft_workflow_was_synced diff --git a/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py b/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py index c5e98e263f..453395e8d7 100644 --- a/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py +++ b/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py @@ -1,6 +1,6 @@ from typing import cast -from core.workflow.entities.node_entities import NodeType +from core.workflow.nodes import NodeType from core.workflow.nodes.knowledge_retrieval.entities import KnowledgeRetrievalNodeData from events.app_event import app_published_workflow_was_updated from extensions.ext_database import db diff --git a/api/extensions/ext_celery.py b/api/extensions/ext_celery.py index 0ff9f90847..504899c276 100644 --- a/api/extensions/ext_celery.py +++ b/api/extensions/ext_celery.py @@ -1,8 +1,11 @@ from datetime import timedelta from celery import Celery, Task +from celery.schedules import crontab from flask import Flask +from configs import dify_config + def init_app(app: Flask) -> Celery: class FlaskTask(Task): @@ -12,19 +15,19 @@ def init_app(app: Flask) -> Celery: broker_transport_options = {} - if app.config.get("CELERY_USE_SENTINEL"): + if dify_config.CELERY_USE_SENTINEL: broker_transport_options = { - "master_name": app.config.get("CELERY_SENTINEL_MASTER_NAME"), + "master_name": dify_config.CELERY_SENTINEL_MASTER_NAME, "sentinel_kwargs": { - "socket_timeout": app.config.get("CELERY_SENTINEL_SOCKET_TIMEOUT", 0.1), + "socket_timeout": dify_config.CELERY_SENTINEL_SOCKET_TIMEOUT, }, } celery_app = Celery( app.name, task_cls=FlaskTask, - broker=app.config.get("CELERY_BROKER_URL"), - backend=app.config.get("CELERY_BACKEND"), + broker=dify_config.CELERY_BROKER_URL, + backend=dify_config.CELERY_BACKEND, task_ignore_result=True, ) @@ -37,12 +40,12 @@ def init_app(app: Flask) -> Celery: } celery_app.conf.update( - result_backend=app.config.get("CELERY_RESULT_BACKEND"), + result_backend=dify_config.CELERY_RESULT_BACKEND, broker_transport_options=broker_transport_options, broker_connection_retry_on_startup=True, ) - if app.config.get("BROKER_USE_SSL"): + if dify_config.BROKER_USE_SSL: celery_app.conf.update( broker_use_ssl=ssl_options, # Add the SSL options to the broker configuration ) @@ -53,8 +56,10 @@ def init_app(app: Flask) -> Celery: imports = [ "schedule.clean_embedding_cache_task", "schedule.clean_unused_datasets_task", + "schedule.create_tidb_serverless_task", + "schedule.update_tidb_serverless_status_task", ] - day = app.config.get("CELERY_BEAT_SCHEDULER_TIME") + day = dify_config.CELERY_BEAT_SCHEDULER_TIME beat_schedule = { "clean_embedding_cache_task": { "task": "schedule.clean_embedding_cache_task.clean_embedding_cache_task", @@ -64,6 +69,14 @@ def init_app(app: Flask) -> Celery: "task": "schedule.clean_unused_datasets_task.clean_unused_datasets_task", "schedule": timedelta(days=day), }, + "create_tidb_serverless_task": { + "task": "schedule.create_tidb_serverless_task.create_tidb_serverless_task", + "schedule": crontab(minute="0", hour="*"), + }, + "update_tidb_serverless_status_task": { + "task": "schedule.update_tidb_serverless_status_task.update_tidb_serverless_status_task", + "schedule": crontab(minute="30", hour="*"), + }, } celery_app.conf.update(beat_schedule=beat_schedule, imports=imports) diff --git 
a/api/extensions/ext_compress.py b/api/extensions/ext_compress.py index 38e67749fc..a6de28597b 100644 --- a/api/extensions/ext_compress.py +++ b/api/extensions/ext_compress.py @@ -1,8 +1,10 @@ from flask import Flask +from configs import dify_config + def init_app(app: Flask): - if app.config.get("API_COMPRESSION_ENABLED"): + if dify_config.API_COMPRESSION_ENABLED: from flask_compress import Compress app.config["COMPRESS_MIMETYPES"] = [ diff --git a/api/extensions/ext_mail.py b/api/extensions/ext_mail.py index b435294abc..5c5b331d8a 100644 --- a/api/extensions/ext_mail.py +++ b/api/extensions/ext_mail.py @@ -4,6 +4,8 @@ from typing import Optional import resend from flask import Flask +from configs import dify_config + class Mail: def __init__(self): @@ -14,41 +16,44 @@ class Mail: return self._client is not None def init_app(self, app: Flask): - if app.config.get("MAIL_TYPE"): - if app.config.get("MAIL_DEFAULT_SEND_FROM"): - self._default_send_from = app.config.get("MAIL_DEFAULT_SEND_FROM") + mail_type = dify_config.MAIL_TYPE + if not mail_type: + logging.warning("MAIL_TYPE is not set") + return - if app.config.get("MAIL_TYPE") == "resend": - api_key = app.config.get("RESEND_API_KEY") + if dify_config.MAIL_DEFAULT_SEND_FROM: + self._default_send_from = dify_config.MAIL_DEFAULT_SEND_FROM + + match mail_type: + case "resend": + api_key = dify_config.RESEND_API_KEY if not api_key: raise ValueError("RESEND_API_KEY is not set") - api_url = app.config.get("RESEND_API_URL") + api_url = dify_config.RESEND_API_URL if api_url: resend.api_url = api_url resend.api_key = api_key self._client = resend.Emails - elif app.config.get("MAIL_TYPE") == "smtp": + case "smtp": from libs.smtp import SMTPClient - if not app.config.get("SMTP_SERVER") or not app.config.get("SMTP_PORT"): + if not dify_config.SMTP_SERVER or not dify_config.SMTP_PORT: raise ValueError("SMTP_SERVER and SMTP_PORT are required for smtp mail type") - if not app.config.get("SMTP_USE_TLS") and app.config.get("SMTP_OPPORTUNISTIC_TLS"): + if not dify_config.SMTP_USE_TLS and dify_config.SMTP_OPPORTUNISTIC_TLS: raise ValueError("SMTP_OPPORTUNISTIC_TLS is not supported without enabling SMTP_USE_TLS") self._client = SMTPClient( - server=app.config.get("SMTP_SERVER"), - port=app.config.get("SMTP_PORT"), - username=app.config.get("SMTP_USERNAME"), - password=app.config.get("SMTP_PASSWORD"), - _from=app.config.get("MAIL_DEFAULT_SEND_FROM"), - use_tls=app.config.get("SMTP_USE_TLS"), - opportunistic_tls=app.config.get("SMTP_OPPORTUNISTIC_TLS"), + server=dify_config.SMTP_SERVER, + port=dify_config.SMTP_PORT, + username=dify_config.SMTP_USERNAME, + password=dify_config.SMTP_PASSWORD, + _from=dify_config.MAIL_DEFAULT_SEND_FROM, + use_tls=dify_config.SMTP_USE_TLS, + opportunistic_tls=dify_config.SMTP_OPPORTUNISTIC_TLS, ) - else: - raise ValueError("Unsupported mail type {}".format(app.config.get("MAIL_TYPE"))) - else: - logging.warning("MAIL_TYPE is not set") + case _: + raise ValueError("Unsupported mail type {}".format(mail_type)) def send(self, to: str, subject: str, html: str, from_: Optional[str] = None): if not self._client: diff --git a/api/extensions/ext_redis.py b/api/extensions/ext_redis.py index 054769e7ff..e1f8409f21 100644 --- a/api/extensions/ext_redis.py +++ b/api/extensions/ext_redis.py @@ -2,6 +2,8 @@ import redis from redis.connection import Connection, SSLConnection from redis.sentinel import Sentinel +from configs import dify_config + class RedisClientWrapper(redis.Redis): """ @@ -43,37 +45,37 @@ redis_client = RedisClientWrapper() def 
init_app(app): global redis_client connection_class = Connection - if app.config.get("REDIS_USE_SSL"): + if dify_config.REDIS_USE_SSL: connection_class = SSLConnection redis_params = { - "username": app.config.get("REDIS_USERNAME"), - "password": app.config.get("REDIS_PASSWORD"), - "db": app.config.get("REDIS_DB"), + "username": dify_config.REDIS_USERNAME, + "password": dify_config.REDIS_PASSWORD, + "db": dify_config.REDIS_DB, "encoding": "utf-8", "encoding_errors": "strict", "decode_responses": False, } - if app.config.get("REDIS_USE_SENTINEL"): + if dify_config.REDIS_USE_SENTINEL: sentinel_hosts = [ - (node.split(":")[0], int(node.split(":")[1])) for node in app.config.get("REDIS_SENTINELS").split(",") + (node.split(":")[0], int(node.split(":")[1])) for node in dify_config.REDIS_SENTINELS.split(",") ] sentinel = Sentinel( sentinel_hosts, sentinel_kwargs={ - "socket_timeout": app.config.get("REDIS_SENTINEL_SOCKET_TIMEOUT", 0.1), - "username": app.config.get("REDIS_SENTINEL_USERNAME"), - "password": app.config.get("REDIS_SENTINEL_PASSWORD"), + "socket_timeout": dify_config.REDIS_SENTINEL_SOCKET_TIMEOUT, + "username": dify_config.REDIS_SENTINEL_USERNAME, + "password": dify_config.REDIS_SENTINEL_PASSWORD, }, ) - master = sentinel.master_for(app.config.get("REDIS_SENTINEL_SERVICE_NAME"), **redis_params) + master = sentinel.master_for(dify_config.REDIS_SENTINEL_SERVICE_NAME, **redis_params) redis_client.initialize(master) else: redis_params.update( { - "host": app.config.get("REDIS_HOST"), - "port": app.config.get("REDIS_PORT"), + "host": dify_config.REDIS_HOST, + "port": dify_config.REDIS_PORT, "connection_class": connection_class, } ) diff --git a/api/extensions/ext_sentry.py b/api/extensions/ext_sentry.py index e255e7eb35..11f1dd93c6 100644 --- a/api/extensions/ext_sentry.py +++ b/api/extensions/ext_sentry.py @@ -5,6 +5,7 @@ from sentry_sdk.integrations.celery import CeleryIntegration from sentry_sdk.integrations.flask import FlaskIntegration from werkzeug.exceptions import HTTPException +from configs import dify_config from core.model_runtime.errors.invoke import InvokeRateLimitError @@ -18,9 +19,9 @@ def before_send(event, hint): def init_app(app): - if app.config.get("SENTRY_DSN"): + if dify_config.SENTRY_DSN: sentry_sdk.init( - dsn=app.config.get("SENTRY_DSN"), + dsn=dify_config.SENTRY_DSN, integrations=[FlaskIntegration(), CeleryIntegration()], ignore_errors=[ HTTPException, @@ -29,9 +30,9 @@ def init_app(app): InvokeRateLimitError, parse_error.defaultErrorResponse, ], - traces_sample_rate=app.config.get("SENTRY_TRACES_SAMPLE_RATE", 1.0), - profiles_sample_rate=app.config.get("SENTRY_PROFILES_SAMPLE_RATE", 1.0), - environment=app.config.get("DEPLOY_ENV"), - release=f"dify-{app.config.get('CURRENT_VERSION')}-{app.config.get('COMMIT_SHA')}", + traces_sample_rate=dify_config.SENTRY_TRACES_SAMPLE_RATE, + profiles_sample_rate=dify_config.SENTRY_PROFILES_SAMPLE_RATE, + environment=dify_config.DEPLOY_ENV, + release=f"dify-{dify_config.CURRENT_VERSION}-{dify_config.COMMIT_SHA}", before_send=before_send, ) diff --git a/api/extensions/ext_storage.py b/api/extensions/ext_storage.py index f90629262d..86fadf23d7 100644 --- a/api/extensions/ext_storage.py +++ b/api/extensions/ext_storage.py @@ -15,7 +15,8 @@ class Storage: def init_app(self, app: Flask): storage_factory = self.get_storage_factory(dify_config.STORAGE_TYPE) - self.storage_runner = storage_factory(app=app) + with app.app_context(): + self.storage_runner = storage_factory() @staticmethod def get_storage_factory(storage_type: str) -> 
type[BaseStorage]: @@ -72,7 +73,7 @@ class Storage: logging.exception("Failed to save file: %s", e) raise e - def load(self, filename: str, stream: bool = False) -> Union[bytes, Generator]: + def load(self, filename: str, /, *, stream: bool = False) -> Union[bytes, Generator]: try: if stream: return self.load_stream(filename) diff --git a/api/extensions/storage/aliyun_oss_storage.py b/api/extensions/storage/aliyun_oss_storage.py index ae6911e945..67635b129e 100644 --- a/api/extensions/storage/aliyun_oss_storage.py +++ b/api/extensions/storage/aliyun_oss_storage.py @@ -1,29 +1,27 @@ from collections.abc import Generator import oss2 as aliyun_s3 -from flask import Flask +from configs import dify_config from extensions.storage.base_storage import BaseStorage class AliyunOssStorage(BaseStorage): """Implementation for Aliyun OSS storage.""" - def __init__(self, app: Flask): - super().__init__(app) - - app_config = self.app.config - self.bucket_name = app_config.get("ALIYUN_OSS_BUCKET_NAME") - self.folder = app.config.get("ALIYUN_OSS_PATH") + def __init__(self): + super().__init__() + self.bucket_name = dify_config.ALIYUN_OSS_BUCKET_NAME + self.folder = dify_config.ALIYUN_OSS_PATH oss_auth_method = aliyun_s3.Auth region = None - if app_config.get("ALIYUN_OSS_AUTH_VERSION") == "v4": + if dify_config.ALIYUN_OSS_AUTH_VERSION == "v4": oss_auth_method = aliyun_s3.AuthV4 - region = app_config.get("ALIYUN_OSS_REGION") - oss_auth = oss_auth_method(app_config.get("ALIYUN_OSS_ACCESS_KEY"), app_config.get("ALIYUN_OSS_SECRET_KEY")) + region = dify_config.ALIYUN_OSS_REGION + oss_auth = oss_auth_method(dify_config.ALIYUN_OSS_ACCESS_KEY, dify_config.ALIYUN_OSS_SECRET_KEY) self.client = aliyun_s3.Bucket( oss_auth, - app_config.get("ALIYUN_OSS_ENDPOINT"), + dify_config.ALIYUN_OSS_ENDPOINT, self.bucket_name, connect_timeout=30, region=region, @@ -38,12 +36,9 @@ class AliyunOssStorage(BaseStorage): return data def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - obj = self.client.get_object(self.__wrapper_folder_filename(filename)) - while chunk := obj.read(4096): - yield chunk - - return generate() + obj = self.client.get_object(self.__wrapper_folder_filename(filename)) + while chunk := obj.read(4096): + yield chunk def download(self, filename, target_filepath): self.client.get_object_to_file(self.__wrapper_folder_filename(filename), target_filepath) diff --git a/api/extensions/storage/aws_s3_storage.py b/api/extensions/storage/aws_s3_storage.py index 507a303223..ab2d0fba3b 100644 --- a/api/extensions/storage/aws_s3_storage.py +++ b/api/extensions/storage/aws_s3_storage.py @@ -4,8 +4,8 @@ from collections.abc import Generator import boto3 from botocore.client import Config from botocore.exceptions import ClientError -from flask import Flask +from configs import dify_config from extensions.storage.base_storage import BaseStorage logger = logging.getLogger(__name__) @@ -14,26 +14,25 @@ logger = logging.getLogger(__name__) class AwsS3Storage(BaseStorage): """Implementation for Amazon Web Services S3 storage.""" - def __init__(self, app: Flask): - super().__init__(app) - app_config = self.app.config - self.bucket_name = app_config.get("S3_BUCKET_NAME") - if app_config.get("S3_USE_AWS_MANAGED_IAM"): + def __init__(self): + super().__init__() + self.bucket_name = dify_config.S3_BUCKET_NAME + if dify_config.S3_USE_AWS_MANAGED_IAM: logger.info("Using AWS managed IAM role for S3") session = boto3.Session() - region_name = app_config.get("S3_REGION") + region_name = 
dify_config.S3_REGION self.client = session.client(service_name="s3", region_name=region_name) else: logger.info("Using ak and sk for S3") self.client = boto3.client( "s3", - aws_secret_access_key=app_config.get("S3_SECRET_KEY"), - aws_access_key_id=app_config.get("S3_ACCESS_KEY"), - endpoint_url=app_config.get("S3_ENDPOINT"), - region_name=app_config.get("S3_REGION"), - config=Config(s3={"addressing_style": app_config.get("S3_ADDRESS_STYLE")}), + aws_secret_access_key=dify_config.S3_SECRET_KEY, + aws_access_key_id=dify_config.S3_ACCESS_KEY, + endpoint_url=dify_config.S3_ENDPOINT, + region_name=dify_config.S3_REGION, + config=Config(s3={"addressing_style": dify_config.S3_ADDRESS_STYLE}), ) # create bucket try: @@ -63,17 +62,14 @@ class AwsS3Storage(BaseStorage): return data def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - try: - response = self.client.get_object(Bucket=self.bucket_name, Key=filename) - yield from response["Body"].iter_chunks() - except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": - raise FileNotFoundError("File not found") - else: - raise - - return generate() + try: + response = self.client.get_object(Bucket=self.bucket_name, Key=filename) + yield from response["Body"].iter_chunks() + except ClientError as ex: + if ex.response["Error"]["Code"] == "NoSuchKey": + raise FileNotFoundError("File not found") + else: + raise def download(self, filename, target_filepath): self.client.download_file(self.bucket_name, filename, target_filepath) diff --git a/api/extensions/storage/azure_blob_storage.py b/api/extensions/storage/azure_blob_storage.py index daea660a49..11a7544274 100644 --- a/api/extensions/storage/azure_blob_storage.py +++ b/api/extensions/storage/azure_blob_storage.py @@ -2,8 +2,8 @@ from collections.abc import Generator from datetime import datetime, timedelta, timezone from azure.storage.blob import AccountSasPermissions, BlobServiceClient, ResourceTypes, generate_account_sas -from flask import Flask +from configs import dify_config from extensions.ext_redis import redis_client from extensions.storage.base_storage import BaseStorage @@ -11,13 +11,12 @@ from extensions.storage.base_storage import BaseStorage class AzureBlobStorage(BaseStorage): """Implementation for Azure Blob storage.""" - def __init__(self, app: Flask): - super().__init__(app) - app_config = self.app.config - self.bucket_name = app_config.get("AZURE_BLOB_CONTAINER_NAME") - self.account_url = app_config.get("AZURE_BLOB_ACCOUNT_URL") - self.account_name = app_config.get("AZURE_BLOB_ACCOUNT_NAME") - self.account_key = app_config.get("AZURE_BLOB_ACCOUNT_KEY") + def __init__(self): + super().__init__() + self.bucket_name = dify_config.AZURE_BLOB_CONTAINER_NAME + self.account_url = dify_config.AZURE_BLOB_ACCOUNT_URL + self.account_name = dify_config.AZURE_BLOB_ACCOUNT_NAME + self.account_key = dify_config.AZURE_BLOB_ACCOUNT_KEY def save(self, filename, data): client = self._sync_client() @@ -33,13 +32,9 @@ class AzureBlobStorage(BaseStorage): def load_stream(self, filename: str) -> Generator: client = self._sync_client() - - def generate(filename: str = filename) -> Generator: - blob = client.get_blob_client(container=self.bucket_name, blob=filename) - blob_data = blob.download_blob() - yield from blob_data.chunks() - - return generate(filename) + blob = client.get_blob_client(container=self.bucket_name, blob=filename) + blob_data = blob.download_blob() + yield from blob_data.chunks() def download(self, filename, 
target_filepath): client = self._sync_client() diff --git a/api/extensions/storage/baidu_obs_storage.py b/api/extensions/storage/baidu_obs_storage.py index c5acff4a9d..e0d2140e91 100644 --- a/api/extensions/storage/baidu_obs_storage.py +++ b/api/extensions/storage/baidu_obs_storage.py @@ -5,24 +5,23 @@ from collections.abc import Generator from baidubce.auth.bce_credentials import BceCredentials from baidubce.bce_client_configuration import BceClientConfiguration from baidubce.services.bos.bos_client import BosClient -from flask import Flask +from configs import dify_config from extensions.storage.base_storage import BaseStorage class BaiduObsStorage(BaseStorage): """Implementation for Baidu OBS storage.""" - def __init__(self, app: Flask): - super().__init__(app) - app_config = self.app.config - self.bucket_name = app_config.get("BAIDU_OBS_BUCKET_NAME") + def __init__(self): + super().__init__() + self.bucket_name = dify_config.BAIDU_OBS_BUCKET_NAME client_config = BceClientConfiguration( credentials=BceCredentials( - access_key_id=app_config.get("BAIDU_OBS_ACCESS_KEY"), - secret_access_key=app_config.get("BAIDU_OBS_SECRET_KEY"), + access_key_id=dify_config.BAIDU_OBS_ACCESS_KEY, + secret_access_key=dify_config.BAIDU_OBS_SECRET_KEY, ), - endpoint=app_config.get("BAIDU_OBS_ENDPOINT"), + endpoint=dify_config.BAIDU_OBS_ENDPOINT, ) self.client = BosClient(config=client_config) @@ -40,12 +39,9 @@ class BaiduObsStorage(BaseStorage): return response.data.read() def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - response = self.client.get_object(bucket_name=self.bucket_name, key=filename).data - while chunk := response.read(4096): - yield chunk - - return generate() + response = self.client.get_object(bucket_name=self.bucket_name, key=filename).data + while chunk := response.read(4096): + yield chunk def download(self, filename, target_filepath): self.client.get_object_to_file(bucket_name=self.bucket_name, key=filename, file_name=target_filepath) diff --git a/api/extensions/storage/base_storage.py b/api/extensions/storage/base_storage.py index c3fe9ec82a..50abab8537 100644 --- a/api/extensions/storage/base_storage.py +++ b/api/extensions/storage/base_storage.py @@ -3,16 +3,12 @@ from abc import ABC, abstractmethod from collections.abc import Generator -from flask import Flask - class BaseStorage(ABC): """Interface for file storage.""" - app = None - - def __init__(self, app: Flask): - self.app = app + def __init__(self): # noqa: B027 + pass @abstractmethod def save(self, filename, data): diff --git a/api/extensions/storage/google_cloud_storage.py b/api/extensions/storage/google_cloud_storage.py index 2d1224fd74..26b662d2f0 100644 --- a/api/extensions/storage/google_cloud_storage.py +++ b/api/extensions/storage/google_cloud_storage.py @@ -3,20 +3,20 @@ import io import json from collections.abc import Generator -from flask import Flask from google.cloud import storage as google_cloud_storage +from configs import dify_config from extensions.storage.base_storage import BaseStorage class GoogleCloudStorage(BaseStorage): """Implementation for Google Cloud storage.""" - def __init__(self, app: Flask): - super().__init__(app) - app_config = self.app.config - self.bucket_name = app_config.get("GOOGLE_STORAGE_BUCKET_NAME") - service_account_json_str = app_config.get("GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64") + def __init__(self): + super().__init__() + + self.bucket_name = dify_config.GOOGLE_STORAGE_BUCKET_NAME + service_account_json_str = 
dify_config.GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64 # if service_account_json_str is empty, use Application Default Credentials if service_account_json_str: service_account_json = base64.b64decode(service_account_json_str).decode("utf-8") @@ -39,14 +39,11 @@ class GoogleCloudStorage(BaseStorage): return data def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - bucket = self.client.get_bucket(self.bucket_name) - blob = bucket.get_blob(filename) - with blob.open(mode="rb") as blob_stream: - while chunk := blob_stream.read(4096): - yield chunk - - return generate() + bucket = self.client.get_bucket(self.bucket_name) + blob = bucket.get_blob(filename) + with blob.open(mode="rb") as blob_stream: + while chunk := blob_stream.read(4096): + yield chunk def download(self, filename, target_filepath): bucket = self.client.get_bucket(self.bucket_name) diff --git a/api/extensions/storage/huawei_obs_storage.py b/api/extensions/storage/huawei_obs_storage.py index dd243d4001..20be70ef83 100644 --- a/api/extensions/storage/huawei_obs_storage.py +++ b/api/extensions/storage/huawei_obs_storage.py @@ -1,22 +1,22 @@ from collections.abc import Generator -from flask import Flask from obs import ObsClient +from configs import dify_config from extensions.storage.base_storage import BaseStorage class HuaweiObsStorage(BaseStorage): """Implementation for Huawei OBS storage.""" - def __init__(self, app: Flask): - super().__init__(app) - app_config = self.app.config - self.bucket_name = app_config.get("HUAWEI_OBS_BUCKET_NAME") + def __init__(self): + super().__init__() + + self.bucket_name = dify_config.HUAWEI_OBS_BUCKET_NAME self.client = ObsClient( - access_key_id=app_config.get("HUAWEI_OBS_ACCESS_KEY"), - secret_access_key=app_config.get("HUAWEI_OBS_SECRET_KEY"), - server=app_config.get("HUAWEI_OBS_SERVER"), + access_key_id=dify_config.HUAWEI_OBS_ACCESS_KEY, + secret_access_key=dify_config.HUAWEI_OBS_SECRET_KEY, + server=dify_config.HUAWEI_OBS_SERVER, ) def save(self, filename, data): @@ -27,12 +27,9 @@ class HuaweiObsStorage(BaseStorage): return data def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - response = self.client.getObject(bucketName=self.bucket_name, objectKey=filename)["body"].response - while chunk := response.read(4096): - yield chunk - - return generate() + response = self.client.getObject(bucketName=self.bucket_name, objectKey=filename)["body"].response + while chunk := response.read(4096): + yield chunk def download(self, filename, target_filepath): self.client.getObject(bucketName=self.bucket_name, objectKey=filename, downloadPath=target_filepath) diff --git a/api/extensions/storage/local_fs_storage.py b/api/extensions/storage/local_fs_storage.py index 9308c4d180..5a495ca4d4 100644 --- a/api/extensions/storage/local_fs_storage.py +++ b/api/extensions/storage/local_fs_storage.py @@ -3,83 +3,60 @@ import shutil from collections.abc import Generator from pathlib import Path -from flask import Flask +from flask import current_app +from configs import dify_config from extensions.storage.base_storage import BaseStorage class LocalFsStorage(BaseStorage): """Implementation for local filesystem storage.""" - def __init__(self, app: Flask): - super().__init__(app) - folder = self.app.config.get("STORAGE_LOCAL_PATH") + def __init__(self): + super().__init__() + folder = dify_config.STORAGE_LOCAL_PATH if not os.path.isabs(folder): - folder = os.path.join(app.root_path, folder) + folder = 
os.path.join(current_app.root_path, folder) self.folder = folder - def save(self, filename, data): + def _build_filepath(self, filename: str) -> str: + """Build the full file path based on the folder and filename.""" if not self.folder or self.folder.endswith("/"): - filename = self.folder + filename + return self.folder + filename else: - filename = self.folder + "/" + filename + return self.folder + "/" + filename - folder = os.path.dirname(filename) + def save(self, filename, data): + filepath = self._build_filepath(filename) + folder = os.path.dirname(filepath) os.makedirs(folder, exist_ok=True) - - Path(os.path.join(os.getcwd(), filename)).write_bytes(data) + Path(os.path.join(os.getcwd(), filepath)).write_bytes(data) def load_once(self, filename: str) -> bytes: - if not self.folder or self.folder.endswith("/"): - filename = self.folder + filename - else: - filename = self.folder + "/" + filename - - if not os.path.exists(filename): + filepath = self._build_filepath(filename) + if not os.path.exists(filepath): raise FileNotFoundError("File not found") - - data = Path(filename).read_bytes() - return data + return Path(filepath).read_bytes() def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - if not self.folder or self.folder.endswith("/"): - filename = self.folder + filename - else: - filename = self.folder + "/" + filename - - if not os.path.exists(filename): - raise FileNotFoundError("File not found") - - with open(filename, "rb") as f: - while chunk := f.read(4096): # Read in chunks of 4KB - yield chunk - - return generate() + filepath = self._build_filepath(filename) + if not os.path.exists(filepath): + raise FileNotFoundError("File not found") + with open(filepath, "rb") as f: + while chunk := f.read(4096): # Read in chunks of 4KB + yield chunk def download(self, filename, target_filepath): - if not self.folder or self.folder.endswith("/"): - filename = self.folder + filename - else: - filename = self.folder + "/" + filename - - if not os.path.exists(filename): + filepath = self._build_filepath(filename) + if not os.path.exists(filepath): raise FileNotFoundError("File not found") - - shutil.copyfile(filename, target_filepath) + shutil.copyfile(filepath, target_filepath) def exists(self, filename): - if not self.folder or self.folder.endswith("/"): - filename = self.folder + filename - else: - filename = self.folder + "/" + filename - - return os.path.exists(filename) + filepath = self._build_filepath(filename) + return os.path.exists(filepath) def delete(self, filename): - if not self.folder or self.folder.endswith("/"): - filename = self.folder + filename - else: - filename = self.folder + "/" + filename - if os.path.exists(filename): - os.remove(filename) + filepath = self._build_filepath(filename) + if os.path.exists(filepath): + os.remove(filepath) diff --git a/api/extensions/storage/oracle_oci_storage.py b/api/extensions/storage/oracle_oci_storage.py index 5295dbdca2..b59f83b8de 100644 --- a/api/extensions/storage/oracle_oci_storage.py +++ b/api/extensions/storage/oracle_oci_storage.py @@ -2,24 +2,24 @@ from collections.abc import Generator import boto3 from botocore.exceptions import ClientError -from flask import Flask +from configs import dify_config from extensions.storage.base_storage import BaseStorage class OracleOCIStorage(BaseStorage): """Implementation for Oracle OCI storage.""" - def __init__(self, app: Flask): - super().__init__(app) - app_config = self.app.config - self.bucket_name = 
app_config.get("OCI_BUCKET_NAME") + def __init__(self): + super().__init__() + + self.bucket_name = dify_config.OCI_BUCKET_NAME self.client = boto3.client( "s3", - aws_secret_access_key=app_config.get("OCI_SECRET_KEY"), - aws_access_key_id=app_config.get("OCI_ACCESS_KEY"), - endpoint_url=app_config.get("OCI_ENDPOINT"), - region_name=app_config.get("OCI_REGION"), + aws_secret_access_key=dify_config.OCI_SECRET_KEY, + aws_access_key_id=dify_config.OCI_ACCESS_KEY, + endpoint_url=dify_config.OCI_ENDPOINT, + region_name=dify_config.OCI_REGION, ) def save(self, filename, data): @@ -36,17 +36,14 @@ class OracleOCIStorage(BaseStorage): return data def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - try: - response = self.client.get_object(Bucket=self.bucket_name, Key=filename) - yield from response["Body"].iter_chunks() - except ClientError as ex: - if ex.response["Error"]["Code"] == "NoSuchKey": - raise FileNotFoundError("File not found") - else: - raise - - return generate() + try: + response = self.client.get_object(Bucket=self.bucket_name, Key=filename) + yield from response["Body"].iter_chunks() + except ClientError as ex: + if ex.response["Error"]["Code"] == "NoSuchKey": + raise FileNotFoundError("File not found") + else: + raise def download(self, filename, target_filepath): self.client.download_file(self.bucket_name, filename, target_filepath) diff --git a/api/extensions/storage/supabase_storage.py b/api/extensions/storage/supabase_storage.py index 1e399f87c8..9f7c69a9ae 100644 --- a/api/extensions/storage/supabase_storage.py +++ b/api/extensions/storage/supabase_storage.py @@ -2,25 +2,27 @@ import io from collections.abc import Generator from pathlib import Path -from flask import Flask from supabase import Client +from configs import dify_config from extensions.storage.base_storage import BaseStorage class SupabaseStorage(BaseStorage): """Implementation for supabase obs storage.""" - def __init__(self, app: Flask): - super().__init__(app) - app_config = self.app.config - self.bucket_name = app_config.get("SUPABASE_BUCKET_NAME") - self.client = Client( - supabase_url=app_config.get("SUPABASE_URL"), supabase_key=app_config.get("SUPABASE_API_KEY") - ) - self.create_bucket( - id=app_config.get("SUPABASE_BUCKET_NAME"), bucket_name=app_config.get("SUPABASE_BUCKET_NAME") - ) + def __init__(self): + super().__init__() + if dify_config.SUPABASE_URL is None: + raise ValueError("SUPABASE_URL is not set") + if dify_config.SUPABASE_API_KEY is None: + raise ValueError("SUPABASE_API_KEY is not set") + if dify_config.SUPABASE_BUCKET_NAME is None: + raise ValueError("SUPABASE_BUCKET_NAME is not set") + + self.bucket_name = dify_config.SUPABASE_BUCKET_NAME + self.client = Client(supabase_url=dify_config.SUPABASE_URL, supabase_key=dify_config.SUPABASE_API_KEY) + self.create_bucket(id=dify_config.SUPABASE_BUCKET_NAME, bucket_name=dify_config.SUPABASE_BUCKET_NAME) def create_bucket(self, id, bucket_name): if not self.bucket_exists(): @@ -34,17 +36,14 @@ class SupabaseStorage(BaseStorage): return content def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - result = self.client.storage.from_(self.bucket_name).download(filename) - byte_stream = io.BytesIO(result) - while chunk := byte_stream.read(4096): # Read in chunks of 4KB - yield chunk - - return generate() + result = self.client.storage.from_(self.bucket_name).download(filename) + byte_stream = io.BytesIO(result) + while chunk := 
byte_stream.read(4096): # Read in chunks of 4KB + yield chunk def download(self, filename, target_filepath): result = self.client.storage.from_(self.bucket_name).download(filename) - Path(result).write_bytes(result) + Path(target_filepath).write_bytes(result) def exists(self, filename): result = self.client.storage.from_(self.bucket_name).list(filename) diff --git a/api/extensions/storage/tencent_cos_storage.py b/api/extensions/storage/tencent_cos_storage.py index c529dce7ad..13a6c9239c 100644 --- a/api/extensions/storage/tencent_cos_storage.py +++ b/api/extensions/storage/tencent_cos_storage.py @@ -1,23 +1,23 @@ from collections.abc import Generator -from flask import Flask from qcloud_cos import CosConfig, CosS3Client +from configs import dify_config from extensions.storage.base_storage import BaseStorage class TencentCosStorage(BaseStorage): """Implementation for Tencent Cloud COS storage.""" - def __init__(self, app: Flask): - super().__init__(app) - app_config = self.app.config - self.bucket_name = app_config.get("TENCENT_COS_BUCKET_NAME") + def __init__(self): + super().__init__() + + self.bucket_name = dify_config.TENCENT_COS_BUCKET_NAME config = CosConfig( - Region=app_config.get("TENCENT_COS_REGION"), - SecretId=app_config.get("TENCENT_COS_SECRET_ID"), - SecretKey=app_config.get("TENCENT_COS_SECRET_KEY"), - Scheme=app_config.get("TENCENT_COS_SCHEME"), + Region=dify_config.TENCENT_COS_REGION, + SecretId=dify_config.TENCENT_COS_SECRET_ID, + SecretKey=dify_config.TENCENT_COS_SECRET_KEY, + Scheme=dify_config.TENCENT_COS_SCHEME, ) self.client = CosS3Client(config) @@ -29,11 +29,8 @@ class TencentCosStorage(BaseStorage): return data def load_stream(self, filename: str) -> Generator: - def generate(filename: str = filename) -> Generator: - response = self.client.get_object(Bucket=self.bucket_name, Key=filename) - yield from response["Body"].get_stream(chunk_size=4096) - - return generate() + response = self.client.get_object(Bucket=self.bucket_name, Key=filename) + yield from response["Body"].get_stream(chunk_size=4096) def download(self, filename, target_filepath): response = self.client.get_object(Bucket=self.bucket_name, Key=filename) diff --git a/api/extensions/storage/volcengine_tos_storage.py b/api/extensions/storage/volcengine_tos_storage.py index 1bedcf24c2..de82be04ea 100644 --- a/api/extensions/storage/volcengine_tos_storage.py +++ b/api/extensions/storage/volcengine_tos_storage.py @@ -1,23 +1,22 @@ from collections.abc import Generator import tos -from flask import Flask +from configs import dify_config from extensions.storage.base_storage import BaseStorage class VolcengineTosStorage(BaseStorage): """Implementation for Volcengine TOS storage.""" - def __init__(self, app: Flask): - super().__init__(app) - app_config = self.app.config - self.bucket_name = app_config.get("VOLCENGINE_TOS_BUCKET_NAME") + def __init__(self): + super().__init__() + self.bucket_name = dify_config.VOLCENGINE_TOS_BUCKET_NAME self.client = tos.TosClientV2( - ak=app_config.get("VOLCENGINE_TOS_ACCESS_KEY"), - sk=app_config.get("VOLCENGINE_TOS_SECRET_KEY"), - endpoint=app_config.get("VOLCENGINE_TOS_ENDPOINT"), - region=app_config.get("VOLCENGINE_TOS_REGION"), + ak=dify_config.VOLCENGINE_TOS_ACCESS_KEY, + sk=dify_config.VOLCENGINE_TOS_SECRET_KEY, + endpoint=dify_config.VOLCENGINE_TOS_ENDPOINT, + region=dify_config.VOLCENGINE_TOS_REGION, ) def save(self, filename, data): @@ -28,12 +27,9 @@ class VolcengineTosStorage(BaseStorage): return data def load_stream(self, filename: str) -> Generator: - def 
generate(filename: str = filename) -> Generator: - response = self.client.get_object(bucket=self.bucket_name, key=filename) - while chunk := response.read(4096): - yield chunk - - return generate() + response = self.client.get_object(bucket=self.bucket_name, key=filename) + while chunk := response.read(4096): + yield chunk def download(self, filename, target_filepath): self.client.get_object_to_file(bucket=self.bucket_name, key=filename, file_path=target_filepath) diff --git a/api/fields/conversation_fields.py b/api/fields/conversation_fields.py index 3dcd88d1de..bf1c491a05 100644 --- a/api/fields/conversation_fields.py +++ b/api/fields/conversation_fields.py @@ -3,6 +3,8 @@ from flask_restful import fields from fields.member_fields import simple_account_fields from libs.helper import TimestampField +from .raws import FilesContainedField + class MessageTextField(fields.Raw): def format(self, value): @@ -33,8 +35,12 @@ annotation_hit_history_fields = { message_file_fields = { "id": fields.String, + "filename": fields.String, "type": fields.String, "url": fields.String, + "mime_type": fields.String, + "size": fields.Integer, + "transfer_method": fields.String, "belongs_to": fields.String(default="user"), } @@ -55,7 +61,7 @@ agent_thought_fields = { message_detail_fields = { "id": fields.String, "conversation_id": fields.String, - "inputs": fields.Raw, + "inputs": FilesContainedField, "query": fields.String, "message": fields.Raw, "message_tokens": fields.Integer, @@ -71,7 +77,7 @@ message_detail_fields = { "annotation_hit_history": fields.Nested(annotation_hit_history_fields, allow_null=True), "created_at": TimestampField, "agent_thoughts": fields.List(fields.Nested(agent_thought_fields)), - "message_files": fields.List(fields.Nested(message_file_fields), attribute="files"), + "message_files": fields.List(fields.Nested(message_file_fields)), "metadata": fields.Raw(attribute="message_metadata_dict"), "status": fields.String, "error": fields.String, @@ -99,7 +105,7 @@ simple_model_config_fields = { } simple_message_detail_fields = { - "inputs": fields.Raw, + "inputs": FilesContainedField, "query": fields.String, "message": MessageTextField, "answer": fields.String, @@ -187,7 +193,7 @@ conversation_detail_fields = { simple_conversation_fields = { "id": fields.String, "name": fields.String, - "inputs": fields.Raw, + "inputs": FilesContainedField, "status": fields.String, "introduction": fields.String, "created_at": TimestampField, diff --git a/api/fields/file_fields.py b/api/fields/file_fields.py index e5a03ce77e..9ff1111b74 100644 --- a/api/fields/file_fields.py +++ b/api/fields/file_fields.py @@ -6,6 +6,8 @@ upload_config_fields = { "file_size_limit": fields.Integer, "batch_count_limit": fields.Integer, "image_file_size_limit": fields.Integer, + "video_file_size_limit": fields.Integer, + "audio_file_size_limit": fields.Integer, } file_fields = { @@ -17,3 +19,8 @@ file_fields = { "created_by": fields.String, "created_at": TimestampField, } + +remote_file_info_fields = { + "file_type": fields.String(attribute="file_type"), + "file_length": fields.Integer(attribute="file_length"), +} diff --git a/api/fields/message_fields.py b/api/fields/message_fields.py index c938097131..5f6e7884a6 100644 --- a/api/fields/message_fields.py +++ b/api/fields/message_fields.py @@ -3,6 +3,8 @@ from flask_restful import fields from fields.conversation_fields import message_file_fields from libs.helper import TimestampField +from .raws import FilesContainedField + feedback_fields = {"rating": fields.String} 
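# Editor's note: `FilesContainedField` (imported above from fields/raws.py) is not shown in this diff.
# A minimal sketch of what such a field could look like, assuming its only job is to make pydantic
# `File` objects inside `inputs` JSON-serializable when marshalled by flask_restful; the module path
# comes from the import above, but the body below is inferred, not the actual implementation:
#
#     from flask_restful import fields
#     from core.file import File
#
#     class FilesContainedField(fields.Raw):
#         def format(self, value):
#             # Recursively dump File models; pass primitives through unchanged.
#             if isinstance(value, File):
#                 return value.model_dump()
#             if isinstance(value, dict):
#                 return {k: self.format(v) for k, v in value.items()}
#             if isinstance(value, list):
#                 return [self.format(v) for v in value]
#             return value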
retriever_resource_fields = { @@ -63,14 +65,14 @@ message_fields = { "id": fields.String, "conversation_id": fields.String, "parent_message_id": fields.String, - "inputs": fields.Raw, + "inputs": FilesContainedField, "query": fields.String, "answer": fields.String(attribute="re_sign_file_url_answer"), "feedback": fields.Nested(feedback_fields, attribute="user_feedback", allow_null=True), "retriever_resources": fields.List(fields.Nested(retriever_resource_fields)), "created_at": TimestampField, "agent_thoughts": fields.List(fields.Nested(agent_thought_fields)), - "message_files": fields.List(fields.Nested(message_file_fields), attribute="files"), + "message_files": fields.List(fields.Nested(message_file_fields)), "status": fields.String, "error": fields.String, } diff --git a/api/fields/workflow_fields.py b/api/fields/workflow_fields.py index 2adef63ada..0d860d6f40 100644 --- a/api/fields/workflow_fields.py +++ b/api/fields/workflow_fields.py @@ -1,7 +1,7 @@ from flask_restful import fields -from core.app.segments import SecretVariable, SegmentType, Variable from core.helper import encrypter +from core.variables import SecretVariable, SegmentType, Variable from fields.member_fields import simple_account_fields from libs.helper import TimestampField diff --git a/api/libs/helper.py b/api/libs/helper.py index d8a8e7f411..7638796508 100644 --- a/api/libs/helper.py +++ b/api/libs/helper.py @@ -12,11 +12,12 @@ from hashlib import sha256 from typing import Any, Optional, Union from zoneinfo import available_timezones -from flask import Response, current_app, stream_with_context +from flask import Response, stream_with_context from flask_restful import fields +from configs import dify_config from core.app.features.rate_limiting.rate_limit import RateLimitGenerator -from core.file.upload_file_parser import UploadFileParser +from core.file import helpers as file_helpers from extensions.ext_redis import redis_client from models.account import Account @@ -33,7 +34,7 @@ class AppIconUrlField(fields.Raw): from models.model import IconType if obj.icon_type == IconType.IMAGE.value: - return UploadFileParser.get_signed_temp_image_url(obj.icon) + return file_helpers.get_signed_file_url(obj.icon) return None @@ -189,23 +190,39 @@ def compact_generate_response(response: Union[dict, RateLimitGenerator]) -> Resp class TokenManager: @classmethod - def generate_token(cls, account: Account, token_type: str, additional_data: Optional[dict] = None) -> str: - old_token = cls._get_current_token_for_account(account.id, token_type) - if old_token: - if isinstance(old_token, bytes): - old_token = old_token.decode("utf-8") - cls.revoke_token(old_token, token_type) + def generate_token( + cls, + token_type: str, + account: Optional[Account] = None, + email: Optional[str] = None, + additional_data: Optional[dict] = None, + ) -> str: + if account is None and email is None: + raise ValueError("Account or email must be provided") + + account_id = account.id if account else None + account_email = account.email if account else email + + if account_id: + old_token = cls._get_current_token_for_account(account_id, token_type) + if old_token: + if isinstance(old_token, bytes): + old_token = old_token.decode("utf-8") + cls.revoke_token(old_token, token_type) token = str(uuid.uuid4()) - token_data = {"account_id": account.id, "email": account.email, "token_type": token_type} + token_data = {"account_id": account_id, "email": account_email, "token_type": token_type} if additional_data: token_data.update(additional_data) - expiry_hours = 
current_app.config[f"{token_type.upper()}_TOKEN_EXPIRY_HOURS"] + expiry_minutes = dify_config.model_dump().get(f"{token_type.upper()}_TOKEN_EXPIRY_MINUTES") token_key = cls._get_token_key(token, token_type) - redis_client.setex(token_key, expiry_hours * 60 * 60, json.dumps(token_data)) + expiry_time = int(expiry_minutes * 60) + redis_client.setex(token_key, expiry_time, json.dumps(token_data)) + + if account_id: + cls._set_current_token_for_account(account.id, token, token_type, expiry_minutes) - cls._set_current_token_for_account(account.id, token, token_type, expiry_hours) return token @classmethod @@ -234,9 +251,12 @@ class TokenManager: return current_token @classmethod - def _set_current_token_for_account(cls, account_id: str, token: str, token_type: str, expiry_hours: int): + def _set_current_token_for_account( + cls, account_id: str, token: str, token_type: str, expiry_hours: Union[int, float] + ): key = cls._get_account_token_key(account_id, token_type) - redis_client.setex(key, expiry_hours * 60 * 60, token) + expiry_time = int(expiry_hours * 60 * 60) + redis_client.setex(key, expiry_time, token) @classmethod def _get_account_token_key(cls, account_id: str, token_type: str) -> str: diff --git a/api/libs/login.py b/api/libs/login.py index 7f05eb8404..0ea191a185 100644 --- a/api/libs/login.py +++ b/api/libs/login.py @@ -1,4 +1,3 @@ -import os from functools import wraps from flask import current_app, g, has_request_context, request @@ -7,6 +6,7 @@ from flask_login.config import EXEMPT_METHODS from werkzeug.exceptions import Unauthorized from werkzeug.local import LocalProxy +from configs import dify_config from extensions.ext_database import db from models.account import Account, Tenant, TenantAccountJoin @@ -52,8 +52,7 @@ def login_required(func): @wraps(func) def decorated_view(*args, **kwargs): auth_header = request.headers.get("Authorization") - admin_api_key_enable = os.getenv("ADMIN_API_KEY_ENABLE", default="False") - if admin_api_key_enable.lower() == "true": + if dify_config.ADMIN_API_KEY_ENABLE: if auth_header: if " " not in auth_header: raise Unauthorized("Invalid Authorization header format. Expected 'Bearer ' format.") @@ -61,10 +60,10 @@ def login_required(func): auth_scheme = auth_scheme.lower() if auth_scheme != "bearer": raise Unauthorized("Invalid Authorization header format. 
Expected 'Bearer ' format.") - admin_api_key = os.getenv("ADMIN_API_KEY") + admin_api_key = dify_config.ADMIN_API_KEY if admin_api_key: - if os.getenv("ADMIN_API_KEY") == auth_token: + if admin_api_key == auth_token: workspace_id = request.headers.get("X-WORKSPACE-ID") if workspace_id: tenant_account_join = ( @@ -82,7 +81,7 @@ def login_required(func): account.current_tenant = tenant current_app.login_manager._update_request_context_with_user(account) user_logged_in.send(current_app._get_current_object(), user=_get_user()) - if request.method in EXEMPT_METHODS or current_app.config.get("LOGIN_DISABLED"): + if request.method in EXEMPT_METHODS or dify_config.LOGIN_DISABLED: pass elif not current_user.is_authenticated: return current_app.login_manager.unauthorized() diff --git a/api/libs/oauth.py b/api/libs/oauth.py index d8ce1a1e66..6b6919de24 100644 --- a/api/libs/oauth.py +++ b/api/libs/oauth.py @@ -1,5 +1,6 @@ import urllib.parse from dataclasses import dataclass +from typing import Optional import requests @@ -40,12 +41,14 @@ class GitHubOAuth(OAuth): _USER_INFO_URL = "https://api.github.com/user" _EMAIL_INFO_URL = "https://api.github.com/user/emails" - def get_authorization_url(self): + def get_authorization_url(self, invite_token: Optional[str] = None): params = { "client_id": self.client_id, "redirect_uri": self.redirect_uri, "scope": "user:email", # Request only basic user information } + if invite_token: + params["state"] = invite_token return f"{self._AUTH_URL}?{urllib.parse.urlencode(params)}" def get_access_token(self, code: str): @@ -90,13 +93,15 @@ class GoogleOAuth(OAuth): _TOKEN_URL = "https://oauth2.googleapis.com/token" _USER_INFO_URL = "https://www.googleapis.com/oauth2/v3/userinfo" - def get_authorization_url(self): + def get_authorization_url(self, invite_token: Optional[str] = None): params = { "client_id": self.client_id, "response_type": "code", "redirect_uri": self.redirect_uri, "scope": "openid email", } + if invite_token: + params["state"] = invite_token return f"{self._AUTH_URL}?{urllib.parse.urlencode(params)}" def get_access_token(self, code: str): diff --git a/api/libs/password.py b/api/libs/password.py index cfcc0db22d..cdf55c57e5 100644 --- a/api/libs/password.py +++ b/api/libs/password.py @@ -13,7 +13,7 @@ def valid_password(password): if re.match(pattern, password) is not None: return password - raise ValueError("Not a valid password.") + raise ValueError("Password must contain letters and numbers, and the length must be at least 8.") def hash_password(password_str, salt_byte): diff --git a/api/migrations/versions/04c602f5dc9b_update_appmodelconfig_and_add_table_.py b/api/migrations/versions/04c602f5dc9b_update_appmodelconfig_and_add_table_.py index be2c615525..6a7402b16a 100644 --- a/api/migrations/versions/04c602f5dc9b_update_appmodelconfig_and_add_table_.py +++ b/api/migrations/versions/04c602f5dc9b_update_appmodelconfig_and_add_table_.py @@ -8,7 +8,7 @@ Create Date: 2024-06-12 07:49:07.666510 import sqlalchemy as sa from alembic import op -import models as models +import models.types # revision identifiers, used by Alembic. revision = '04c602f5dc9b' @@ -20,8 +20,8 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust!
### op.create_table('tracing_app_configs', - sa.Column('id', models.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('app_id', models.StringUUID(), nullable=False), + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), sa.Column('tracing_provider', sa.String(length=255), nullable=True), sa.Column('tracing_config', sa.JSON(), nullable=True), sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), diff --git a/api/migrations/versions/2024_08_15_0956-0251a1c768cc_add_tidb_auth_binding.py b/api/migrations/versions/2024_08_15_0956-0251a1c768cc_add_tidb_auth_binding.py new file mode 100644 index 0000000000..ca2e410442 --- /dev/null +++ b/api/migrations/versions/2024_08_15_0956-0251a1c768cc_add_tidb_auth_binding.py @@ -0,0 +1,51 @@ +"""add-tidb-auth-binding + +Revision ID: 0251a1c768cc +Revises: bbadea11becb +Create Date: 2024-08-15 09:56:59.012490 + +""" +import sqlalchemy as sa +from alembic import op + +import models as models + +# revision identifiers, used by Alembic. +revision = '0251a1c768cc' +down_revision = 'bbadea11becb' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('tidb_auth_bindings', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=True), + sa.Column('cluster_id', sa.String(length=255), nullable=False), + sa.Column('cluster_name', sa.String(length=255), nullable=False), + sa.Column('active', sa.Boolean(), server_default=sa.text('false'), nullable=False), + sa.Column('status', sa.String(length=255), server_default=sa.text("'CREATING'::character varying"), nullable=False), + sa.Column('account', sa.String(length=255), nullable=False), + sa.Column('password', sa.String(length=255), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='tidb_auth_bindings_pkey') + ) + with op.batch_alter_table('tidb_auth_bindings', schema=None) as batch_op: + batch_op.create_index('tidb_auth_bindings_active_idx', ['active'], unique=False) + batch_op.create_index('tidb_auth_bindings_status_idx', ['status'], unique=False) + batch_op.create_index('tidb_auth_bindings_created_at_idx', ['created_at'], unique=False) + batch_op.create_index('tidb_auth_bindings_tenant_idx', ['tenant_id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust!
### + with op.batch_alter_table('tidb_auth_bindings', schema=None) as batch_op: + batch_op.drop_index('tidb_auth_bindings_tenant_idx') + batch_op.drop_index('tidb_auth_bindings_created_at_idx') + batch_op.drop_index('tidb_auth_bindings_active_idx') + batch_op.drop_index('tidb_auth_bindings_status_idx') + op.drop_table('tidb_auth_bindings') + # ### end Alembic commands ### diff --git a/api/migrations/versions/2024_10_22_0959-43fa78bc3b7d_add_white_list.py b/api/migrations/versions/2024_10_22_0959-43fa78bc3b7d_add_white_list.py new file mode 100644 index 0000000000..9daf148bc4 --- /dev/null +++ b/api/migrations/versions/2024_10_22_0959-43fa78bc3b7d_add_white_list.py @@ -0,0 +1,42 @@ +"""add_white_list + +Revision ID: 43fa78bc3b7d +Revises: 0251a1c768cc +Create Date: 2024-10-22 09:59:23.713716 + +""" +from alembic import op +import models as models +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '43fa78bc3b7d' +down_revision = '0251a1c768cc' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('whitelists', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=True), + sa.Column('category', sa.String(length=255), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='whitelists_pkey') + ) + with op.batch_alter_table('whitelists', schema=None) as batch_op: + batch_op.create_index('whitelists_tenant_idx', ['tenant_id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + + with op.batch_alter_table('whitelists', schema=None) as batch_op: + batch_op.drop_index('whitelists_tenant_idx') + + op.drop_table('whitelists') + # ### end Alembic commands ### diff --git a/api/migrations/versions/3b18fea55204_add_tool_label_bings.py b/api/migrations/versions/3b18fea55204_add_tool_label_bings.py index db3119badf..bf54c247ea 100644 --- a/api/migrations/versions/3b18fea55204_add_tool_label_bings.py +++ b/api/migrations/versions/3b18fea55204_add_tool_label_bings.py @@ -8,7 +8,7 @@ Create Date: 2024-05-14 09:27:18.857890 import sqlalchemy as sa from alembic import op -import models as models +import models.types # revision identifiers, used by Alembic. revision = '3b18fea55204' @@ -20,7 +20,7 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### op.create_table('tool_label_bindings', - sa.Column('id', models.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), sa.Column('tool_id', sa.String(length=64), nullable=False), sa.Column('tool_type', sa.String(length=40), nullable=False), sa.Column('label_name', sa.String(length=40), nullable=False), diff --git a/api/migrations/versions/4e99a8df00ff_add_load_balancing.py b/api/migrations/versions/4e99a8df00ff_add_load_balancing.py index 67d7b9fbf5..3be4ba4f2a 100644 --- a/api/migrations/versions/4e99a8df00ff_add_load_balancing.py +++ b/api/migrations/versions/4e99a8df00ff_add_load_balancing.py @@ -8,7 +8,7 @@ Create Date: 2024-05-10 12:08:09.812736 import sqlalchemy as sa from alembic import op -import models as models +import models.types # revision identifiers, used by Alembic. revision = '4e99a8df00ff' @@ -20,8 +20,8 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('load_balancing_model_configs', - sa.Column('id', models.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', models.StringUUID(), nullable=False), + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), sa.Column('provider_name', sa.String(length=255), nullable=False), sa.Column('model_name', sa.String(length=255), nullable=False), sa.Column('model_type', sa.String(length=40), nullable=False), @@ -36,8 +36,8 @@ def upgrade(): batch_op.create_index('load_balancing_model_config_tenant_provider_model_idx', ['tenant_id', 'provider_name', 'model_type'], unique=False) op.create_table('provider_model_settings', - sa.Column('id', models.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', models.StringUUID(), nullable=False), + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), sa.Column('provider_name', sa.String(length=255), nullable=False), sa.Column('model_name', sa.String(length=255), nullable=False), sa.Column('model_type', sa.String(length=40), nullable=False), diff --git a/api/migrations/versions/7b45942e39bb_add_api_key_auth_binding.py b/api/migrations/versions/7b45942e39bb_add_api_key_auth_binding.py index f63bad9345..2ba0e13caa 100644 --- a/api/migrations/versions/7b45942e39bb_add_api_key_auth_binding.py +++ b/api/migrations/versions/7b45942e39bb_add_api_key_auth_binding.py @@ -8,7 +8,7 @@ Create Date: 2024-05-14 07:31:29.702766 import sqlalchemy as sa from alembic import op -import models as models +import models.types # revision identifiers, used by Alembic. revision = '7b45942e39bb' @@ -20,8 +20,8 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### op.create_table('data_source_api_key_auth_bindings', - sa.Column('id', models.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('tenant_id', models.StringUUID(), nullable=False), + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), sa.Column('category', sa.String(length=255), nullable=False), sa.Column('provider', sa.String(length=255), nullable=False), sa.Column('credentials', sa.Text(), nullable=True), diff --git a/api/migrations/versions/7bdef072e63a_add_workflow_tool.py b/api/migrations/versions/7bdef072e63a_add_workflow_tool.py index 67b61e5c76..f09a682f28 100644 --- a/api/migrations/versions/7bdef072e63a_add_workflow_tool.py +++ b/api/migrations/versions/7bdef072e63a_add_workflow_tool.py @@ -1,6 +1,6 @@ """add workflow tool -Revision ID: 7bdef072e63a +Revision ID: 7bdef072e63a Revises: 5fda94355fce Create Date: 2024-05-04 09:47:19.366961 @@ -8,7 +8,7 @@ Create Date: 2024-05-04 09:47:19.366961 import sqlalchemy as sa from alembic import op -import models as models +import models.types # revision identifiers, used by Alembic. revision = '7bdef072e63a' @@ -20,12 +20,12 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('tool_workflow_providers', - sa.Column('id', models.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), sa.Column('name', sa.String(length=40), nullable=False), sa.Column('icon', sa.String(length=255), nullable=False), - sa.Column('app_id', models.StringUUID(), nullable=False), - sa.Column('user_id', models.StringUUID(), nullable=False), - sa.Column('tenant_id', models.StringUUID(), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), + sa.Column('user_id', models.types.StringUUID(), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), sa.Column('description', sa.Text(), nullable=False), sa.Column('parameter_configuration', sa.Text(), server_default='[]', nullable=False), sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), diff --git a/api/migrations/versions/7e6a8693e07a_add_table_dataset_permissions.py b/api/migrations/versions/7e6a8693e07a_add_table_dataset_permissions.py index ff53eb65a6..865572f3a7 100644 --- a/api/migrations/versions/7e6a8693e07a_add_table_dataset_permissions.py +++ b/api/migrations/versions/7e6a8693e07a_add_table_dataset_permissions.py @@ -8,7 +8,7 @@ Create Date: 2024-06-25 03:20:46.012193 import sqlalchemy as sa from alembic import op -import models as models +import models.types # revision identifiers, used by Alembic. revision = '7e6a8693e07a' @@ -20,9 +20,9 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### op.create_table('dataset_permissions', - sa.Column('id', models.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('dataset_id', models.StringUUID(), nullable=False), - sa.Column('account_id', models.StringUUID(), nullable=False), + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('dataset_id', models.types.StringUUID(), nullable=False), + sa.Column('account_id', models.types.StringUUID(), nullable=False), sa.Column('has_permission', sa.Boolean(), server_default=sa.text('true'), nullable=False), sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), sa.PrimaryKeyConstraint('id', name='dataset_permission_pkey') diff --git a/api/migrations/versions/c031d46af369_remove_app_model_config_trace_config_.py b/api/migrations/versions/c031d46af369_remove_app_model_config_trace_config_.py index 1ac44d083a..469c04338a 100644 --- a/api/migrations/versions/c031d46af369_remove_app_model_config_trace_config_.py +++ b/api/migrations/versions/c031d46af369_remove_app_model_config_trace_config_.py @@ -9,7 +9,7 @@ import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import postgresql -import models as models +import models.types # revision identifiers, used by Alembic. revision = 'c031d46af369' @@ -21,8 +21,8 @@ depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('trace_app_config', - sa.Column('id', models.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), - sa.Column('app_id', models.StringUUID(), nullable=False), + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('app_id', models.types.StringUUID(), nullable=False), sa.Column('tracing_provider', sa.String(length=255), nullable=True), sa.Column('tracing_config', sa.JSON(), nullable=True), sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), diff --git a/api/models/__init__.py b/api/models/__init__.py index 30ceef057e..1d8bae6cfa 100644 --- a/api/models/__init__.py +++ b/api/models/__init__.py @@ -1,29 +1,55 @@ -from enum import Enum +from .account import Account, AccountIntegrate, InvitationCode, Tenant +from .dataset import Dataset, DatasetProcessRule, Document, DocumentSegment +from .model import ( + ApiToken, + App, + AppMode, + Conversation, + EndUser, + FileUploadConfig, + InstalledApp, + Message, + MessageAnnotation, + MessageFile, + RecommendedApp, + Site, + UploadFile, +) +from .source import DataSourceOauthBinding +from .tools import ToolFile +from .workflow import ( + ConversationVariable, + Workflow, + WorkflowAppLog, + WorkflowRun, +) -from .model import App, AppMode, Message -from .types import StringUUID -from .workflow import ConversationVariable, Workflow, WorkflowNodeExecutionStatus - -__all__ = ["ConversationVariable", "StringUUID", "AppMode", "WorkflowNodeExecutionStatus", "Workflow", "App", "Message"] - - -class CreatedByRole(Enum): - """ - Enum class for createdByRole - """ - - ACCOUNT = "account" - END_USER = "end_user" - - @classmethod - def value_of(cls, value: str) -> "CreatedByRole": - """ - Get value of given mode. 
- - :param value: mode value - :return: mode - """ - for role in cls: - if role.value == value: - return role - raise ValueError(f"invalid createdByRole value {value}") +__all__ = [ + "ConversationVariable", + "Document", + "Dataset", + "DatasetProcessRule", + "DocumentSegment", + "DataSourceOauthBinding", + "AppMode", + "Workflow", + "App", + "Message", + "EndUser", + "MessageFile", + "UploadFile", + "Account", + "WorkflowAppLog", + "WorkflowRun", + "Site", + "InstalledApp", + "RecommendedApp", + "ApiToken", + "AccountIntegrate", + "InvitationCode", + "Tenant", + "Conversation", + "MessageAnnotation", + "FileUploadConfig", + "ToolFile", +] diff --git a/api/models/dataset.py b/api/models/dataset.py index 4224ee5e9c..a1a626d7e4 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -560,10 +560,12 @@ class DocumentSegment(db.Model): ) def get_sign_content(self): - pattern = r"/files/([a-f0-9\-]+)/image-preview" - text = self.content - matches = re.finditer(pattern, text) signed_urls = [] + text = self.content + + # For data before v0.10.0 + pattern = r"/files/([a-f0-9\-]+)/image-preview" + matches = re.finditer(pattern, text) for match in matches: upload_file_id = match.group(1) nonce = os.urandom(16).hex() @@ -577,6 +579,22 @@ class DocumentSegment(db.Model): signed_url = f"{match.group(0)}?{params}" signed_urls.append((match.start(), match.end(), signed_url)) + # For data after v0.10.0 + pattern = r"/files/([a-f0-9\-]+)/file-preview" + matches = re.finditer(pattern, text) + for match in matches: + upload_file_id = match.group(1) + nonce = os.urandom(16).hex() + timestamp = str(int(time.time())) + data_to_sign = f"file-preview|{upload_file_id}|{timestamp}|{nonce}" + secret_key = dify_config.SECRET_KEY.encode() if dify_config.SECRET_KEY else b"" + sign = hmac.new(secret_key, data_to_sign.encode(), hashlib.sha256).digest() + encoded_sign = base64.urlsafe_b64encode(sign).decode() + + params = f"timestamp={timestamp}&nonce={nonce}&sign={encoded_sign}" + signed_url = f"{match.group(0)}?{params}" + signed_urls.append((match.start(), match.end(), signed_url)) + # Reconstruct the text with signed URLs offset = 0 for start, end, signed_url in signed_urls: @@ -704,6 +722,38 @@ class DatasetCollectionBinding(db.Model): created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) +class TidbAuthBinding(db.Model): + __tablename__ = "tidb_auth_bindings" + __table_args__ = ( + db.PrimaryKeyConstraint("id", name="tidb_auth_bindings_pkey"), + db.Index("tidb_auth_bindings_tenant_idx", "tenant_id"), + db.Index("tidb_auth_bindings_active_idx", "active"), + db.Index("tidb_auth_bindings_created_at_idx", "created_at"), + db.Index("tidb_auth_bindings_status_idx", "status"), + ) + id = db.Column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) + tenant_id = db.Column(StringUUID, nullable=True) + cluster_id = db.Column(db.String(255), nullable=False) + cluster_name = db.Column(db.String(255), nullable=False) + active = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) + status = db.Column(db.String(255), nullable=False, server_default=db.text("CREATING")) + account = db.Column(db.String(255), nullable=False) + password = db.Column(db.String(255), nullable=False) + created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + + +class Whitelist(db.Model): + __tablename__ = "whitelists" + __table_args__ = ( + db.PrimaryKeyConstraint("id", name="whitelists_pkey"), + 
db.Index("whitelists_tenant_idx", "tenant_id"), + ) + id = db.Column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) + tenant_id = db.Column(StringUUID, nullable=True) + category = db.Column(db.String(255), nullable=False) + created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + + class DatasetPermission(db.Model): __tablename__ = "dataset_permissions" __table_args__ = ( diff --git a/api/models/model.py b/api/models/model.py index 0ac9334321..3bd5886d75 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -1,24 +1,37 @@ import json import re import uuid +from collections.abc import Mapping, Sequence +from datetime import datetime from enum import Enum -from typing import Optional +from typing import Any, Literal, Optional from flask import request from flask_login import UserMixin +from pydantic import BaseModel, Field from sqlalchemy import Float, func, text from sqlalchemy.orm import Mapped, mapped_column from configs import dify_config +from core.file import FILE_MODEL_IDENTITY, File, FileExtraConfig, FileTransferMethod, FileType +from core.file import helpers as file_helpers from core.file.tool_file_parser import ToolFileParser -from core.file.upload_file_parser import UploadFileParser from extensions.ext_database import db from libs.helper import generate_string +from models.enums import CreatedByRole from .account import Account, Tenant from .types import StringUUID +class FileUploadConfig(BaseModel): + enabled: bool = Field(default=False) + allowed_file_types: Sequence[FileType] = Field(default_factory=list) + allowed_extensions: Sequence[str] = Field(default_factory=list) + allowed_upload_methods: Sequence[FileTransferMethod] = Field(default_factory=list) + number_limits: int = Field(default=0, gt=0, le=10) + + class DifySetup(db.Model): __tablename__ = "dify_setups" __table_args__ = (db.PrimaryKeyConstraint("version", name="dify_setup_pkey"),) @@ -27,7 +40,7 @@ class DifySetup(db.Model): setup_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) -class AppMode(Enum): +class AppMode(str, Enum): COMPLETION = "completion" WORKFLOW = "workflow" CHAT = "chat" @@ -59,7 +72,7 @@ class App(db.Model): __table_args__ = (db.PrimaryKeyConstraint("id", name="app_pkey"), db.Index("app_tenant_id_idx", "tenant_id")) id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) + tenant_id: Mapped[str] = db.Column(StringUUID, nullable=False) name = db.Column(db.String(255), nullable=False) description = db.Column(db.Text, nullable=False, server_default=db.text("''::character varying")) mode = db.Column(db.String(255), nullable=False) @@ -530,7 +543,7 @@ class Conversation(db.Model): mode = db.Column(db.String(255), nullable=False) name = db.Column(db.String(255), nullable=False) summary = db.Column(db.Text) - inputs = db.Column(db.JSON) + _inputs: Mapped[dict] = mapped_column("inputs", db.JSON) introduction = db.Column(db.Text) system_instruction = db.Column(db.Text) system_instruction_tokens = db.Column(db.Integer, nullable=False, server_default=db.text("0")) @@ -552,6 +565,28 @@ class Conversation(db.Model): is_deleted = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) + @property + def inputs(self): + inputs = self._inputs.copy() + for key, value in inputs.items(): + if isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY: + inputs[key] = 
File.model_validate(value) + elif isinstance(value, list) and all( + isinstance(item, dict) and item.get("dify_model_identity") == FILE_MODEL_IDENTITY for item in value + ): + inputs[key] = [File.model_validate(item) for item in value] + return inputs + + @inputs.setter + def inputs(self, value: Mapping[str, Any]): + inputs = dict(value) + for k, v in inputs.items(): + if isinstance(v, File): + inputs[k] = v.model_dump() + elif isinstance(v, list) and all(isinstance(item, File) for item in v): + inputs[k] = [item.model_dump() for item in v] + self._inputs = inputs + @property def model_config(self): model_config = {} @@ -700,13 +735,13 @@ class Message(db.Model): model_id = db.Column(db.String(255), nullable=True) override_model_configs = db.Column(db.Text) conversation_id = db.Column(StringUUID, db.ForeignKey("conversations.id"), nullable=False) - inputs = db.Column(db.JSON) - query = db.Column(db.Text, nullable=False) + _inputs: Mapped[dict] = mapped_column("inputs", db.JSON) + query: Mapped[str] = db.Column(db.Text, nullable=False) message = db.Column(db.JSON, nullable=False) message_tokens = db.Column(db.Integer, nullable=False, server_default=db.text("0")) message_unit_price = db.Column(db.Numeric(10, 4), nullable=False) message_price_unit = db.Column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001")) - answer = db.Column(db.Text, nullable=False) + answer: Mapped[str] = db.Column(db.Text, nullable=False) answer_tokens = db.Column(db.Integer, nullable=False, server_default=db.text("0")) answer_unit_price = db.Column(db.Numeric(10, 4), nullable=False) answer_price_unit = db.Column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001")) @@ -717,15 +752,37 @@ class Message(db.Model): status = db.Column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying")) error = db.Column(db.Text) message_metadata = db.Column(db.Text) - invoke_from = db.Column(db.String(255), nullable=True) + invoke_from: Mapped[Optional[str]] = db.Column(db.String(255), nullable=True) from_source = db.Column(db.String(255), nullable=False) - from_end_user_id = db.Column(StringUUID) - from_account_id = db.Column(StringUUID) + from_end_user_id: Mapped[Optional[str]] = db.Column(StringUUID) + from_account_id: Mapped[Optional[str]] = db.Column(StringUUID) created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) agent_based = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) workflow_run_id = db.Column(StringUUID) + @property + def inputs(self): + inputs = self._inputs.copy() + for key, value in inputs.items(): + if isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY: + inputs[key] = File.model_validate(value) + elif isinstance(value, list) and all( + isinstance(item, dict) and item.get("dify_model_identity") == FILE_MODEL_IDENTITY for item in value + ): + inputs[key] = [File.model_validate(item) for item in value] + return inputs + + @inputs.setter + def inputs(self, value: Mapping[str, Any]): + inputs = dict(value) + for k, v in inputs.items(): + if isinstance(v, File): + inputs[k] = v.model_dump() + elif isinstance(v, list) and all(isinstance(item, File) for item in v): + inputs[k] = [item.model_dump() for item in v] + self._inputs = inputs + @property def re_sign_file_url_answer(self) -> str: if not self.answer: @@ -772,19 +829,29 @@ class Message(db.Model): sign_url 
= ToolFileParser.get_tool_file_manager().sign_file( tool_file_id=tool_file_id, extension=extension ) - else: + elif "file-preview" in url: # get upload file id - upload_file_id_pattern = r"\/files\/([\w-]+)\/image-preview?\?timestamp=" + upload_file_id_pattern = r"\/files\/([\w-]+)\/file-preview?\?timestamp=" result = re.search(upload_file_id_pattern, url) if not result: continue upload_file_id = result.group(1) - if not upload_file_id: continue - - sign_url = UploadFileParser.get_signed_temp_image_url(upload_file_id) + sign_url = file_helpers.get_signed_file_url(upload_file_id) + elif "image-preview" in url: + # image-preview is deprecated, use file-preview instead + upload_file_id_pattern = r"\/files\/([\w-]+)\/image-preview?\?timestamp=" + result = re.search(upload_file_id_pattern, url) + if not result: + continue + upload_file_id = result.group(1) + if not upload_file_id: + continue + sign_url = file_helpers.get_signed_file_url(upload_file_id) + else: + continue re_sign_file_url_answer = re_sign_file_url_answer.replace(url, sign_url) @@ -870,50 +937,75 @@ class Message(db.Model): @property def message_files(self): - return db.session.query(MessageFile).filter(MessageFile.message_id == self.id).all() + from factories import file_factory - @property - def files(self): - message_files = self.message_files + message_files = db.session.query(MessageFile).filter(MessageFile.message_id == self.id).all() + current_app = db.session.query(App).filter(App.id == self.app_id).first() + if not current_app: + raise ValueError(f"App {self.app_id} not found") - files = [] + files: list[File] = [] for message_file in message_files: - url = message_file.url - if message_file.type == "image": - if message_file.transfer_method == "local_file": - upload_file = ( - db.session.query(UploadFile).filter(UploadFile.id == message_file.upload_file_id).first() - ) - - url = UploadFileParser.get_image_data(upload_file=upload_file, force_url=True) - if message_file.transfer_method == "tool_file": - # get tool file id - tool_file_id = message_file.url.split("/")[-1] - # trim extension - tool_file_id = tool_file_id.split(".")[0] - - # get extension - if "." 
in message_file.url: - extension = f'.{message_file.url.split(".")[-1]}' - if len(extension) > 10: - extension = ".bin" - else: - extension = ".bin" - # add sign url - url = ToolFileParser.get_tool_file_manager().sign_file( - tool_file_id=tool_file_id, extension=extension - ) - - files.append( - { + if message_file.transfer_method == "local_file": + if message_file.upload_file_id is None: + raise ValueError(f"MessageFile {message_file.id} is a local file but has no upload_file_id") + file = file_factory.build_from_mapping( + mapping={ + "id": message_file.id, + "upload_file_id": message_file.upload_file_id, + "transfer_method": message_file.transfer_method, + "type": message_file.type, + }, + tenant_id=current_app.tenant_id, + user_id=self.from_account_id or self.from_end_user_id or "", + role=CreatedByRole(message_file.created_by_role), + config=FileExtraConfig(), + ) + elif message_file.transfer_method == "remote_url": + if message_file.url is None: + raise ValueError(f"MessageFile {message_file.id} is a remote url but has no url") + file = file_factory.build_from_mapping( + mapping={ + "id": message_file.id, + "type": message_file.type, + "transfer_method": message_file.transfer_method, + "url": message_file.url, + }, + tenant_id=current_app.tenant_id, + user_id=self.from_account_id or self.from_end_user_id or "", + role=CreatedByRole(message_file.created_by_role), + config=FileExtraConfig(), + ) + elif message_file.transfer_method == "tool_file": + if message_file.upload_file_id is None: + assert message_file.url is not None + message_file.upload_file_id = message_file.url.split("/")[-1].split(".")[0] + mapping = { "id": message_file.id, "type": message_file.type, - "url": url, - "belongs_to": message_file.belongs_to or "user", + "transfer_method": message_file.transfer_method, + "tool_file_id": message_file.upload_file_id, } - ) + file = file_factory.build_from_mapping( + mapping=mapping, + tenant_id=current_app.tenant_id, + user_id=self.from_account_id or self.from_end_user_id or "", + role=CreatedByRole(message_file.created_by_role), + config=FileExtraConfig(), + ) + else: + raise ValueError( + f"MessageFile {message_file.id} has an invalid transfer_method {message_file.transfer_method}" + ) + files.append(file) - return files + result = [ + {"belongs_to": message_file.belongs_to, **file.to_dict()} + for (file, message_file) in zip(files, message_files) + ] + + db.session.commit() + return result @property def workflow_run(self): @@ -1003,16 +1095,39 @@ class MessageFile(db.Model): db.Index("message_file_created_by_idx", "created_by"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - message_id = db.Column(StringUUID, nullable=False) - type = db.Column(db.String(255), nullable=False) - transfer_method = db.Column(db.String(255), nullable=False) - url = db.Column(db.Text, nullable=True) - belongs_to = db.Column(db.String(255), nullable=True) - upload_file_id = db.Column(StringUUID, nullable=True) - created_by_role = db.Column(db.String(255), nullable=False) - created_by = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + def __init__( + self, + *, + message_id: str, + type: FileType, + transfer_method: FileTransferMethod, + url: str | None = None, + belongs_to: Literal["user", "assistant"] | None = None, + upload_file_id: str | None = None, + created_by_role: CreatedByRole, + created_by: str, + ): + self.message_id = message_id + self.type = type + 
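# Illustrative sketch (not part of this patch): the rewritten message_files
# property above routes every transfer method through one factory call, and
# only the key that carries the file source differs per branch. The dispatch
# reduces to a mapping builder like this (file_mapping is a hypothetical
# helper; the real entry point is factories.file_factory.build_from_mapping):
def file_mapping(message_file) -> dict:
    mapping = {
        "id": message_file.id,
        "type": message_file.type,
        "transfer_method": message_file.transfer_method,
    }
    if message_file.transfer_method == "local_file":
        mapping["upload_file_id"] = message_file.upload_file_id
    elif message_file.transfer_method == "remote_url":
        mapping["url"] = message_file.url
    elif message_file.transfer_method == "tool_file":
        # tool files reuse upload_file_id, back-filled from the URL if absent
        mapping["tool_file_id"] = message_file.upload_file_id
    else:
        raise ValueError(f"invalid transfer_method {message_file.transfer_method}")
    return mapping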
self.transfer_method = transfer_method + self.url = url + self.belongs_to = belongs_to + self.upload_file_id = upload_file_id + self.created_by_role = created_by_role.value + self.created_by = created_by + + id: Mapped[str] = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) + message_id: Mapped[str] = db.Column(StringUUID, nullable=False) + type: Mapped[str] = db.Column(db.String(255), nullable=False) + transfer_method: Mapped[str] = db.Column(db.String(255), nullable=False) + url: Mapped[Optional[str]] = db.Column(db.Text, nullable=True) + belongs_to: Mapped[Optional[str]] = db.Column(db.String(255), nullable=True) + upload_file_id: Mapped[Optional[str]] = db.Column(StringUUID, nullable=True) + created_by_role: Mapped[str] = db.Column(db.String(255), nullable=False) + created_by: Mapped[str] = db.Column(StringUUID, nullable=False) + created_at: Mapped[datetime] = db.Column( + db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)") + ) class MessageAnnotation(db.Model): @@ -1250,21 +1365,58 @@ class UploadFile(db.Model): db.Index("upload_file_tenant_idx", "tenant_id"), ) - id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id = db.Column(StringUUID, nullable=False) - storage_type = db.Column(db.String(255), nullable=False) - key = db.Column(db.String(255), nullable=False) - name = db.Column(db.String(255), nullable=False) - size = db.Column(db.Integer, nullable=False) - extension = db.Column(db.String(255), nullable=False) - mime_type = db.Column(db.String(255), nullable=True) - created_by_role = db.Column(db.String(255), nullable=False, server_default=db.text("'account'::character varying")) - created_by = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) - used = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) - used_by = db.Column(StringUUID, nullable=True) - used_at = db.Column(db.DateTime, nullable=True) - hash = db.Column(db.String(255), nullable=True) + id: Mapped[str] = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id: Mapped[str] = db.Column(StringUUID, nullable=False) + storage_type: Mapped[str] = db.Column(db.String(255), nullable=False) + key: Mapped[str] = db.Column(db.String(255), nullable=False) + name: Mapped[str] = db.Column(db.String(255), nullable=False) + size: Mapped[int] = db.Column(db.Integer, nullable=False) + extension: Mapped[str] = db.Column(db.String(255), nullable=False) + mime_type: Mapped[str] = db.Column(db.String(255), nullable=True) + created_by_role: Mapped[str] = db.Column( + db.String(255), nullable=False, server_default=db.text("'account'::character varying") + ) + created_by: Mapped[str] = db.Column(StringUUID, nullable=False) + created_at: Mapped[datetime] = db.Column( + db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)") + ) + used: Mapped[bool] = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) + used_by: Mapped[str | None] = db.Column(StringUUID, nullable=True) + used_at: Mapped[datetime | None] = db.Column(db.DateTime, nullable=True) + hash: Mapped[str | None] = db.Column(db.String(255), nullable=True) + + def __init__( + self, + *, + tenant_id: str, + storage_type: str, + key: str, + name: str, + size: int, + extension: str, + mime_type: str, + created_by_role: CreatedByRole, + created_by: str, + created_at: datetime, + used: bool, + used_by: str | None = None, + used_at: datetime | None = 
None, + hash: str | None = None, + ) -> None: + self.tenant_id = tenant_id + self.storage_type = storage_type + self.key = key + self.name = name + self.size = size + self.extension = extension + self.mime_type = mime_type + self.created_by_role = created_by_role.value + self.created_by = created_by + self.created_at = created_at + self.used = used + self.used_by = used_by + self.used_at = used_at + self.hash = hash class ApiRequest(db.Model): diff --git a/api/models/tools.py b/api/models/tools.py index 861066a2d5..691f3f3cb6 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -1,6 +1,8 @@ import json +from typing import Optional from sqlalchemy import ForeignKey +from sqlalchemy.orm import Mapped, mapped_column from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle @@ -101,7 +103,7 @@ class ApiToolProvider(db.Model): icon = db.Column(db.String(255), nullable=False) # original schema schema = db.Column(db.Text, nullable=False) - schema_type_str = db.Column(db.String(40), nullable=False) + schema_type_str: Mapped[str] = db.Column(db.String(40), nullable=False) # who created this tool user_id = db.Column(StringUUID, nullable=False) # tenant id @@ -133,11 +135,11 @@ class ApiToolProvider(db.Model): return json.loads(self.credentials_str) @property - def user(self) -> Account: + def user(self) -> Account | None: return db.session.query(Account).filter(Account.id == self.user_id).first() @property - def tenant(self) -> Tenant: + def tenant(self) -> Tenant | None: return db.session.query(Tenant).filter(Tenant.id == self.tenant_id).first() @@ -203,11 +205,11 @@ class WorkflowToolProvider(db.Model): return ApiProviderSchemaType.value_of(self.schema_type_str) @property - def user(self) -> Account: + def user(self) -> Account | None: return db.session.query(Account).filter(Account.id == self.user_id).first() @property - def tenant(self) -> Tenant: + def tenant(self) -> Tenant | None: return db.session.query(Tenant).filter(Tenant.id == self.tenant_id).first() @property @@ -215,7 +217,7 @@ class WorkflowToolProvider(db.Model): return [WorkflowToolParameterConfiguration(**config) for config in json.loads(self.parameter_configuration)] @property - def app(self) -> App: + def app(self) -> App | None: return db.session.query(App).filter(App.id == self.app_id).first() @@ -288,27 +290,39 @@ class ToolConversationVariables(db.Model): class ToolFile(db.Model): - """ - store the file created by agent - """ - __tablename__ = "tool_files" __table_args__ = ( db.PrimaryKeyConstraint("id", name="tool_file_pkey"), - # add index for conversation_id db.Index("tool_file_conversation_id_idx", "conversation_id"), ) id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - # conversation user id - user_id = db.Column(StringUUID, nullable=False) - # tenant id - tenant_id = db.Column(StringUUID, nullable=False) - # conversation id - conversation_id = db.Column(StringUUID, nullable=True) - # file key - file_key = db.Column(db.String(255), nullable=False) - # mime type - mimetype = db.Column(db.String(255), nullable=False) - # original url - original_url = db.Column(db.String(2048), nullable=True) + user_id: Mapped[str] = db.Column(StringUUID, nullable=False) + tenant_id: Mapped[str] = db.Column(StringUUID, nullable=False) + conversation_id: Mapped[Optional[str]] = db.Column(StringUUID, nullable=True) + file_key: Mapped[str] = db.Column(db.String(255), nullable=False) + mimetype: Mapped[str] = db.Column(db.String(255), nullable=False) + 
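# Illustrative sketch (not part of this patch): how the keyword-only
# UploadFile constructor above is meant to be called. The caller passes the
# CreatedByRole enum and the constructor snapshots .value, so the column
# keeps holding a plain string; all values here are made-up placeholders.
from datetime import datetime, timezone

upload_file = UploadFile(
    tenant_id="tenant-uuid",
    storage_type="local",
    key="upload_files/tenant-uuid/example.png",
    name="example.png",
    size=1024,
    extension=".png",
    mime_type="image/png",
    created_by_role=CreatedByRole.ACCOUNT,
    created_by="account-uuid",
    created_at=datetime.now(timezone.utc),
    used=False,
)
assert upload_file.created_by_role == "account"  # enum stored as its string value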
original_url: Mapped[Optional[str]] = db.Column(db.String(2048), nullable=True) + name: Mapped[str] = mapped_column(default="") + size: Mapped[int] = mapped_column(default=-1) + + def __init__( + self, + *, + user_id: str, + tenant_id: str, + conversation_id: Optional[str] = None, + file_key: str, + mimetype: str, + original_url: Optional[str] = None, + name: str, + size: int, + ): + self.user_id = user_id + self.tenant_id = tenant_id + self.conversation_id = conversation_id + self.file_key = file_key + self.mimetype = mimetype + self.original_url = original_url + self.name = name + self.size = size diff --git a/api/models/workflow.py b/api/models/workflow.py index 9c93ea4cea..e5fbcaf87e 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -5,41 +5,21 @@ from enum import Enum from typing import Any, Optional, Union from sqlalchemy import func -from sqlalchemy.orm import Mapped +from sqlalchemy.orm import Mapped, mapped_column import contexts from constants import HIDDEN_VALUE -from core.app.segments import SecretVariable, Variable, factory from core.helper import encrypter +from core.variables import SecretVariable, Variable from extensions.ext_database import db +from factories import variable_factory from libs import helper +from models.enums import CreatedByRole from .account import Account from .types import StringUUID -class CreatedByRole(Enum): - """ - Created By Role Enum - """ - - ACCOUNT = "account" - END_USER = "end_user" - - @classmethod - def value_of(cls, value: str) -> "CreatedByRole": - """ - Get value of given mode. - - :param value: mode value - :return: mode - """ - for mode in cls: - if mode.value == value: - return mode - raise ValueError(f"invalid created by role value {value}") - - class WorkflowType(Enum): """ Workflow Type Enum @@ -114,23 +94,23 @@ class Workflow(db.Model): db.Index("workflow_version_idx", "tenant_id", "app_id", "version"), ) - id: Mapped[str] = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) - tenant_id: Mapped[str] = db.Column(StringUUID, nullable=False) - app_id: Mapped[str] = db.Column(StringUUID, nullable=False) - type: Mapped[str] = db.Column(db.String(255), nullable=False) - version: Mapped[str] = db.Column(db.String(255), nullable=False) - graph: Mapped[str] = db.Column(db.Text) - features: Mapped[str] = db.Column(db.Text) - created_by: Mapped[str] = db.Column(StringUUID, nullable=False) - created_at: Mapped[datetime] = db.Column( + id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + app_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + type: Mapped[str] = mapped_column(db.String(255), nullable=False) + version: Mapped[str] = mapped_column(db.String(255), nullable=False) + graph: Mapped[str] = mapped_column(db.Text) + _features: Mapped[str] = mapped_column("features") + created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_at: Mapped[datetime] = mapped_column( db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)") ) - updated_by: Mapped[str] = db.Column(StringUUID) - updated_at: Mapped[datetime] = db.Column(db.DateTime) - _environment_variables: Mapped[str] = db.Column( + updated_by: Mapped[str] = mapped_column(StringUUID) + updated_at: Mapped[datetime] = mapped_column(db.DateTime) + _environment_variables: Mapped[str] = mapped_column( "environment_variables", db.Text, nullable=False, server_default="{}" ) - _conversation_variables: Mapped[str] 
= db.Column( + _conversation_variables: Mapped[str] = mapped_column( "conversation_variables", db.Text, nullable=False, server_default="{}" ) @@ -169,6 +149,34 @@ class Workflow(db.Model): def graph_dict(self) -> Mapping[str, Any]: return json.loads(self.graph) if self.graph else {} + @property + def features(self) -> str: + """ + Convert old features structure to new features structure. + """ + if not self._features: + return self._features + + features = json.loads(self._features) + if features.get("file_upload", {}).get("image", {}).get("enabled", False): + image_enabled = True + image_number_limits = int(features["file_upload"]["image"].get("number_limits", 1)) + image_transfer_methods = features["file_upload"]["image"].get( + "transfer_methods", ["remote_url", "local_file"] + ) + features["file_upload"]["enabled"] = image_enabled + features["file_upload"]["number_limits"] = image_number_limits + features["file_upload"]["allowed_upload_methods"] = image_transfer_methods + features["file_upload"]["allowed_file_types"] = ["image"] + features["file_upload"]["allowed_extensions"] = [] + del features["file_upload"]["image"] + self._features = json.dumps(features) + return self._features + + @features.setter + def features(self, value: str) -> None: + self._features = value + @property def features_dict(self) -> Mapping[str, Any]: return json.loads(self.features) if self.features else {} @@ -227,7 +235,7 @@ class Workflow(db.Model): tenant_id = contexts.tenant_id.get() environment_variables_dict: dict[str, Any] = json.loads(self._environment_variables) - results = [factory.build_variable_from_mapping(v) for v in environment_variables_dict.values()] + results = [variable_factory.build_variable_from_mapping(v) for v in environment_variables_dict.values()] # decrypt secret variables value decrypt_func = ( @@ -240,6 +248,10 @@ class Workflow(db.Model): @environment_variables.setter def environment_variables(self, value: Sequence[Variable]): + if not value: + self._environment_variables = "{}" + return + tenant_id = contexts.tenant_id.get() value = list(value) @@ -288,7 +300,7 @@ class Workflow(db.Model): self._conversation_variables = "{}" variables_dict: dict[str, Any] = json.loads(self._conversation_variables) - results = [factory.build_variable_from_mapping(v) for v in variables_dict.values()] + results = [variable_factory.build_variable_from_mapping(v) for v in variables_dict.values()] return results @conversation_variables.setter @@ -299,28 +311,6 @@ class Workflow(db.Model): ) -class WorkflowRunTriggeredFrom(Enum): - """ - Workflow Run Triggered From Enum - """ - - DEBUGGING = "debugging" - APP_RUN = "app-run" - - @classmethod - def value_of(cls, value: str) -> "WorkflowRunTriggeredFrom": - """ - Get value of given mode. 
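# Illustrative sketch (not part of this patch): the features property above
# upgrades the pre-v0.10.0 file_upload payload lazily on read. Written as a
# hypothetical standalone function, the transform and its effect on a sample
# payload look like this:
def migrate(features: dict) -> dict:
    image = features.get("file_upload", {}).get("image", {})
    if image.get("enabled", False):
        file_upload = features["file_upload"]
        file_upload["enabled"] = True
        file_upload["number_limits"] = int(image.get("number_limits", 1))
        file_upload["allowed_upload_methods"] = image.get("transfer_methods", ["remote_url", "local_file"])
        file_upload["allowed_file_types"] = ["image"]
        file_upload["allowed_extensions"] = []
        del file_upload["image"]
    return features

assert migrate({"file_upload": {"image": {"enabled": True, "number_limits": 3}}}) == {
    "file_upload": {
        "enabled": True,
        "number_limits": 3,
        "allowed_upload_methods": ["remote_url", "local_file"],
        "allowed_file_types": ["image"],
        "allowed_extensions": [],
    }
}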
- - :param value: mode value - :return: mode - """ - for mode in cls: - if mode.value == value: - return mode - raise ValueError(f"invalid workflow run triggered from value {value}") - - class WorkflowRunStatus(Enum): """ Workflow Run Status Enum @@ -401,7 +391,7 @@ class WorkflowRun(db.Model): graph = db.Column(db.Text) inputs = db.Column(db.Text) status = db.Column(db.String(255), nullable=False) - outputs = db.Column(db.Text) + outputs: Mapped[str] = db.Column(db.Text) error = db.Column(db.Text) elapsed_time = db.Column(db.Float, nullable=False, server_default=db.text("0")) total_tokens = db.Column(db.Integer, nullable=False, server_default=db.text("0")) @@ -413,27 +403,27 @@ class WorkflowRun(db.Model): @property def created_by_account(self): - created_by_role = CreatedByRole.value_of(self.created_by_role) + created_by_role = CreatedByRole(self.created_by_role) return db.session.get(Account, self.created_by) if created_by_role == CreatedByRole.ACCOUNT else None @property def created_by_end_user(self): from models.model import EndUser - created_by_role = CreatedByRole.value_of(self.created_by_role) + created_by_role = CreatedByRole(self.created_by_role) return db.session.get(EndUser, self.created_by) if created_by_role == CreatedByRole.END_USER else None @property def graph_dict(self): - return json.loads(self.graph) if self.graph else None + return json.loads(self.graph) if self.graph else {} @property - def inputs_dict(self): - return json.loads(self.inputs) if self.inputs else None + def inputs_dict(self) -> Mapping[str, Any]: + return json.loads(self.inputs) if self.inputs else {} @property - def outputs_dict(self): - return json.loads(self.outputs) if self.outputs else None + def outputs_dict(self) -> Mapping[str, Any]: + return json.loads(self.outputs) if self.outputs else {} @property def message(self) -> Optional["Message"]: @@ -640,14 +630,14 @@ class WorkflowNodeExecution(db.Model): @property def created_by_account(self): - created_by_role = CreatedByRole.value_of(self.created_by_role) + created_by_role = CreatedByRole(self.created_by_role) return db.session.get(Account, self.created_by) if created_by_role == CreatedByRole.ACCOUNT else None @property def created_by_end_user(self): from models.model import EndUser - created_by_role = CreatedByRole.value_of(self.created_by_role) + created_by_role = CreatedByRole(self.created_by_role) return db.session.get(EndUser, self.created_by) if created_by_role == CreatedByRole.END_USER else None @property @@ -672,7 +662,7 @@ class WorkflowNodeExecution(db.Model): extras = {} if self.execution_metadata_dict: - from core.workflow.entities.node_entities import NodeType + from core.workflow.nodes import NodeType if self.node_type == NodeType.TOOL.value and "tool_info" in self.execution_metadata_dict: tool_info = self.execution_metadata_dict["tool_info"] @@ -759,14 +749,14 @@ class WorkflowAppLog(db.Model): @property def created_by_account(self): - created_by_role = CreatedByRole.value_of(self.created_by_role) + created_by_role = CreatedByRole(self.created_by_role) return db.session.get(Account, self.created_by) if created_by_role == CreatedByRole.ACCOUNT else None @property def created_by_end_user(self): from models.model import EndUser - created_by_role = CreatedByRole.value_of(self.created_by_role) + created_by_role = CreatedByRole(self.created_by_role) return db.session.get(EndUser, self.created_by) if created_by_role == CreatedByRole.END_USER else None @@ -800,4 +790,4 @@ class ConversationVariable(db.Model): def to_variable(self) -> 
Variable: mapping = json.loads(self.data) - return factory.build_variable_from_mapping(mapping) + return variable_factory.build_variable_from_mapping(mapping) diff --git a/api/poetry.lock b/api/poetry.lock index 9c45899699..618dbb4033 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -553,13 +553,13 @@ cryptography = "*" [[package]] name = "azure-ai-inference" -version = "1.0.0b4" +version = "1.0.0b5" description = "Microsoft Azure Ai Inference Client Library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "azure-ai-inference-1.0.0b4.tar.gz", hash = "sha256:5464404bef337338d4af6eefde3af903400ddb8e5c9e6820f902303542fa0f72"}, - {file = "azure_ai_inference-1.0.0b4-py3-none-any.whl", hash = "sha256:e2c949f91845a8cd96cb9a61ffd432b5b0f4ce236b9be8c29d10f38e0a327412"}, + {file = "azure_ai_inference-1.0.0b5-py3-none-any.whl", hash = "sha256:0147653088033f1fd059d5f4bd0fedac82529fdcc7a0d2183d9508b3f80cf549"}, + {file = "azure_ai_inference-1.0.0b5.tar.gz", hash = "sha256:c95b490bcd670ccdeb1048dc2b45e0f8252a4d69a348ca15d4510d327b64dd0d"}, ] [package.dependencies] @@ -567,6 +567,9 @@ azure-core = ">=1.30.0" isodate = ">=0.6.1" typing-extensions = ">=4.6.0" +[package.extras] +opentelemetry = ["azure-core-tracing-opentelemetry"] + [[package]] name = "azure-ai-ml" version = "1.20.0" @@ -844,13 +847,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.40" +version = "1.35.47" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.40-py3-none-any.whl", hash = "sha256:072cc47f29cb1de4fa77ce6632e4f0480af29b70816973ff415fbaa3f50bd1db"}, - {file = "botocore-1.35.40.tar.gz", hash = "sha256:547e0a983856c7d7aeaa30fca2a283873c57c07366cd806d2d639856341b3c31"}, + {file = "botocore-1.35.47-py3-none-any.whl", hash = "sha256:05f4493119a96799ff84d43e78691efac3177e1aec8840cca99511de940e342a"}, + {file = "botocore-1.35.47.tar.gz", hash = "sha256:f8f703463d3cd8b6abe2bedc443a7ab29f0e2ff1588a2e83164b108748645547"}, ] [package.dependencies] @@ -863,47 +866,47 @@ crt = ["awscrt (==0.22.0)"] [[package]] name = "bottleneck" -version = "1.4.1" +version = "1.4.2" description = "Fast NumPy array functions written in C" optional = false -python-versions = "*" +python-versions = ">=3.9" files = [ - {file = "Bottleneck-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5be5fc34f03216d85f14d01ca12c857ee68f72d7c17dccd22743326200ba3b9f"}, - {file = "Bottleneck-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f44cc6ad1a44d3427009fa2c2298ef0b346b7024e30dc7fc9778f5b78f8c10c"}, - {file = "Bottleneck-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e5babe835350e9f8710b3f8ffb9d5d202b4b13d77bad0e0f9a395af16a55f36"}, - {file = "Bottleneck-1.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d22a9b4d9cef8bb218df15a13d4aa213042c434c595d5c732d7f4ad287dbe565"}, - {file = "Bottleneck-1.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f47cb74ad6675dc7a54ef9f6fa9e649134e98ba71e6c98b8a34054e48014c941"}, - {file = "Bottleneck-1.4.1-cp310-cp310-win32.whl", hash = "sha256:3a4acf90714de7783f4706eb19d42f4d32ac842082c7b42d6956530ef0884b19"}, - {file = "Bottleneck-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:54d60a44bf439c06d35c6c3abdb39aca3de330064c48922a6bad0a96b1f096d5"}, - {file = "Bottleneck-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:d491dad4757e9bed4204b3b823dfe9996733e11fbd9d3abaec52329a30a02bcc"}, - {file = "Bottleneck-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9186982d57db422641f30a30201fc7b158166887ca803c2af975cab6c9febb7"}, - {file = "Bottleneck-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59e5bdff742067f10c7fda48e981e4b50ba7c7f39f8627e8c9a1c7224457badd"}, - {file = "Bottleneck-1.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ab6086ced21414288a5b79a00fe359d463a3189f82b5a71adce931655ba93c2"}, - {file = "Bottleneck-1.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:359d3a08e7c1a0938b07d6117cab8b38d4140df6712fbe56491ad44f46878d2f"}, - {file = "Bottleneck-1.4.1-cp311-cp311-win32.whl", hash = "sha256:bb642a652e656a20ea239d241aa2c39e0e3d3882aacea7d3f8412d9ec7a5f185"}, - {file = "Bottleneck-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:bd89930c0375799aaf381a2a2924ca53a17ef84734d9e1a5763da7eb4499e53d"}, - {file = "Bottleneck-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:32a0e516a66a0d048e993bc926aa878208e8db549d30e39bef7b933a81dcad26"}, - {file = "Bottleneck-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1be7a1589a553ce6575d77a0ae3404b4a0a69c647e1cda85e41f388a61c1210d"}, - {file = "Bottleneck-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:845e2975704c1e40cd27a3d1d3eb45af031e7f25edf826577dbedab33bf1674e"}, - {file = "Bottleneck-1.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fea946c0fed796b62ddd812ea6533d2733d7a070383236a0d9ba585e66619c30"}, - {file = "Bottleneck-1.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:23e16efc311df996321fde53537815acca26dca731395d9e6da36e743fc0d27f"}, - {file = "Bottleneck-1.4.1-cp312-cp312-win32.whl", hash = "sha256:75ab6c9daed8528f9f0f37efb9f15d0b9759b8902325abcfd38cb79126c76db4"}, - {file = "Bottleneck-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:7ce544479354ef3893c04b8f135b454c126cc236907879f552422855de5df35d"}, - {file = "Bottleneck-1.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:681e8889170d349bed1a7c30a4b32ba25f6988e1432217a91967fd7a5a963d7d"}, - {file = "Bottleneck-1.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d774d69ed3ba968b9b2e5271b14d013f85b820612ec4dbd8f0a32f23a07c6d77"}, - {file = "Bottleneck-1.4.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d0a0be22fa01c2698ff746a4e1e27238acd18409a353e2c7172b0e50b2f04a9"}, - {file = "Bottleneck-1.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:eb39b78f99304f174338c486a015d490707374a0c349167ba6ade7b6d3484304"}, - {file = "Bottleneck-1.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:052fcc86f5713a3192f3ce0a4f950c2ff451df8c43c9d0291e7c4bd624ae1292"}, - {file = "Bottleneck-1.4.1-cp313-cp313-win32.whl", hash = "sha256:cbcc5e36ba50a50d5d8084e2772008b33b9507d06c2c1642c8e2299709439881"}, - {file = "Bottleneck-1.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:3e36a7ab42bc0f1e2508f1c33308fb08c8454edc4694164ee8fdd46918adea03"}, - {file = "Bottleneck-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f12ab7f52881065323ac4ac1f8fd424cf5ecde0d9c0746945fb6e8b1a5cbfcac"}, - {file = 
"Bottleneck-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a76155466718066fcfb77329b29be71d1bce679b1a7daf693a7a08fbf42577b8"}, - {file = "Bottleneck-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cde4981d889f41f175a054ad5c3434b81ee8749f31b6e7b9ec92776ec4f7292f"}, - {file = "Bottleneck-1.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:351ddfedc51c2575c90d43a03e350bdc00020e219dfa877480df2b69dbc31eab"}, - {file = "Bottleneck-1.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:98032a0efb2a4f46223645600f53cb9f7c50dbec8c146e2869585dc9f81fa0ea"}, - {file = "Bottleneck-1.4.1-cp39-cp39-win32.whl", hash = "sha256:ddc2218dc8e8b626d1645e57596bc353559facb99c362c9ba5eb6d5b4a12d5fc"}, - {file = "Bottleneck-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:d6f275a727a59c27897e3e29dabde4d8af299fcfc667f13a9b00fd2a1ab72178"}, - {file = "bottleneck-1.4.1.tar.gz", hash = "sha256:58c66619db62291c9ca3b497b05f40f7b1ff9ac010b88bff1925f3101dae2c89"}, + {file = "Bottleneck-1.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:125436df93751a226eab1732783aa8f6125e88e779587aa61be071fb66e41f9d"}, + {file = "Bottleneck-1.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c6df9a60ec6ab88fec934ca864266ba95edd89c490af71dc9cd8afb2a54ebd9"}, + {file = "Bottleneck-1.4.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2fe327dc2d0564e295a5857a252755103f8c6e05b07d3ff80a69afaa9f5065"}, + {file = "Bottleneck-1.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6b7790ca8658cd69e3cc0d0e4ff0e9829d60849bf7945fbd7344fbce05b2bbb8"}, + {file = "Bottleneck-1.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6282fa925ac3768f66e3547f89a512376d3f9de7ef53bdd37aa29232fd864054"}, + {file = "Bottleneck-1.4.2-cp310-cp310-win32.whl", hash = "sha256:e56a206fbf48e3b8054a964398bf1ed843e9625d3c6bdbeb7898cb48bf97441b"}, + {file = "Bottleneck-1.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:eb0c611d15b0fd8f511d288e8964e4725b4b3b0d9d310880cf0ff6b8dd03c859"}, + {file = "Bottleneck-1.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b6902ebf3e85315b481bc084f10c5770f8240275ad1e039ac69c7c8d2013b040"}, + {file = "Bottleneck-1.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c2fd34b9b490204f95288f0dd35d37042486a95029617246c88c0f94a0ab49fe"}, + {file = "Bottleneck-1.4.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:122845e3106c85465551d4a9a3777841347cfedfbebb3aa985cca110e07030b1"}, + {file = "Bottleneck-1.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1f61658ebdf5a178298544336b65020730bf86cc092dab5f6579a99a86bd888b"}, + {file = "Bottleneck-1.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7c7d29c044a3511b36fd744503c3e697e279c273a8477a6d91a2831d04fd19e0"}, + {file = "Bottleneck-1.4.2-cp311-cp311-win32.whl", hash = "sha256:c663cbba8f52011fd82ee08c6a85c93b34b19e0e7ebba322d2d67809f34e0597"}, + {file = "Bottleneck-1.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:89651ef18c06616850203bf8875c958c5d316ea48d8ba60d9b450199d39ae391"}, + {file = "Bottleneck-1.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a74ddd0417f42eeaba37375f0fc065b28451e0fba45cb2f99e88880b10b3fa43"}, + {file = 
"Bottleneck-1.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:070d22f2f62ab81297380a89492cca931e4d9443fa4b84c2baeb52db09c3b1b4"}, + {file = "Bottleneck-1.4.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fc4e7645bd425c05e05acd5541e9e09cb4179e71164e862f082561bf4509eac"}, + {file = "Bottleneck-1.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:037315c56605128a39f77d19af6a6019dc8c21a63694a4bfef3c026ed963be2e"}, + {file = "Bottleneck-1.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:99778329331d5fae8df19772a019e8b73ba4d9d1650f110cd995ab7657114db0"}, + {file = "Bottleneck-1.4.2-cp312-cp312-win32.whl", hash = "sha256:7363b3c8ce6ca433779cd7e96bcb94c0e516dcacadff0011adcbf0b3ac86bc9d"}, + {file = "Bottleneck-1.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:48c6b9d9287c4102b803fcb01ae66ae7ef6b310b711b4b7b7e23bf952894dc05"}, + {file = "Bottleneck-1.4.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c1c885ad02a6a8fa1f7ee9099f29b9d4c03eb1da2c7ab25839482d5cce739021"}, + {file = "Bottleneck-1.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7a1b023de1de3d84b18826462718fba548fed41870df44354f9ab6a414ea82f"}, + {file = "Bottleneck-1.4.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c9dbaf737b605b30c81611f2c1d197c2fd2e46c33f605876c1d332d3360c4fc"}, + {file = "Bottleneck-1.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7ebbcbe5d4062e37507b9a81e2aacdb1fcccc6193f7feff124ef2b5a6a5eb740"}, + {file = "Bottleneck-1.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:964f6ac4118ddab3bbbac79d4f726b093459be751baba73ee0aa364666e8068e"}, + {file = "Bottleneck-1.4.2-cp313-cp313-win32.whl", hash = "sha256:2db287f6ecdbb1c998085eca9b717fec2bfc48a4ab6ae070a9820ba8ab59c90b"}, + {file = "Bottleneck-1.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:26b5f0531f7044befaad95c20365dd666372e66bdacbfaf009ff65d60285534d"}, + {file = "Bottleneck-1.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:72d6aa95cdd782833d2589f81434fd865ba004b8938e07920b6ef02796ce8918"}, + {file = "Bottleneck-1.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b33e83665e7daf7f513fe1f7b04b13944d44b6635c45d5a9c89c9e5ed11811b6"}, + {file = "Bottleneck-1.4.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52248f3e0fead78c17912fb086a585c86f567019247d21c69e87645241b97b02"}, + {file = "Bottleneck-1.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:dce1a3c5ff89a56fb2678c9bda17b89f60f710d6002ab7cd72b7661bc3fae64d"}, + {file = "Bottleneck-1.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:48d2e101d99a9d72aa86da1a048d2094f4e1db0cf77519d1c33239f9d62da162"}, + {file = "Bottleneck-1.4.2-cp39-cp39-win32.whl", hash = "sha256:9d7b12936516f944e3d981a64038f99acb21f0e99f92fad16d9a468248c2b231"}, + {file = "Bottleneck-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:7b459d08f1f3e2da85db0a9e2d3e6e3541105f5866e9026dbca32dafc5106f2b"}, + {file = "bottleneck-1.4.2.tar.gz", hash = "sha256:fa8e8e1799dea5483ce6669462660f9d9a95649f6f98a80d315b84ec89f449f4"}, ] [package.dependencies] @@ -1882,38 +1885,38 @@ files = [ [[package]] name = "cryptography" -version = "43.0.1" +version = "43.0.3" description = "cryptography is a package which provides cryptographic 
recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, - {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, - {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, - {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, - {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, - {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, - {file = 
"cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, - {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, + {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, + {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, + {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, + {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, + {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, + {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, + {file = 
"cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, + {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, ] [package.dependencies] @@ -1926,7 +1929,7 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -2214,18 +2217,18 @@ files = [ [[package]] name = "duckduckgo-search" -version = "6.3.0" +version = "6.3.2" description = "Search for words, documents, images, news, maps and text translation using the DuckDuckGo.com search engine." optional = false python-versions = ">=3.8" files = [ - {file = "duckduckgo_search-6.3.0-py3-none-any.whl", hash = "sha256:9a231a7b325226811cf7d35a240f3f501e718ae10a1aa0a638cabc80e129dfe7"}, - {file = "duckduckgo_search-6.3.0.tar.gz", hash = "sha256:e9f56955569325a7d9cacda2488ca78bf6629a459e74415892bee560b664f5eb"}, + {file = "duckduckgo_search-6.3.2-py3-none-any.whl", hash = "sha256:cd631275292460d590d1d496995d002bf2fe6db9752713fab17b9e95924ced98"}, + {file = "duckduckgo_search-6.3.2.tar.gz", hash = "sha256:53dbf45f8749bfc67483eb9f281f2e722a5fe644d61c54ed9e551d26cb6bcbf2"}, ] [package.dependencies] click = ">=8.1.7" -primp = ">=0.6.3" +primp = ">=0.6.4" [package.extras] dev = ["mypy (>=1.11.1)", "pytest (>=8.3.1)", "pytest-asyncio (>=0.23.8)", "ruff (>=0.6.1)"] @@ -2339,6 +2342,20 @@ files = [ {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, ] +[[package]] +name = "eval-type-backport" +version = "0.2.0" +description = "Like `typing._eval_type`, but lets older Python versions use newer typing features." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "eval_type_backport-0.2.0-py3-none-any.whl", hash = "sha256:ac2f73d30d40c5a30a80b8739a789d6bb5e49fdffa66d7912667e2015d9c9933"}, + {file = "eval_type_backport-0.2.0.tar.gz", hash = "sha256:68796cfbc7371ebf923f03bdf7bef415f3ec098aeced24e054b253a0e78f7b37"}, +] + +[package.extras] +tests = ["pytest"] + [[package]] name = "exceptiongroup" version = "1.2.2" @@ -2355,18 +2372,18 @@ test = ["pytest (>=6)"] [[package]] name = "fastapi" -version = "0.115.2" +version = "0.115.3" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.115.2-py3-none-any.whl", hash = "sha256:61704c71286579cc5a598763905928f24ee98bfcc07aabe84cfefb98812bbc86"}, - {file = "fastapi-0.115.2.tar.gz", hash = "sha256:3995739e0b09fa12f984bce8fa9ae197b35d433750d3d312422d846e283697ee"}, + {file = "fastapi-0.115.3-py3-none-any.whl", hash = "sha256:8035e8f9a2b0aa89cea03b6c77721178ed5358e1aea4cd8570d9466895c0638c"}, + {file = "fastapi-0.115.3.tar.gz", hash = "sha256:c091c6a35599c036d676fa24bd4a6e19fa30058d93d950216cdc672881f6f7db"}, ] [package.dependencies] pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.37.2,<0.41.0" +starlette = ">=0.40.0,<0.42.0" typing-extensions = ">=4.8.0" [package.extras] @@ -2761,99 +2778,114 @@ files = [ [[package]] name = "frozenlist" -version = "1.4.1" +version = "1.5.0" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false python-versions = ">=3.8" files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = 
"frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, - {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, - {file = 
"frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, - {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, - {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5"}, + {file = "frozenlist-1.5.0-cp310-cp310-win32.whl", hash = "sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb"}, + {file = "frozenlist-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf"}, + {file = "frozenlist-1.5.0-cp311-cp311-win32.whl", hash = "sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942"}, + {file = "frozenlist-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f"}, + {file = "frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8"}, + {file = "frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f"}, + {file = 
"frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03"}, + {file = "frozenlist-1.5.0-cp313-cp313-win32.whl", hash = "sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c"}, + {file = "frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9"}, + 
{file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e"}, + {file = "frozenlist-1.5.0-cp38-cp38-win32.whl", hash = "sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723"}, + {file = "frozenlist-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c"}, + {file = "frozenlist-1.5.0-cp39-cp39-win32.whl", hash = "sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3"}, + {file = "frozenlist-1.5.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0"}, + {file = "frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3"}, + {file = "frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817"}, ] [[package]] name = "fsspec" -version = "2024.9.0" +version = "2024.10.0" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2024.9.0-py3-none-any.whl", hash = "sha256:a0947d552d8a6efa72cc2c730b12c41d043509156966cca4fb157b0f2a0c574b"}, - {file = "fsspec-2024.9.0.tar.gz", hash = "sha256:4b0afb90c2f21832df142f292649035d80b421f60a9e1c027802e5a0da2b04e8"}, + {file = "fsspec-2024.10.0-py3-none-any.whl", hash = "sha256:03b9a6785766a4de40368b88906366755e2819e758b83705c88cd7cb5fe81871"}, + {file = "fsspec-2024.10.0.tar.gz", hash = "sha256:eda2d8a4116d4f2429db8550f2457da57279247dd930bb12f821b58391359493"}, ] [package.extras] @@ -3391,13 +3423,13 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "gotrue" -version = "2.9.2" +version = "2.9.3" description = "Python Client Library for Supabase Auth" optional = false -python-versions = "<4.0,>=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "gotrue-2.9.2-py3-none-any.whl", hash = "sha256:fcd5279e8f1cc630f3ac35af5485fe39f8030b23906776920d2c32a4e308cff4"}, - {file = "gotrue-2.9.2.tar.gz", hash = "sha256:57b3245e916c5efbf19a21b1181011a903c1276bb1df2d847558f2f24f29abb2"}, + {file = "gotrue-2.9.3-py3-none-any.whl", hash = "sha256:9d2e9c74405d879f4828e0a7b94daf167a6e109c10ae6e5c59a0e21446f6e423"}, + {file = "gotrue-2.9.3.tar.gz", hash = "sha256:051551d80e642bdd2ab42cac78207745d89a2a08f429a1512d82624e675d8255"}, ] [package.dependencies] @@ -3508,70 +3540,70 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4 [[package]] name = "grpcio" -version = "1.66.2" +version = "1.67.0" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio-1.66.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:fe96281713168a3270878255983d2cb1a97e034325c8c2c25169a69289d3ecfa"}, - {file = "grpcio-1.66.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:73fc8f8b9b5c4a03e802b3cd0c18b2b06b410d3c1dcbef989fdeb943bd44aff7"}, - {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:03b0b307ba26fae695e067b94cbb014e27390f8bc5ac7a3a39b7723fed085604"}, - {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d69ce1f324dc2d71e40c9261d3fdbe7d4c9d60f332069ff9b2a4d8a257c7b2b"}, - {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bc2ceadc2529ab0b227b1310d249d95d9001cd106aa4d31e8871ad3c428d73"}, - {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ac475e8da31484efa25abb774674d837b343afb78bb3bcdef10f81a93e3d6bf"}, - {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0be4e0490c28da5377283861bed2941d1d20ec017ca397a5df4394d1c31a9b50"}, - {file = "grpcio-1.66.2-cp310-cp310-win32.whl", hash = "sha256:4e504572433f4e72b12394977679161d495c4c9581ba34a88d843eaf0f2fbd39"}, - {file = "grpcio-1.66.2-cp310-cp310-win_amd64.whl", hash = "sha256:2018b053aa15782db2541ca01a7edb56a0bf18c77efed975392583725974b249"}, - {file = "grpcio-1.66.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:2335c58560a9e92ac58ff2bc5649952f9b37d0735608242973c7a8b94a6437d8"}, - {file = 
"grpcio-1.66.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45a3d462826f4868b442a6b8fdbe8b87b45eb4f5b5308168c156b21eca43f61c"}, - {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a9539f01cb04950fd4b5ab458e64a15f84c2acc273670072abe49a3f29bbad54"}, - {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce89f5876662f146d4c1f695dda29d4433a5d01c8681fbd2539afff535da14d4"}, - {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25a14af966438cddf498b2e338f88d1c9706f3493b1d73b93f695c99c5f0e2a"}, - {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6001e575b8bbd89eee11960bb640b6da6ae110cf08113a075f1e2051cc596cae"}, - {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ea1d062c9230278793820146c95d038dc0f468cbdd172eec3363e42ff1c7d01"}, - {file = "grpcio-1.66.2-cp311-cp311-win32.whl", hash = "sha256:38b68498ff579a3b1ee8f93a05eb48dc2595795f2f62716e797dc24774c1aaa8"}, - {file = "grpcio-1.66.2-cp311-cp311-win_amd64.whl", hash = "sha256:6851de821249340bdb100df5eacfecfc4e6075fa85c6df7ee0eb213170ec8e5d"}, - {file = "grpcio-1.66.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:802d84fd3d50614170649853d121baaaa305de7b65b3e01759247e768d691ddf"}, - {file = "grpcio-1.66.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:80fd702ba7e432994df208f27514280b4b5c6843e12a48759c9255679ad38db8"}, - {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:12fda97ffae55e6526825daf25ad0fa37483685952b5d0f910d6405c87e3adb6"}, - {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:950da58d7d80abd0ea68757769c9db0a95b31163e53e5bb60438d263f4bed7b7"}, - {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e636ce23273683b00410f1971d209bf3689238cf5538d960adc3cdfe80dd0dbd"}, - {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a917d26e0fe980b0ac7bfcc1a3c4ad6a9a4612c911d33efb55ed7833c749b0ee"}, - {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49f0ca7ae850f59f828a723a9064cadbed90f1ece179d375966546499b8a2c9c"}, - {file = "grpcio-1.66.2-cp312-cp312-win32.whl", hash = "sha256:31fd163105464797a72d901a06472860845ac157389e10f12631025b3e4d0453"}, - {file = "grpcio-1.66.2-cp312-cp312-win_amd64.whl", hash = "sha256:ff1f7882e56c40b0d33c4922c15dfa30612f05fb785074a012f7cda74d1c3679"}, - {file = "grpcio-1.66.2-cp313-cp313-linux_armv7l.whl", hash = "sha256:3b00efc473b20d8bf83e0e1ae661b98951ca56111feb9b9611df8efc4fe5d55d"}, - {file = "grpcio-1.66.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1caa38fb22a8578ab8393da99d4b8641e3a80abc8fd52646f1ecc92bcb8dee34"}, - {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:c408f5ef75cfffa113cacd8b0c0e3611cbfd47701ca3cdc090594109b9fcbaed"}, - {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c806852deaedee9ce8280fe98955c9103f62912a5b2d5ee7e3eaa284a6d8d8e7"}, - {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f145cc21836c332c67baa6fc81099d1d27e266401565bf481948010d6ea32d46"}, - {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:73e3b425c1e155730273f73e419de3074aa5c5e936771ee0e4af0814631fb30a"}, - {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:9c509a4f78114cbc5f0740eb3d7a74985fd2eff022971bc9bc31f8bc93e66a3b"}, - {file = "grpcio-1.66.2-cp313-cp313-win32.whl", hash = "sha256:20657d6b8cfed7db5e11b62ff7dfe2e12064ea78e93f1434d61888834bc86d75"}, - {file = "grpcio-1.66.2-cp313-cp313-win_amd64.whl", hash = "sha256:fb70487c95786e345af5e854ffec8cb8cc781bcc5df7930c4fbb7feaa72e1cdf"}, - {file = "grpcio-1.66.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:a18e20d8321c6400185b4263e27982488cb5cdd62da69147087a76a24ef4e7e3"}, - {file = "grpcio-1.66.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:02697eb4a5cbe5a9639f57323b4c37bcb3ab2d48cec5da3dc2f13334d72790dd"}, - {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:99a641995a6bc4287a6315989ee591ff58507aa1cbe4c2e70d88411c4dcc0839"}, - {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ed71e81782966ffead60268bbda31ea3f725ebf8aa73634d5dda44f2cf3fb9c"}, - {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbd27c24a4cc5e195a7f56cfd9312e366d5d61b86e36d46bbe538457ea6eb8dd"}, - {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a9724a156c8ec6a379869b23ba3323b7ea3600851c91489b871e375f710bc8"}, - {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d8d4732cc5052e92cea2f78b233c2e2a52998ac40cd651f40e398893ad0d06ec"}, - {file = "grpcio-1.66.2-cp38-cp38-win32.whl", hash = "sha256:7b2c86457145ce14c38e5bf6bdc19ef88e66c5fee2c3d83285c5aef026ba93b3"}, - {file = "grpcio-1.66.2-cp38-cp38-win_amd64.whl", hash = "sha256:e88264caad6d8d00e7913996030bac8ad5f26b7411495848cc218bd3a9040b6c"}, - {file = "grpcio-1.66.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:c400ba5675b67025c8a9f48aa846f12a39cf0c44df5cd060e23fda5b30e9359d"}, - {file = "grpcio-1.66.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:66a0cd8ba6512b401d7ed46bb03f4ee455839957f28b8d61e7708056a806ba6a"}, - {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:06de8ec0bd71be123eec15b0e0d457474931c2c407869b6c349bd9bed4adbac3"}, - {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb57870449dfcfac428afbb5a877829fcb0d6db9d9baa1148705739e9083880e"}, - {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b672abf90a964bfde2d0ecbce30f2329a47498ba75ce6f4da35a2f4532b7acbc"}, - {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad2efdbe90c73b0434cbe64ed372e12414ad03c06262279b104a029d1889d13e"}, - {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c3a99c519f4638e700e9e3f83952e27e2ea10873eecd7935823dab0c1c9250e"}, - {file = "grpcio-1.66.2-cp39-cp39-win32.whl", hash = "sha256:78fa51ebc2d9242c0fc5db0feecc57a9943303b46664ad89921f5079e2e4ada7"}, - {file = "grpcio-1.66.2-cp39-cp39-win_amd64.whl", hash = "sha256:728bdf36a186e7f51da73be7f8d09457a03061be848718d0edf000e709418987"}, - {file = "grpcio-1.66.2.tar.gz", hash = "sha256:563588c587b75c34b928bc428548e5b00ea38c46972181a4d8b75ba7e3f24231"}, + {file = "grpcio-1.67.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:bd79929b3bb96b54df1296cd3bf4d2b770bd1df6c2bdf549b49bab286b925cdc"}, + {file = "grpcio-1.67.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:16724ffc956ea42967f5758c2f043faef43cb7e48a51948ab593570570d1e68b"}, + {file = "grpcio-1.67.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:2b7183c80b602b0ad816315d66f2fb7887614ead950416d60913a9a71c12560d"}, + {file = 
"grpcio-1.67.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:efe32b45dd6d118f5ea2e5deaed417d8a14976325c93812dd831908522b402c9"}, + {file = "grpcio-1.67.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe89295219b9c9e47780a0f1c75ca44211e706d1c598242249fe717af3385ec8"}, + {file = "grpcio-1.67.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa8d025fae1595a207b4e47c2e087cb88d47008494db258ac561c00877d4c8f8"}, + {file = "grpcio-1.67.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f95e15db43e75a534420e04822df91f645664bf4ad21dfaad7d51773c80e6bb4"}, + {file = "grpcio-1.67.0-cp310-cp310-win32.whl", hash = "sha256:a6b9a5c18863fd4b6624a42e2712103fb0f57799a3b29651c0e5b8119a519d65"}, + {file = "grpcio-1.67.0-cp310-cp310-win_amd64.whl", hash = "sha256:b6eb68493a05d38b426604e1dc93bfc0137c4157f7ab4fac5771fd9a104bbaa6"}, + {file = "grpcio-1.67.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:e91d154689639932305b6ea6f45c6e46bb51ecc8ea77c10ef25aa77f75443ad4"}, + {file = "grpcio-1.67.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cb204a742997277da678611a809a8409657b1398aaeebf73b3d9563b7d154c13"}, + {file = "grpcio-1.67.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:ae6de510f670137e755eb2a74b04d1041e7210af2444103c8c95f193340d17ee"}, + {file = "grpcio-1.67.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74b900566bdf68241118f2918d312d3bf554b2ce0b12b90178091ea7d0a17b3d"}, + {file = "grpcio-1.67.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4e95e43447a02aa603abcc6b5e727d093d161a869c83b073f50b9390ecf0fa8"}, + {file = "grpcio-1.67.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0bb94e66cd8f0baf29bd3184b6aa09aeb1a660f9ec3d85da615c5003154bc2bf"}, + {file = "grpcio-1.67.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:82e5bd4b67b17c8c597273663794a6a46a45e44165b960517fe6d8a2f7f16d23"}, + {file = "grpcio-1.67.0-cp311-cp311-win32.whl", hash = "sha256:7fc1d2b9fd549264ae585026b266ac2db53735510a207381be509c315b4af4e8"}, + {file = "grpcio-1.67.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac11ecb34a86b831239cc38245403a8de25037b448464f95c3315819e7519772"}, + {file = "grpcio-1.67.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:227316b5631260e0bef8a3ce04fa7db4cc81756fea1258b007950b6efc90c05d"}, + {file = "grpcio-1.67.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d90cfdafcf4b45a7a076e3e2a58e7bc3d59c698c4f6470b0bb13a4d869cf2273"}, + {file = "grpcio-1.67.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:77196216d5dd6f99af1c51e235af2dd339159f657280e65ce7e12c1a8feffd1d"}, + {file = "grpcio-1.67.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15c05a26a0f7047f720da41dc49406b395c1470eef44ff7e2c506a47ac2c0591"}, + {file = "grpcio-1.67.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3840994689cc8cbb73d60485c594424ad8adb56c71a30d8948d6453083624b52"}, + {file = "grpcio-1.67.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5a1e03c3102b6451028d5dc9f8591131d6ab3c8a0e023d94c28cb930ed4b5f81"}, + {file = "grpcio-1.67.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:682968427a63d898759474e3b3178d42546e878fdce034fd7474ef75143b64e3"}, + {file = "grpcio-1.67.0-cp312-cp312-win32.whl", hash = "sha256:d01793653248f49cf47e5695e0a79805b1d9d4eacef85b310118ba1dfcd1b955"}, + {file = "grpcio-1.67.0-cp312-cp312-win_amd64.whl", hash = "sha256:985b2686f786f3e20326c4367eebdaed3e7aa65848260ff0c6644f817042cb15"}, + {file 
= "grpcio-1.67.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:8c9a35b8bc50db35ab8e3e02a4f2a35cfba46c8705c3911c34ce343bd777813a"}, + {file = "grpcio-1.67.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:42199e704095b62688998c2d84c89e59a26a7d5d32eed86d43dc90e7a3bd04aa"}, + {file = "grpcio-1.67.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:c4c425f440fb81f8d0237c07b9322fc0fb6ee2b29fbef5f62a322ff8fcce240d"}, + {file = "grpcio-1.67.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:323741b6699cd2b04a71cb38f502db98f90532e8a40cb675393d248126a268af"}, + {file = "grpcio-1.67.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:662c8e105c5e5cee0317d500eb186ed7a93229586e431c1bf0c9236c2407352c"}, + {file = "grpcio-1.67.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f6bd2ab135c64a4d1e9e44679a616c9bc944547357c830fafea5c3caa3de5153"}, + {file = "grpcio-1.67.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:2f55c1e0e2ae9bdd23b3c63459ee4c06d223b68aeb1961d83c48fb63dc29bc03"}, + {file = "grpcio-1.67.0-cp313-cp313-win32.whl", hash = "sha256:fd6bc27861e460fe28e94226e3673d46e294ca4673d46b224428d197c5935e69"}, + {file = "grpcio-1.67.0-cp313-cp313-win_amd64.whl", hash = "sha256:cf51d28063338608cd8d3cd64677e922134837902b70ce00dad7f116e3998210"}, + {file = "grpcio-1.67.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:7f200aca719c1c5dc72ab68be3479b9dafccdf03df530d137632c534bb6f1ee3"}, + {file = "grpcio-1.67.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0892dd200ece4822d72dd0952f7112c542a487fc48fe77568deaaa399c1e717d"}, + {file = "grpcio-1.67.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:f4d613fbf868b2e2444f490d18af472ccb47660ea3df52f068c9c8801e1f3e85"}, + {file = "grpcio-1.67.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c69bf11894cad9da00047f46584d5758d6ebc9b5950c0dc96fec7e0bce5cde9"}, + {file = "grpcio-1.67.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9bca3ca0c5e74dea44bf57d27e15a3a3996ce7e5780d61b7c72386356d231db"}, + {file = "grpcio-1.67.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:014dfc020e28a0d9be7e93a91f85ff9f4a87158b7df9952fe23cc42d29d31e1e"}, + {file = "grpcio-1.67.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d4ea4509d42c6797539e9ec7496c15473177ce9abc89bc5c71e7abe50fc25737"}, + {file = "grpcio-1.67.0-cp38-cp38-win32.whl", hash = "sha256:9d75641a2fca9ae1ae86454fd25d4c298ea8cc195dbc962852234d54a07060ad"}, + {file = "grpcio-1.67.0-cp38-cp38-win_amd64.whl", hash = "sha256:cff8e54d6a463883cda2fab94d2062aad2f5edd7f06ae3ed030f2a74756db365"}, + {file = "grpcio-1.67.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:62492bd534979e6d7127b8a6b29093161a742dee3875873e01964049d5250a74"}, + {file = "grpcio-1.67.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eef1dce9d1a46119fd09f9a992cf6ab9d9178b696382439446ca5f399d7b96fe"}, + {file = "grpcio-1.67.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f623c57a5321461c84498a99dddf9d13dac0e40ee056d884d6ec4ebcab647a78"}, + {file = "grpcio-1.67.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54d16383044e681f8beb50f905249e4e7261dd169d4aaf6e52eab67b01cbbbe2"}, + {file = "grpcio-1.67.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2a44e572fb762c668e4812156b81835f7aba8a721b027e2d4bb29fb50ff4d33"}, + {file = "grpcio-1.67.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:391df8b0faac84d42f5b8dfc65f5152c48ed914e13c522fd05f2aca211f8bfad"}, + {file = 
"grpcio-1.67.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfd9306511fdfc623a1ba1dc3bc07fbd24e6cfbe3c28b4d1e05177baa2f99617"}, + {file = "grpcio-1.67.0-cp39-cp39-win32.whl", hash = "sha256:30d47dbacfd20cbd0c8be9bfa52fdb833b395d4ec32fe5cff7220afc05d08571"}, + {file = "grpcio-1.67.0-cp39-cp39-win_amd64.whl", hash = "sha256:f55f077685f61f0fbd06ea355142b71e47e4a26d2d678b3ba27248abfe67163a"}, + {file = "grpcio-1.67.0.tar.gz", hash = "sha256:e090b2553e0da1c875449c8e75073dd4415dd71c9bde6a406240fdf4c0ee467c"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.66.2)"] +protobuf = ["grpcio-tools (>=1.67.0)"] [[package]] name = "grpcio-status" @@ -3870,54 +3902,54 @@ pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0 [[package]] name = "httptools" -version = "0.6.2" +version = "0.6.4" description = "A collection of framework independent HTTP protocol utils." optional = false python-versions = ">=3.8.0" files = [ - {file = "httptools-0.6.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0238f07780782c018e9801d8f5f5aea3a4680a1af132034b444f677718c6fe88"}, - {file = "httptools-0.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:10d28e5597d4349390c640232c9366ddc15568114f56724fe30a53de9686b6ab"}, - {file = "httptools-0.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ddaf99e362ae4169f6a8b3508f3487264e0a1b1e58c0b07b86407bc9ecee831"}, - {file = "httptools-0.6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc9d039b6b8a36b182bc60774bb5d456b8ff9ec44cf97719f2f38bb1dcdd546"}, - {file = "httptools-0.6.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b57cb8a4a8a8ffdaf0395326ef3b9c1aba36e58a421438fc04c002a1f511db63"}, - {file = "httptools-0.6.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b73cda1326738eab5d60640ca0b87ac4e4db09a099423c41b59a5681917e8d1d"}, - {file = "httptools-0.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:352a496244360deb1c1d108391d76cd6f3dd9f53ccf975a082e74c6761af30c9"}, - {file = "httptools-0.6.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2e9d225b178a6cc700c23cf2f5daf85a10f93f1db7c34e9ee4ee0bbc29ad458a"}, - {file = "httptools-0.6.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49b14fcc9b12a52da8667587efa124a18e1a3eb63bbbcabf9882f4008d171d6"}, - {file = "httptools-0.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d5c33d98b2311ddbe06e92b12b14de334dcfbe64ebcbb2c7a34b5c6036db512"}, - {file = "httptools-0.6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53cd2d776700bf0ed0e6fb203d716b041712ea4906479031cc5ac5421ecaa7d2"}, - {file = "httptools-0.6.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7da016a0dab1fcced89dfff8537033c5dc200015e14023368f3f4a69e39b8716"}, - {file = "httptools-0.6.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d6e0ba155a1b3159551ac6b4551eb20028617e2e4bb71f2c61efed0756e6825"}, - {file = "httptools-0.6.2-cp311-cp311-win_amd64.whl", hash = "sha256:ad44569b0f508e046ffe85b4a547d5b68d1548fd90767df69449cc28021ee709"}, - {file = "httptools-0.6.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c92d2b7c1a914ab2f66454961eeaf904f4fe7529b93ff537619d22c18b82d070"}, - {file = "httptools-0.6.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f920a75c1dbcb5a48a495f384d73ceb41e437a966c318eb7e56f1c1ad1df3e"}, - {file = 
"httptools-0.6.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56bcd9ba0adf16edb4e3e45b8b9346f5b3b2372402e953d54c84b345d0f691e0"}, - {file = "httptools-0.6.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e350a887adb38ac65c93c2f395b60cf482baca61fd396ed8d6fd313dbcce6fac"}, - {file = "httptools-0.6.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ddc328c2a2daf2cf4bdc7bbc8a458dc4c840637223d4b8e01bce2168cc79fd23"}, - {file = "httptools-0.6.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ddaf38943dbb32333a182c894b6092a68b56c5e36d0c54ba3761d28119b15447"}, - {file = "httptools-0.6.2-cp312-cp312-win_amd64.whl", hash = "sha256:052f7f50e4a38f069478143878371ed17937f268349bcd68f6f7a9de9fcfce21"}, - {file = "httptools-0.6.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:406f7dc5d9db68cd9ac638d14c74d077085f76b45f704d3ec38d43b842b3cb44"}, - {file = "httptools-0.6.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:77e22c33123ce11231ff2773d8905e20b45d77a69459def7481283b72a583955"}, - {file = "httptools-0.6.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41965586b02715c3d83dd9153001f654e5b621de0c5255f5ef0635485212d0c0"}, - {file = "httptools-0.6.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93b1839d54b80a06a51a31b90d024a1770e250d00de57e7ae069bafba932f398"}, - {file = "httptools-0.6.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8fdb4634040d1dbde7e0b373e19668cdb61c0ee8690d3b4064ac748d85365bca"}, - {file = "httptools-0.6.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c30902f9b9da0d74668b6f71d7b57081a4879d9a5ea93d5922dbe15b15b3b24a"}, - {file = "httptools-0.6.2-cp313-cp313-win_amd64.whl", hash = "sha256:cf61238811a75335751b4b17f8b221a35f93f2d57489296742adf98412d2a568"}, - {file = "httptools-0.6.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8d80878cb40ebf88a48839ff7206ceb62e4b54327e0c2f9f15ee12edbd8b907e"}, - {file = "httptools-0.6.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5141ccc9dbd8cdc59d1e93e318d405477a940dc6ebadcb8d9f8da17d2812d353"}, - {file = "httptools-0.6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb67d47f045f56e9a5da4deccf710bdde21212e4b1f4776b7a542449f6a7682"}, - {file = "httptools-0.6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76dcb8f5c866f1537ccbaad01ebb3611890d281ef8d25e050d1cc3d90fba6b3d"}, - {file = "httptools-0.6.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:1b7bc59362143dc2d02896dde94004ef54ff1989ceedf4b389ad3b530f312364"}, - {file = "httptools-0.6.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c7a5715b1f46e9852442f496c0df2f8c393cc8f293f5396d2c8d95cac852fb51"}, - {file = "httptools-0.6.2-cp38-cp38-win_amd64.whl", hash = "sha256:3f0246ca7f78fa8e3902ddb985b9f55509d417a862f4634a8fa63a7a496266c8"}, - {file = "httptools-0.6.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1099f73952e18c718ccaaf7a97ae58c94a91839c3d247c6184326f85a2eda7b4"}, - {file = "httptools-0.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3e45d004531330030f7d07abe4865bc17963b9989bc1941cebbf7224010fb82"}, - {file = "httptools-0.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f2fea370361a90cb9330610a95303587eda9d1e69930dbbee9978eac1d5946"}, - {file = 
"httptools-0.6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0481154c91725f7e7b729a535190388be6c7cbae3bbf0e793343ca386282312"}, - {file = "httptools-0.6.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d25f8fdbc6cc6561353c7a384d76295e6a85a4945115b8bc347855db150e8c77"}, - {file = "httptools-0.6.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:054bdee08e4f7c15c186f6e7dbc8f0cf974b8dd1832b5f17f988faf8b12815c9"}, - {file = "httptools-0.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:4502620722b453c2c6306fad392c515dcb804dfa9c6d3b90d8926a07a7a01109"}, - {file = "httptools-0.6.2.tar.gz", hash = "sha256:ae694efefcb61317c79b2fa1caebc122060992408e389bb00889567e463a47f1"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}, + {file = "httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}, + {file = "httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"}, + {file = 
"httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}, + {file = "httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"}, + {file = "httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d3f0d369e7ffbe59c4b6116a44d6a8eb4783aae027f2c0b366cf0aa964185dba"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:94978a49b8f4569ad607cd4946b759d90b285e39c0d4640c6b36ca7a3ddf2efc"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40dc6a8e399e15ea525305a2ddba998b0af5caa2566bcd79dcbe8948181eeaff"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab9ba8dcf59de5181f6be44a77458e45a578fc99c31510b8c65b7d5acc3cf490"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc411e1c0a7dcd2f902c7c48cf079947a7e65b5485dea9decb82b9105ca71a43"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d54efd20338ac52ba31e7da78e4a72570cf729fac82bc31ff9199bedf1dc7440"}, + {file = "httptools-0.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:df959752a0c2748a65ab5387d08287abf6779ae9165916fe053e68ae1fbdc47f"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9"}, + {file = 
"httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd"}, + {file = "httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6"}, + {file = "httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"}, ] [package.extras] @@ -4261,6 +4293,17 @@ files = [ [package.dependencies] ply = "*" +[[package]] +name = "jsonpath-python" +version = "1.0.6" +description = "A more powerful JSONPath implementation in modern python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "jsonpath-python-1.0.6.tar.gz", hash = "sha256:dd5be4a72d8a2995c3f583cf82bf3cd1a9544cfdabf2d22595b67aff07349666"}, + {file = "jsonpath_python-1.0.6-py3-none-any.whl", hash = "sha256:1e3b78df579f5efc23565293612decee04214609208a2335884b3ee3f786b575"}, +] + [[package]] name = "jsonschema" version = "4.23.0" @@ -4535,13 +4578,13 @@ openai = ["openai (>=0.27.8)"] [[package]] name = "langsmith" -version = "0.1.135" +version = "0.1.137" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.135-py3-none-any.whl", hash = "sha256:b1d1ca3bad483a4239745c57e9b9157b4d099fbf3149be21e3d112c94ede06ac"}, - {file = "langsmith-0.1.135.tar.gz", hash = "sha256:7abed7e141386af99a2177f0b3600b124ae3ad1b482879ba0724ce92ef998a11"}, + {file = "langsmith-0.1.137-py3-none-any.whl", hash = "sha256:4256d5c61133749890f7b5c88321dbb133ce0f440c621ea28e76513285859b81"}, + {file = "langsmith-0.1.137.tar.gz", hash = "sha256:56cdfcc6c74cb20a3f437d5bd144feb5bf93f54c5a2918d1e568cbd084a372d4"}, ] [package.dependencies] @@ -4825,13 +4868,13 @@ urllib3 = ">=1.23" [[package]] name = "mako" -version = "1.3.5" +version = "1.3.6" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = false python-versions = ">=3.8" files = [ - {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, - {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, + {file = "Mako-1.3.6-py3-none-any.whl", hash = "sha256:a91198468092a2f1a0de86ca92690fb0cfc43ca90ee17e15d93662b4c04b241a"}, + {file = "mako-1.3.6.tar.gz", hash = "sha256:9ec3a1583713479fae654f83ed9fa8c9a4c16b7bb0daba0e6bbebff50c0d983d"}, ] [package.dependencies] @@ -4883,92 +4926,92 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "3.0.1" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" files = [ - {file = "MarkupSafe-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-win32.whl", hash = "sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97"}, - {file = "MarkupSafe-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-win32.whl", hash = "sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635"}, - {file = "MarkupSafe-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5"}, - {file = 
"MarkupSafe-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-win32.whl", hash = "sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa"}, - {file = "MarkupSafe-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-win32.whl", hash = "sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c"}, - {file = "MarkupSafe-3.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453"}, - {file = 
"MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-win32.whl", hash = "sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b"}, - {file = "MarkupSafe-3.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-win32.whl", hash = "sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8"}, - {file = "MarkupSafe-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b"}, - {file = "markupsafe-3.0.1.tar.gz", hash = "sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] name = "marshmallow" -version = "3.22.0" +version = "3.23.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "marshmallow-3.22.0-py3-none-any.whl", hash = "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9"}, - {file = "marshmallow-3.22.0.tar.gz", hash = "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e"}, + {file = "marshmallow-3.23.0-py3-none-any.whl", hash = "sha256:82f20a2397834fe6d9611b241f2f7e7b680ed89c49f84728a1ad937be6b4bdf4"}, + {file = "marshmallow-3.23.0.tar.gz", hash = "sha256:98d8827a9f10c03d44ead298d2e99c6aea8197df18ccfad360dae7f89a50da2e"}, ] [package.dependencies] packaging = ">=17.0" [package.extras] -dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] -tests = ["pytest", "pytz", "simplejson"] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] +docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.1.3)", "sphinx-issues (==5.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "simplejson"] [[package]] name = "matplotlib" @@ -5247,23 +5290,6 @@ files = [ msal = ">=1.29,<2" portalocker = ">=1.4,<3" -[[package]] -name = "msg-parser" -version = "1.2.0" -description = "This module enables reading, parsing and converting Microsoft Outlook MSG E-Mail files." 
-optional = false -python-versions = ">=3.4" -files = [ - {file = "msg_parser-1.2.0-py2.py3-none-any.whl", hash = "sha256:d47a2f0b2a359cb189fad83cc991b63ea781ecc70d91410324273fbf93e95375"}, - {file = "msg_parser-1.2.0.tar.gz", hash = "sha256:0de858d4fcebb6c8f6f028da83a17a20fe01cdce67c490779cf43b3b0162aa66"}, -] - -[package.dependencies] -olefile = ">=0.46" - -[package.extras] -rtf = ["compressed-rtf (>=1.0.5)"] - [[package]] name = "msrest" version = "0.7.1" @@ -5439,6 +5465,17 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "nest-asyncio" +version = "1.6.0" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +files = [ + {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, +] + [[package]] name = "newspaper3k" version = "0.2.8" @@ -5467,13 +5504,13 @@ tldextract = ">=2.0.1" [[package]] name = "nltk" -version = "3.8.1" +version = "3.9.1" description = "Natural Language Toolkit" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"}, - {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"}, + {file = "nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1"}, + {file = "nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868"}, ] [package.dependencies] @@ -5762,13 +5799,13 @@ sympy = "*" [[package]] name = "openai" -version = "1.51.2" +version = "1.52.2" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.51.2-py3-none-any.whl", hash = "sha256:5c5954711cba931423e471c37ff22ae0fd3892be9b083eee36459865fbbb83fa"}, - {file = "openai-1.51.2.tar.gz", hash = "sha256:c6a51fac62a1ca9df85a522e462918f6bb6bc51a8897032217e453a0730123a6"}, + {file = "openai-1.52.2-py3-none-any.whl", hash = "sha256:57e9e37bc407f39bb6ec3a27d7e8fb9728b2779936daa1fcf95df17d3edfaccc"}, + {file = "openai-1.52.2.tar.gz", hash = "sha256:87b7d0f69d85f5641678d414b7ee3082363647a5c66a462ed7f3ccb59582da0d"}, ] [package.dependencies] @@ -6089,68 +6126,69 @@ cryptography = ">=3.2.1" [[package]] name = "orjson" -version = "3.10.7" +version = "3.10.10" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12"}, - {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac"}, - {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7"}, - {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c"}, - {file = 
"orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9"}, - {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91"}, - {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250"}, - {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84"}, - {file = "orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175"}, - {file = "orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c"}, - {file = "orjson-3.10.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7db8539039698ddfb9a524b4dd19508256107568cdad24f3682d5773e60504a2"}, - {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:480f455222cb7a1dea35c57a67578848537d2602b46c464472c995297117fa09"}, - {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a9c9b168b3a19e37fe2778c0003359f07822c90fdff8f98d9d2a91b3144d8e0"}, - {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8de062de550f63185e4c1c54151bdddfc5625e37daf0aa1e75d2a1293e3b7d9a"}, - {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b0dd04483499d1de9c8f6203f8975caf17a6000b9c0c54630cef02e44ee624e"}, - {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b58d3795dafa334fc8fd46f7c5dc013e6ad06fd5b9a4cc98cb1456e7d3558bd6"}, - {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33cfb96c24034a878d83d1a9415799a73dc77480e6c40417e5dda0710d559ee6"}, - {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e724cebe1fadc2b23c6f7415bad5ee6239e00a69f30ee423f319c6af70e2a5c0"}, - {file = "orjson-3.10.7-cp311-none-win32.whl", hash = "sha256:82763b46053727a7168d29c772ed5c870fdae2f61aa8a25994c7984a19b1021f"}, - {file = "orjson-3.10.7-cp311-none-win_amd64.whl", hash = "sha256:eb8d384a24778abf29afb8e41d68fdd9a156cf6e5390c04cc07bbc24b89e98b5"}, - {file = "orjson-3.10.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f"}, - {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3"}, - {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bdbb61dcc365dd9be94e8f7df91975edc9364d6a78c8f7adb69c1cdff318ec93"}, - {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b48b3db6bb6e0a08fa8c83b47bc169623f801e5cc4f24442ab2b6617da3b5313"}, - {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23820a1563a1d386414fef15c249040042b8e5d07b40ab3fe3efbfbbcbcb8864"}, - {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09"}, - {file = 
"orjson-3.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d352ee8ac1926d6193f602cbe36b1643bbd1bbcb25e3c1a657a4390f3000c9a5"}, - {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d2d9f990623f15c0ae7ac608103c33dfe1486d2ed974ac3f40b693bad1a22a7b"}, - {file = "orjson-3.10.7-cp312-none-win32.whl", hash = "sha256:7c4c17f8157bd520cdb7195f75ddbd31671997cbe10aee559c2d613592e7d7eb"}, - {file = "orjson-3.10.7-cp312-none-win_amd64.whl", hash = "sha256:1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1"}, - {file = "orjson-3.10.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:77d325ed866876c0fa6492598ec01fe30e803272a6e8b10e992288b009cbe149"}, - {file = "orjson-3.10.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ea2c232deedcb605e853ae1db2cc94f7390ac776743b699b50b071b02bea6fe"}, - {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3dcfbede6737fdbef3ce9c37af3fb6142e8e1ebc10336daa05872bfb1d87839c"}, - {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11748c135f281203f4ee695b7f80bb1358a82a63905f9f0b794769483ea854ad"}, - {file = "orjson-3.10.7-cp313-none-win32.whl", hash = "sha256:a7e19150d215c7a13f39eb787d84db274298d3f83d85463e61d277bbd7f401d2"}, - {file = "orjson-3.10.7-cp313-none-win_amd64.whl", hash = "sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024"}, - {file = "orjson-3.10.7-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6ea2b2258eff652c82652d5e0f02bd5e0463a6a52abb78e49ac288827aaa1469"}, - {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430ee4d85841e1483d487e7b81401785a5dfd69db5de01314538f31f8fbf7ee1"}, - {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b6146e439af4c2472c56f8540d799a67a81226e11992008cb47e1267a9b3225"}, - {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:084e537806b458911137f76097e53ce7bf5806dda33ddf6aaa66a028f8d43a23"}, - {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829cf2195838e3f93b70fd3b4292156fc5e097aac3739859ac0dcc722b27ac0"}, - {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1193b2416cbad1a769f868b1749535d5da47626ac29445803dae7cc64b3f5c98"}, - {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4e6c3da13e5a57e4b3dca2de059f243ebec705857522f188f0180ae88badd354"}, - {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c31008598424dfbe52ce8c5b47e0752dca918a4fdc4a2a32004efd9fab41d866"}, - {file = "orjson-3.10.7-cp38-none-win32.whl", hash = "sha256:7122a99831f9e7fe977dc45784d3b2edc821c172d545e6420c375e5a935f5a1c"}, - {file = "orjson-3.10.7-cp38-none-win_amd64.whl", hash = "sha256:a763bc0e58504cc803739e7df040685816145a6f3c8a589787084b54ebc9f16e"}, - {file = "orjson-3.10.7-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e76be12658a6fa376fcd331b1ea4e58f5a06fd0220653450f0d415b8fd0fbe20"}, - {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed350d6978d28b92939bfeb1a0570c523f6170efc3f0a0ef1f1df287cd4f4960"}, - {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:144888c76f8520e39bfa121b31fd637e18d4cc2f115727865fdf9fa325b10412"}, - {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09b2d92fd95ad2402188cf51573acde57eb269eddabaa60f69ea0d733e789fe9"}, - {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b24a579123fa884f3a3caadaed7b75eb5715ee2b17ab5c66ac97d29b18fe57f"}, - {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591bcfe7512353bd609875ab38050efe3d55e18934e2f18950c108334b4ff"}, - {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f4db56635b58cd1a200b0a23744ff44206ee6aa428185e2b6c4a65b3197abdcd"}, - {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0fa5886854673222618638c6df7718ea7fe2f3f2384c452c9ccedc70b4a510a5"}, - {file = "orjson-3.10.7-cp39-none-win32.whl", hash = "sha256:8272527d08450ab16eb405f47e0f4ef0e5ff5981c3d82afe0efd25dcbef2bcd2"}, - {file = "orjson-3.10.7-cp39-none-win_amd64.whl", hash = "sha256:974683d4618c0c7dbf4f69c95a979734bf183d0658611760017f6e70a145af58"}, - {file = "orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3"}, + {file = "orjson-3.10.10-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b788a579b113acf1c57e0a68e558be71d5d09aa67f62ca1f68e01117e550a998"}, + {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:804b18e2b88022c8905bb79bd2cbe59c0cd014b9328f43da8d3b28441995cda4"}, + {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9972572a1d042ec9ee421b6da69f7cc823da5962237563fa548ab17f152f0b9b"}, + {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc6993ab1c2ae7dd0711161e303f1db69062955ac2668181bfdf2dd410e65258"}, + {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d78e4cacced5781b01d9bc0f0cd8b70b906a0e109825cb41c1b03f9c41e4ce86"}, + {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6eb2598df518281ba0cbc30d24c5b06124ccf7e19169e883c14e0831217a0bc"}, + {file = "orjson-3.10.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23776265c5215ec532de6238a52707048401a568f0fa0d938008e92a147fe2c7"}, + {file = "orjson-3.10.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8cc2a654c08755cef90b468ff17c102e2def0edd62898b2486767204a7f5cc9c"}, + {file = "orjson-3.10.10-cp310-none-win32.whl", hash = "sha256:081b3fc6a86d72efeb67c13d0ea7c030017bd95f9868b1e329a376edc456153b"}, + {file = "orjson-3.10.10-cp310-none-win_amd64.whl", hash = "sha256:ff38c5fb749347768a603be1fb8a31856458af839f31f064c5aa74aca5be9efe"}, + {file = "orjson-3.10.10-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:879e99486c0fbb256266c7c6a67ff84f46035e4f8749ac6317cc83dacd7f993a"}, + {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:019481fa9ea5ff13b5d5d95e6fd5ab25ded0810c80b150c2c7b1cc8660b662a7"}, + {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0dd57eff09894938b4c86d4b871a479260f9e156fa7f12f8cad4b39ea8028bb5"}, + {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:dbde6d70cd95ab4d11ea8ac5e738e30764e510fc54d777336eec09bb93b8576c"}, + {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2625cb37b8fb42e2147404e5ff7ef08712099197a9cd38895006d7053e69d6"}, + {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbf3c20c6a7db69df58672a0d5815647ecf78c8e62a4d9bd284e8621c1fe5ccb"}, + {file = "orjson-3.10.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:75c38f5647e02d423807d252ce4528bf6a95bd776af999cb1fb48867ed01d1f6"}, + {file = "orjson-3.10.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:23458d31fa50ec18e0ec4b0b4343730928296b11111df5f547c75913714116b2"}, + {file = "orjson-3.10.10-cp311-none-win32.whl", hash = "sha256:2787cd9dedc591c989f3facd7e3e86508eafdc9536a26ec277699c0aa63c685b"}, + {file = "orjson-3.10.10-cp311-none-win_amd64.whl", hash = "sha256:6514449d2c202a75183f807bc755167713297c69f1db57a89a1ef4a0170ee269"}, + {file = "orjson-3.10.10-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8564f48f3620861f5ef1e080ce7cd122ee89d7d6dacf25fcae675ff63b4d6e05"}, + {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5bf161a32b479034098c5b81f2608f09167ad2fa1c06abd4e527ea6bf4837a9"}, + {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68b65c93617bcafa7f04b74ae8bc2cc214bd5cb45168a953256ff83015c6747d"}, + {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8e28406f97fc2ea0c6150f4c1b6e8261453318930b334abc419214c82314f85"}, + {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4d0d9fe174cc7a5bdce2e6c378bcdb4c49b2bf522a8f996aa586020e1b96cee"}, + {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3be81c42f1242cbed03cbb3973501fcaa2675a0af638f8be494eaf37143d999"}, + {file = "orjson-3.10.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:65f9886d3bae65be026219c0a5f32dbbe91a9e6272f56d092ab22561ad0ea33b"}, + {file = "orjson-3.10.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:730ed5350147db7beb23ddaf072f490329e90a1d059711d364b49fe352ec987b"}, + {file = "orjson-3.10.10-cp312-none-win32.whl", hash = "sha256:a8f4bf5f1c85bea2170800020d53a8877812892697f9c2de73d576c9307a8a5f"}, + {file = "orjson-3.10.10-cp312-none-win_amd64.whl", hash = "sha256:384cd13579a1b4cd689d218e329f459eb9ddc504fa48c5a83ef4889db7fd7a4f"}, + {file = "orjson-3.10.10-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44bffae68c291f94ff5a9b4149fe9d1bdd4cd0ff0fb575bcea8351d48db629a1"}, + {file = "orjson-3.10.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e27b4c6437315df3024f0835887127dac2a0a3ff643500ec27088d2588fa5ae1"}, + {file = "orjson-3.10.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca84df16d6b49325a4084fd8b2fe2229cb415e15c46c529f868c3387bb1339d"}, + {file = "orjson-3.10.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c14ce70e8f39bd71f9f80423801b5d10bf93d1dceffdecd04df0f64d2c69bc01"}, + {file = "orjson-3.10.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:24ac62336da9bda1bd93c0491eff0613003b48d3cb5d01470842e7b52a40d5b4"}, + {file = "orjson-3.10.10-cp313-none-win32.whl", hash = "sha256:eb0a42831372ec2b05acc9ee45af77bcaccbd91257345f93780a8e654efc75db"}, + {file = 
"orjson-3.10.10-cp313-none-win_amd64.whl", hash = "sha256:f0c4f37f8bf3f1075c6cc8dd8a9f843689a4b618628f8812d0a71e6968b95ffd"}, + {file = "orjson-3.10.10-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:829700cc18503efc0cf502d630f612884258020d98a317679cd2054af0259568"}, + {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0ceb5e0e8c4f010ac787d29ae6299846935044686509e2f0f06ed441c1ca949"}, + {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0c25908eb86968613216f3db4d3003f1c45d78eb9046b71056ca327ff92bdbd4"}, + {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:218cb0bc03340144b6328a9ff78f0932e642199ac184dd74b01ad691f42f93ff"}, + {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2277ec2cea3775640dc81ab5195bb5b2ada2fe0ea6eee4677474edc75ea6785"}, + {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:848ea3b55ab5ccc9d7bbd420d69432628b691fba3ca8ae3148c35156cbd282aa"}, + {file = "orjson-3.10.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e3e67b537ac0c835b25b5f7d40d83816abd2d3f4c0b0866ee981a045287a54f3"}, + {file = "orjson-3.10.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:7948cfb909353fce2135dcdbe4521a5e7e1159484e0bb024c1722f272488f2b8"}, + {file = "orjson-3.10.10-cp38-none-win32.whl", hash = "sha256:78bee66a988f1a333dc0b6257503d63553b1957889c17b2c4ed72385cd1b96ae"}, + {file = "orjson-3.10.10-cp38-none-win_amd64.whl", hash = "sha256:f1d647ca8d62afeb774340a343c7fc023efacfd3a39f70c798991063f0c681dd"}, + {file = "orjson-3.10.10-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5a059afddbaa6dd733b5a2d76a90dbc8af790b993b1b5cb97a1176ca713b5df8"}, + {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f9b5c59f7e2a1a410f971c5ebc68f1995822837cd10905ee255f96074537ee6"}, + {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d5ef198bafdef4aa9d49a4165ba53ffdc0a9e1c7b6f76178572ab33118afea25"}, + {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf29ce0bb5d3320824ec3d1508652421000ba466abd63bdd52c64bcce9eb1fa"}, + {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dddd5516bcc93e723d029c1633ae79c4417477b4f57dad9bfeeb6bc0315e654a"}, + {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12f2003695b10817f0fa8b8fca982ed7f5761dcb0d93cff4f2f9f6709903fd7"}, + {file = "orjson-3.10.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:672f9874a8a8fb9bb1b771331d31ba27f57702c8106cdbadad8bda5d10bc1019"}, + {file = "orjson-3.10.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1dcbb0ca5fafb2b378b2c74419480ab2486326974826bbf6588f4dc62137570a"}, + {file = "orjson-3.10.10-cp39-none-win32.whl", hash = "sha256:d9bbd3a4b92256875cb058c3381b782649b9a3c68a4aa9a2fff020c2f9cfc1be"}, + {file = "orjson-3.10.10-cp39-none-win_amd64.whl", hash = "sha256:766f21487a53aee8524b97ca9582d5c6541b03ab6210fbaf10142ae2f3ced2aa"}, + {file = "orjson-3.10.10.tar.gz", hash = "sha256:37949383c4df7b4337ce82ee35b6d7471e55195efa7dcb45ab8226ceadb0fe3b"}, ] [[package]] @@ -6307,12 +6345,12 @@ ppft = ">=1.7.6.9" [[package]] name = "peewee" -version = "3.17.6" +version = "3.17.7" 
description = "a little orm" optional = false python-versions = "*" files = [ - {file = "peewee-3.17.6.tar.gz", hash = "sha256:cea5592c6f4da1592b7cff8eaf655be6648a1f5857469e30037bf920c03fb8fb"}, + {file = "peewee-3.17.7.tar.gz", hash = "sha256:6aefc700bd530fc6ac23fa19c9c5b47041751d92985b799169c8e318e97eabaa"}, ] [[package]] @@ -6352,95 +6390,90 @@ numpy = "*" [[package]] name = "pillow" -version = "10.4.0" +version = "11.0.0" description = "Python Imaging Library (Fork)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, - {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, - {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, - {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, - {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, - {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, - {file = 
"pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, - {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, - {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, - {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, - {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, - {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, - {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, - {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, - {file = 
"pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, - {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, - {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, - {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, - {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, - {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, - {file 
= "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, - {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, + {file = "pillow-11.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:6619654954dc4936fcff82db8eb6401d3159ec6be81e33c6000dfd76ae189947"}, + {file = "pillow-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b3c5ac4bed7519088103d9450a1107f76308ecf91d6dabc8a33a2fcfb18d0fba"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a65149d8ada1055029fcb665452b2814fe7d7082fcb0c5bed6db851cb69b2086"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88a58d8ac0cc0e7f3a014509f0455248a76629ca9b604eca7dc5927cc593c5e9"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c26845094b1af3c91852745ae78e3ea47abf3dbcd1cf962f16b9a5fbe3ee8488"}, + {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:1a61b54f87ab5786b8479f81c4b11f4d61702830354520837f8cc791ebba0f5f"}, + {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:674629ff60030d144b7bca2b8330225a9b11c482ed408813924619c6f302fdbb"}, + {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:598b4e238f13276e0008299bd2482003f48158e2b11826862b1eb2ad7c768b97"}, + {file = "pillow-11.0.0-cp310-cp310-win32.whl", hash = "sha256:9a0f748eaa434a41fccf8e1ee7a3eed68af1b690e75328fd7a60af123c193b50"}, + {file = "pillow-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5629742881bcbc1f42e840af185fd4d83a5edeb96475a575f4da50d6ede337c"}, + {file = "pillow-11.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:ee217c198f2e41f184f3869f3e485557296d505b5195c513b2bfe0062dc537f1"}, + {file = "pillow-11.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = 
"sha256:1c1d72714f429a521d8d2d018badc42414c3077eb187a59579f28e4270b4b0fc"}, + {file = "pillow-11.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:499c3a1b0d6fc8213519e193796eb1a86a1be4b1877d678b30f83fd979811d1a"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8b2351c85d855293a299038e1f89db92a2f35e8d2f783489c6f0b2b5f3fe8a3"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f4dba50cfa56f910241eb7f883c20f1e7b1d8f7d91c750cd0b318bad443f4d5"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5ddbfd761ee00c12ee1be86c9c0683ecf5bb14c9772ddbd782085779a63dd55b"}, + {file = "pillow-11.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:45c566eb10b8967d71bf1ab8e4a525e5a93519e29ea071459ce517f6b903d7fa"}, + {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b4fd7bd29610a83a8c9b564d457cf5bd92b4e11e79a4ee4716a63c959699b306"}, + {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cb929ca942d0ec4fac404cbf520ee6cac37bf35be479b970c4ffadf2b6a1cad9"}, + {file = "pillow-11.0.0-cp311-cp311-win32.whl", hash = "sha256:006bcdd307cc47ba43e924099a038cbf9591062e6c50e570819743f5607404f5"}, + {file = "pillow-11.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:52a2d8323a465f84faaba5236567d212c3668f2ab53e1c74c15583cf507a0291"}, + {file = "pillow-11.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:16095692a253047fe3ec028e951fa4221a1f3ed3d80c397e83541a3037ff67c9"}, + {file = "pillow-11.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2c0a187a92a1cb5ef2c8ed5412dd8d4334272617f532d4ad4de31e0495bd923"}, + {file = "pillow-11.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:084a07ef0821cfe4858fe86652fffac8e187b6ae677e9906e192aafcc1b69903"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8069c5179902dcdce0be9bfc8235347fdbac249d23bd90514b7a47a72d9fecf4"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f02541ef64077f22bf4924f225c0fd1248c168f86e4b7abdedd87d6ebaceab0f"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fcb4621042ac4b7865c179bb972ed0da0218a076dc1820ffc48b1d74c1e37fe9"}, + {file = "pillow-11.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:00177a63030d612148e659b55ba99527803288cea7c75fb05766ab7981a8c1b7"}, + {file = "pillow-11.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8853a3bf12afddfdf15f57c4b02d7ded92c7a75a5d7331d19f4f9572a89c17e6"}, + {file = "pillow-11.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3107c66e43bda25359d5ef446f59c497de2b5ed4c7fdba0894f8d6cf3822dafc"}, + {file = "pillow-11.0.0-cp312-cp312-win32.whl", hash = "sha256:86510e3f5eca0ab87429dd77fafc04693195eec7fd6a137c389c3eeb4cfb77c6"}, + {file = "pillow-11.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8ec4a89295cd6cd4d1058a5e6aec6bf51e0eaaf9714774e1bfac7cfc9051db47"}, + {file = "pillow-11.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:27a7860107500d813fcd203b4ea19b04babe79448268403172782754870dac25"}, + {file = "pillow-11.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcd1fb5bb7b07f64c15618c89efcc2cfa3e95f0e3bcdbaf4642509de1942a699"}, + {file = "pillow-11.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0e038b0745997c7dcaae350d35859c9715c71e92ffb7e0f4a8e8a16732150f38"}, + {file = 
"pillow-11.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ae08bd8ffc41aebf578c2af2f9d8749d91f448b3bfd41d7d9ff573d74f2a6b2"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d69bfd8ec3219ae71bcde1f942b728903cad25fafe3100ba2258b973bd2bc1b2"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:61b887f9ddba63ddf62fd02a3ba7add935d053b6dd7d58998c630e6dbade8527"}, + {file = "pillow-11.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:c6a660307ca9d4867caa8d9ca2c2658ab685de83792d1876274991adec7b93fa"}, + {file = "pillow-11.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:73e3a0200cdda995c7e43dd47436c1548f87a30bb27fb871f352a22ab8dcf45f"}, + {file = "pillow-11.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fba162b8872d30fea8c52b258a542c5dfd7b235fb5cb352240c8d63b414013eb"}, + {file = "pillow-11.0.0-cp313-cp313-win32.whl", hash = "sha256:f1b82c27e89fffc6da125d5eb0ca6e68017faf5efc078128cfaa42cf5cb38798"}, + {file = "pillow-11.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:8ba470552b48e5835f1d23ecb936bb7f71d206f9dfeee64245f30c3270b994de"}, + {file = "pillow-11.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:846e193e103b41e984ac921b335df59195356ce3f71dcfd155aa79c603873b84"}, + {file = "pillow-11.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4ad70c4214f67d7466bea6a08061eba35c01b1b89eaa098040a35272a8efb22b"}, + {file = "pillow-11.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:6ec0d5af64f2e3d64a165f490d96368bb5dea8b8f9ad04487f9ab60dc4bb6003"}, + {file = "pillow-11.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c809a70e43c7977c4a42aefd62f0131823ebf7dd73556fa5d5950f5b354087e2"}, + {file = "pillow-11.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:4b60c9520f7207aaf2e1d94de026682fc227806c6e1f55bba7606d1c94dd623a"}, + {file = "pillow-11.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1e2688958a840c822279fda0086fec1fdab2f95bf2b717b66871c4ad9859d7e8"}, + {file = "pillow-11.0.0-cp313-cp313t-win32.whl", hash = "sha256:607bbe123c74e272e381a8d1957083a9463401f7bd01287f50521ecb05a313f8"}, + {file = "pillow-11.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c39ed17edea3bc69c743a8dd3e9853b7509625c2462532e62baa0732163a904"}, + {file = "pillow-11.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:75acbbeb05b86bc53cbe7b7e6fe00fbcf82ad7c684b3ad82e3d711da9ba287d3"}, + {file = "pillow-11.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2e46773dc9f35a1dd28bd6981332fd7f27bec001a918a72a79b4133cf5291dba"}, + {file = "pillow-11.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2679d2258b7f1192b378e2893a8a0a0ca472234d4c2c0e6bdd3380e8dfa21b6a"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda2616eb2313cbb3eebbe51f19362eb434b18e3bb599466a1ffa76a033fb916"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ec184af98a121fb2da42642dea8a29ec80fc3efbaefb86d8fdd2606619045d"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:8594f42df584e5b4bb9281799698403f7af489fba84c34d53d1c4bfb71b7c4e7"}, + {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:c12b5ae868897c7338519c03049a806af85b9b8c237b7d675b8c5e089e4a618e"}, + {file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:70fbbdacd1d271b77b7721fe3cdd2d537bbbd75d29e6300c672ec6bb38d9672f"}, + 
{file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5178952973e588b3f1360868847334e9e3bf49d19e169bbbdfaf8398002419ae"}, + {file = "pillow-11.0.0-cp39-cp39-win32.whl", hash = "sha256:8c676b587da5673d3c75bd67dd2a8cdfeb282ca38a30f37950511766b26858c4"}, + {file = "pillow-11.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:94f3e1780abb45062287b4614a5bc0874519c86a777d4a7ad34978e86428b8dd"}, + {file = "pillow-11.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:290f2cc809f9da7d6d622550bbf4c1e57518212da51b6a30fe8e0a270a5b78bd"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1187739620f2b365de756ce086fdb3604573337cc28a0d3ac4a01ab6b2d2a6d2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fbbcb7b57dc9c794843e3d1258c0fbf0f48656d46ffe9e09b63bbd6e8cd5d0a2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d203af30149ae339ad1b4f710d9844ed8796e97fda23ffbc4cc472968a47d0b"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a0d3b115009ebb8ac3d2ebec5c2982cc693da935f4ab7bb5c8ebe2f47d36f2"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:73853108f56df97baf2bb8b522f3578221e56f646ba345a372c78326710d3830"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e58876c91f97b0952eb766123bfef372792ab3f4e3e1f1a2267834c2ab131734"}, + {file = "pillow-11.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:224aaa38177597bb179f3ec87eeefcce8e4f85e608025e9cfac60de237ba6316"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5bd2d3bdb846d757055910f0a59792d33b555800813c3b39ada1829c372ccb06"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:375b8dd15a1f5d2feafff536d47e22f69625c1aa92f12b339ec0b2ca40263273"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:daffdf51ee5db69a82dd127eabecce20729e21f7a3680cf7cbb23f0829189790"}, + {file = "pillow-11.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7326a1787e3c7b0429659e0a944725e1b03eeaa10edd945a86dead1913383944"}, + {file = "pillow-11.0.0.tar.gz", hash = "sha256:72bacbaf24ac003fea9bff9837d1eedb6088758d41e100c1552930151f677739"}, ] [package.extras] -docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=8.1)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] fpx = ["olefile"] mic = ["olefile"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] @@ -6525,20 +6558,20 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p [[package]] name = "postgrest" -version = "0.17.1" +version = "0.17.2" description = "PostgREST client for Python. This library provides an ORM interface to PostgREST." 
@@ -6525,20 +6558,20 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p

[[package]]
name = "postgrest"
-version = "0.17.1"
+version = "0.17.2"
description = "PostgREST client for Python. This library provides an ORM interface to PostgREST."
optional = false
-python-versions = "<4.0,>=3.8"
+python-versions = "<4.0,>=3.9"
files = [
- {file = "postgrest-0.17.1-py3-none-any.whl", hash = "sha256:ec1d00dc8532fe5ffb342cfc7c4e610a1e0e2272eb14f78f9b2b61094f9be510"},
- {file = "postgrest-0.17.1.tar.gz", hash = "sha256:e31d9977dbb80dc5f9fdd4d444014686606692dc4ddb9adc85639e56c6d54c92"},
+ {file = "postgrest-0.17.2-py3-none-any.whl", hash = "sha256:f7c4f448e5a5e2d4c1dcf192edae9d1007c4261e9a6fb5116783a0046846ece2"},
+ {file = "postgrest-0.17.2.tar.gz", hash = "sha256:445cd4e4a191e279492549df0c4e827d32f9d01d0852599bb8a6efb0f07fcf78"},
]

[package.dependencies]
deprecation = ">=2.1.0,<3.0.0"
httpx = {version = ">=0.26,<0.28", extras = ["http2"]}
pydantic = ">=1.9,<3.0"
-strenum = ">=0.4.9,<0.5.0"
+strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""}

[[package]]
name = "posthog"
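The new environment marker on `strenum` reflects that `enum.StrEnum` has been in the standard library since Python 3.11, so the backport is only pulled in on older interpreters. A minimal sketch of the compatibility import such a marker typically pairs with (illustrative; not necessarily postgrest's exact code):

```python
import sys

if sys.version_info >= (3, 11):
    from enum import StrEnum  # stdlib since Python 3.11
else:
    from strenum import StrEnum  # third-party backport for older interpreters

class Role(StrEnum):
    ADMIN = "admin"
    VIEWER = "viewer"

# StrEnum members are str subclasses, so they compare equal to plain strings.
assert Role.ADMIN == "admin"
```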
"Beautiful, Pythonic protocol buffers." optional = false python-versions = ">=3.7" files = [ - {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, - {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, + {file = "proto_plus-1.25.0-py3-none-any.whl", hash = "sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961"}, + {file = "proto_plus-1.25.0.tar.gz", hash = "sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91"}, ] [package.dependencies] @@ -6661,112 +6694,108 @@ files = [ [[package]] name = "psutil" -version = "6.0.0" +version = "6.1.0" description = "Cross-platform lib for process and system monitoring in Python." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"}, - {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"}, - {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"}, - {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"}, - {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"}, - {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"}, - {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"}, - {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"}, - {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"}, - {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"}, + {file = "psutil-6.1.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ff34df86226c0227c52f38b919213157588a678d049688eded74c76c8ba4a5d0"}, + {file = 
"psutil-6.1.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c0e0c00aa18ca2d3b2b991643b799a15fc8f0563d2ebb6040f64ce8dc027b942"}, + {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:000d1d1ebd634b4efb383f4034437384e44a6d455260aaee2eca1e9c1b55f047"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5cd2bcdc75b452ba2e10f0e8ecc0b57b827dd5d7aaffbc6821b2a9a242823a76"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:045f00a43c737f960d273a83973b2511430d61f283a44c96bf13a6e829ba8fdc"}, + {file = "psutil-6.1.0-cp27-none-win32.whl", hash = "sha256:9118f27452b70bb1d9ab3198c1f626c2499384935aaf55388211ad982611407e"}, + {file = "psutil-6.1.0-cp27-none-win_amd64.whl", hash = "sha256:a8506f6119cff7015678e2bce904a4da21025cc70ad283a53b099e7620061d85"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a"}, + {file = "psutil-6.1.0-cp36-cp36m-win32.whl", hash = "sha256:6d3fbbc8d23fcdcb500d2c9f94e07b1342df8ed71b948a2649b5cb060a7c94ca"}, + {file = "psutil-6.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1209036fbd0421afde505a4879dee3b2fd7b1e14fee81c0069807adcbbcca747"}, + {file = "psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e"}, + {file = "psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be"}, + {file = "psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a"}, ] [package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] +dev = ["black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "wheel"] +test = ["pytest", "pytest-xdist", "setuptools"] [[package]] name = "psycopg2-binary" -version = "2.9.9" +version = "2.9.10" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, - {file = 
"psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = 
"psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, - {file = 
"psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", 
hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, + {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"}, + {file = 
"psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"}, ] [[package]] @@ -6821,19 +6850,6 @@ files = [ {file = "pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7"}, {file = "pyarrow-17.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:af5ff82a04b2171415f1410cff7ebb79861afc5dae50be73ce06d6e870615204"}, {file = "pyarrow-17.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:edca18eaca89cd6382dfbcff3dd2d87633433043650c07375d095cd3517561d8"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c7916bff914ac5d4a8fe25b7a25e432ff921e72f6f2b7547d1e325c1ad9d155"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f553ca691b9e94b202ff741bdd40f6ccb70cdd5fbf65c187af132f1317de6145"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0cdb0e627c86c373205a2f94a510ac4376fdc523f8bb36beab2e7f204416163c"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d7d192305d9d8bc9082d10f361fc70a73590a4c65cf31c3e6926cd72b76bc35c"}, - {file = "pyarrow-17.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:02dae06ce212d8b3244dd3e7d12d9c4d3046945a5933d28026598e9dbbda1fca"}, - {file = "pyarrow-17.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:13d7a460b412f31e4c0efa1148e1d29bdf18ad1411eb6757d38f8fbdcc8645fb"}, - {file = "pyarrow-17.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b564a51fbccfab5a04a80453e5ac6c9954a9c5ef2890d1bcf63741909c3f8df"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32503827abbc5aadedfa235f5ece8c4f8f8b0a3cf01066bc8d29de7539532687"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a155acc7f154b9ffcc85497509bcd0d43efb80d6f733b0dc3bb14e281f131c8b"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:dec8d129254d0188a49f8a1fc99e0560dc1b85f60af729f47de4046015f9b0a5"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a48ddf5c3c6a6c505904545c25a4ae13646ae1f8ba703c4df4a1bfe4f4006bda"}, - {file = "pyarrow-17.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:42bf93249a083aca230ba7e2786c5f673507fa97bbd9725a1e2754715151a204"}, - {file = "pyarrow-17.0.0.tar.gz", hash = "sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28"}, ] [package.dependencies] @@ -6921,119 +6937,120 @@ files = [ [[package]] name = "pydantic" -version = "2.8.2" +version = "2.9.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = 
"sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, ] [package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.20.1" +annotated-types = ">=0.6.0" +pydantic-core = "2.23.4" typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""} [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.20.1" +version = "2.23.4" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = 
"pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, - {file = 
"pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, - {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, - {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, - {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, - {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = 
"pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = 
"sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = 
"pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, ] [package.dependencies] @@ -7063,13 +7080,13 @@ semver = ["semver (>=3.0.2)"] [[package]] name = "pydantic-settings" -version = "2.4.0" +version = "2.6.0" description = "Settings management using Pydantic" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_settings-2.4.0-py3-none-any.whl", hash = "sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315"}, - {file = "pydantic_settings-2.4.0.tar.gz", hash = "sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88"}, + {file = "pydantic_settings-2.6.0-py3-none-any.whl", hash = "sha256:4a819166f119b74d7f8c765196b165f95cc7487ce58ea27dec8a5a26be0970e0"}, + {file = "pydantic_settings-2.6.0.tar.gz", hash = "sha256:44a1804abffac9e6a30372bb45f6cafab945ef5af25e66b1c634c01dd39e0188"}, ] [package.dependencies] @@ -7098,6 +7115,17 @@ typing-extensions = ">3.10,<4.6.0 || >4.6.0" [package.extras] dev = ["build", "coverage", "furo", "invoke", "mypy", "pytest", "pytest-cov", "pytest-mypy-testing", "ruff", "sphinx", "sphinx-autodoc-typehints", "tox", "twine", "wheel"] +[[package]] +name = "pydub" +version = "0.25.1" +description = "Manipulate audio with an simple and easy high level interface" +optional = false +python-versions = "*" +files = [ + {file = "pydub-0.25.1-py2.py3-none-any.whl", hash = "sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6"}, + {file = "pydub-0.25.1.tar.gz", hash = "sha256:980a33ce9949cab2a569606b65674d748ecbca4f0796887fd6f46173a7b0d30f"}, +] + [[package]] name = "pygments" version = "2.18.0" @@ -7231,6 +7259,27 @@ files = [ [package.extras] diagrams = ["jinja2", "railroad-diagrams"] +[[package]] +name = "pypdf" +version = "5.0.1" +description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pypdf-5.0.1-py3-none-any.whl", hash = "sha256:ff8a32da6c7a63fea9c32fa4dd837cdd0db7966adf6c14f043e3f12592e992db"}, + {file = "pypdf-5.0.1.tar.gz", hash = "sha256:a361c3c372b4a659f9c8dd438d5ce29a753c79c620dc6e1fd66977651f5547ea"}, +] + +[package.dependencies] +typing_extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} + +[package.extras] +crypto = ["PyCryptodome", "cryptography"] +dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "pytest-socket", "pytest-timeout", "pytest-xdist", "wheel"] +docs = ["myst_parser", 
"sphinx", "sphinx_rtd_theme"] +full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"] +image = ["Pillow (>=8.0.0)"] + [[package]] name = "pypdfium2" version = "4.17.0" @@ -7486,13 +7535,13 @@ files = [ [[package]] name = "python-dateutil" -version = "2.9.0.post0" +version = "2.8.2" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] [package.dependencies] @@ -7529,17 +7578,17 @@ cli = ["click (>=5.0)"] [[package]] name = "python-iso639" -version = "2024.4.27" +version = "2024.10.22" description = "ISO 639 language codes, names, and other associated information" optional = false python-versions = ">=3.8" files = [ - {file = "python_iso639-2024.4.27-py3-none-any.whl", hash = "sha256:27526a84cebc4c4d53fea9d1ebbc7209c8d279bebaa343e6765a1fc8780565ab"}, - {file = "python_iso639-2024.4.27.tar.gz", hash = "sha256:97e63b5603e085c6a56a12a95740010e75d9134e0aab767e0978b53fd8824f13"}, + {file = "python_iso639-2024.10.22-py3-none-any.whl", hash = "sha256:02d3ce2e01c6896b30b9cbbd3e1c8ee0d7221250b5d63ea9803e0d2a81fd1047"}, + {file = "python_iso639-2024.10.22.tar.gz", hash = "sha256:750f21b6a0bc6baa24253a3d8aae92b582bf93aa40988361cd96852c2c6d9a52"}, ] [package.extras] -dev = ["black (==24.4.2)", "build (==1.2.1)", "flake8 (==7.0.0)", "pytest (==8.1.2)", "requests (==2.31.0)", "twine (==5.0.0)"] +dev = ["black (==24.10.0)", "build (==1.2.1)", "flake8 (==7.1.1)", "pytest (==8.3.3)", "requests (==2.32.3)", "twine (==5.1.1)"] [[package]] name = "python-magic" @@ -7553,19 +7602,36 @@ files = [ ] [[package]] -name = "python-pptx" -version = "0.6.23" -description = "Generate and manipulate Open XML PowerPoint (.pptx) files" +name = "python-oxmsg" +version = "0.0.1" +description = "Extract attachments from Outlook .msg files." optional = false -python-versions = "*" +python-versions = ">=3.9" files = [ - {file = "python-pptx-0.6.23.tar.gz", hash = "sha256:587497ff28e779ab18dbb074f6d4052893c85dedc95ed75df319364f331fedee"}, - {file = "python_pptx-0.6.23-py3-none-any.whl", hash = "sha256:dd0527194627a2b7cc05f3ba23ecaa2d9a0d5ac9b6193a28ed1b7a716f4217d4"}, + {file = "python_oxmsg-0.0.1-py3-none-any.whl", hash = "sha256:8ea7d5dda1bc161a413213da9e18ed152927c1fda2feaf5d1f02192d8ad45eea"}, + {file = "python_oxmsg-0.0.1.tar.gz", hash = "sha256:b65c1f93d688b85a9410afa824192a1ddc39da359b04a0bd2cbd3874e84d4994"}, +] + +[package.dependencies] +click = "*" +olefile = "*" +typing-extensions = ">=4.9.0" + +[[package]] +name = "python-pptx" +version = "1.0.2" +description = "Create, read, and update PowerPoint 2007+ (.pptx) files." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "python_pptx-1.0.2-py3-none-any.whl", hash = "sha256:160838e0b8565a8b1f67947675886e9fea18aa5e795db7ae531606d68e785cba"}, + {file = "python_pptx-1.0.2.tar.gz", hash = "sha256:479a8af0eaf0f0d76b6f00b0887732874ad2e3188230315290cd1f9dd9cc7095"}, ] [package.dependencies] lxml = ">=3.1.0" Pillow = ">=3.3.2" +typing-extensions = ">=4.9.0" XlsxWriter = ">=0.5.7" [[package]] @@ -8139,13 +8205,13 @@ py = ">=1.4.26,<2.0.0" [[package]] name = "rich" -version = "13.9.2" +version = "13.9.3" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" files = [ - {file = "rich-13.9.2-py3-none-any.whl", hash = "sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1"}, - {file = "rich-13.9.2.tar.gz", hash = "sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c"}, + {file = "rich-13.9.3-py3-none-any.whl", hash = "sha256:9836f5096eb2172c9e77df411c1b009bace4193d6a481d534fea75ebba758283"}, + {file = "rich-13.9.3.tar.gz", hash = "sha256:bc1e01b899537598cf02579d2b9f4a415104d3fc439313a7a2c165d76557a08e"}, ] [package.dependencies] @@ -8558,11 +8624,6 @@ files = [ {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f60021ec1574e56632be2a36b946f8143bf4e5e6af4a06d85281adc22938e0dd"}, {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:394397841449853c2290a32050382edaec3da89e35b3e03d6cc966aebc6a8ae6"}, {file = "scikit_learn-1.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:57cc1786cfd6bd118220a92ede80270132aa353647684efa385a74244a41e3b1"}, - {file = "scikit_learn-1.5.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9a702e2de732bbb20d3bad29ebd77fc05a6b427dc49964300340e4c9328b3f5"}, - {file = "scikit_learn-1.5.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:b0768ad641981f5d3a198430a1d31c3e044ed2e8a6f22166b4d546a5116d7908"}, - {file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:178ddd0a5cb0044464fc1bfc4cca5b1833bfc7bb022d70b05db8530da4bb3dd3"}, - {file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7284ade780084d94505632241bf78c44ab3b6f1e8ccab3d2af58e0e950f9c12"}, - {file = "scikit_learn-1.5.2-cp313-cp313-win_amd64.whl", hash = "sha256:b7b0f9a0b1040830d38c39b91b3a44e1b643f4b36e36567b80b7c6bd2202a27f"}, {file = "scikit_learn-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:757c7d514ddb00ae249832fe87100d9c73c6ea91423802872d9e74970a0e40b9"}, {file = "scikit_learn-1.5.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:52788f48b5d8bca5c0736c175fa6bdaab2ef00a8f536cda698db61bd89c551c1"}, {file = "scikit_learn-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:643964678f4b5fbdc95cbf8aec638acc7aa70f5f79ee2cdad1eec3df4ba6ead8"}, @@ -8688,13 +8749,13 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "75.1.0" +version = "75.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, - {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, + {file = 
"setuptools-75.2.0-py3-none-any.whl", hash = "sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8"}, + {file = "setuptools-75.2.0.tar.gz", hash = "sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec"}, ] [package.extras] @@ -8860,60 +8921,68 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.35" +version = "2.0.36" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8bea573863762bbf45d1e13f87c2d2fd32cee2dbd50d050f83f87429c9e1ea"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f552023710d4b93d8fb29a91fadf97de89c5926c6bd758897875435f2a939f33"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:016b2e665f778f13d3c438651dd4de244214b527a275e0acf1d44c05bc6026a9"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-win32.whl", hash = "sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b"}, - {file = "SQLAlchemy-2.0.35-cp310-cp310-win_amd64.whl", hash = "sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-win32.whl", hash = "sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e"}, - {file = "SQLAlchemy-2.0.35-cp311-cp311-win_amd64.whl", hash = "sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-win32.whl", hash = "sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf"}, - {file = "SQLAlchemy-2.0.35-cp312-cp312-win_amd64.whl", hash = "sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f021d334f2ca692523aaf7bbf7592ceff70c8594fad853416a81d66b35e3abf9"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05c3f58cf91683102f2f0265c0db3bd3892e9eedabe059720492dbaa4f922da1"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:032d979ce77a6c2432653322ba4cbeabf5a6837f704d16fa38b5a05d8e21fa00"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:2e795c2f7d7249b75bb5f479b432a51b59041580d20599d4e112b5f2046437a3"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:cc32b2990fc34380ec2f6195f33a76b6cdaa9eecf09f0c9404b74fc120aef36f"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-win32.whl", hash = "sha256:9509c4123491d0e63fb5e16199e09f8e262066e58903e84615c301dde8fa2e87"}, - {file = "SQLAlchemy-2.0.35-cp37-cp37m-win_amd64.whl", hash = "sha256:3655af10ebcc0f1e4e06c5900bb33e080d6a1fa4228f502121f28a3b1753cde5"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4c31943b61ed8fdd63dfd12ccc919f2bf95eefca133767db6fbbd15da62078ec"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a62dd5d7cc8626a3634208df458c5fe4f21200d96a74d122c83bc2015b333bc1"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0630774b0977804fba4b6bbea6852ab56c14965a2b0c7fc7282c5f7d90a1ae72"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d625eddf7efeba2abfd9c014a22c0f6b3796e0ffb48f5d5ab106568ef01ff5a"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ada603db10bb865bbe591939de854faf2c60f43c9b763e90f653224138f910d9"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c41411e192f8d3ea39ea70e0fae48762cd11a2244e03751a98bd3c0ca9a4e936"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-win32.whl", hash = "sha256:d299797d75cd747e7797b1b41817111406b8b10a4f88b6e8fe5b5e59598b43b0"}, - {file = "SQLAlchemy-2.0.35-cp38-cp38-win_amd64.whl", hash = "sha256:0375a141e1c0878103eb3d719eb6d5aa444b490c96f3fedab8471c7f6ffe70ee"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccae5de2a0140d8be6838c331604f91d6fafd0735dbdcee1ac78fc8fbaba76b4"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a275a806f73e849e1c309ac11108ea1a14cd7058577aba962cd7190e27c9e3c"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:732e026240cdd1c1b2e3ac515c7a23820430ed94292ce33806a95869c46bd139"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:890da8cd1941fa3dab28c5bac3b9da8502e7e366f895b3b8e500896f12f94d11"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0d8326269dbf944b9201911b0d9f3dc524d64779a07518199a58384c3d37a44"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b76d63495b0508ab9fc23f8152bac63205d2a704cd009a2b0722f4c8e0cba8e0"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-win32.whl", hash = "sha256:69683e02e8a9de37f17985905a5eca18ad651bf592314b4d3d799029797d0eb3"}, - {file = "SQLAlchemy-2.0.35-cp39-cp39-win_amd64.whl", hash = "sha256:aee110e4ef3c528f3abbc3c2018c121e708938adeeff9006428dd7c8555e9b3f"}, - {file = "SQLAlchemy-2.0.35-py3-none-any.whl", hash = "sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1"}, - {file = "sqlalchemy-2.0.35.tar.gz", hash = "sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, + {file = 
"SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, + {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, + {file = 
"SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, + {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, + {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, + {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, ] [package.dependencies] @@ -8926,7 +8995,7 @@ aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] @@ -8962,13 +9031,13 @@ doc = ["sphinx"] [[package]] name = "starlette" -version = "0.39.2" +version = "0.41.0" description = "The little ASGI library that shines." 
optional = false python-versions = ">=3.8" files = [ - {file = "starlette-0.39.2-py3-none-any.whl", hash = "sha256:134dd6deb655a9775991d352312d53f1879775e5cc8a481f966e83416a2c3f71"}, - {file = "starlette-0.39.2.tar.gz", hash = "sha256:caaa3b87ef8518ef913dac4f073dea44e85f73343ad2bdc17941931835b2a26a"}, + {file = "starlette-0.41.0-py3-none-any.whl", hash = "sha256:a0193a3c413ebc9c78bff1c3546a45bb8c8bcb4a84cae8747d650a65bd37210a"}, + {file = "starlette-0.41.0.tar.gz", hash = "sha256:39cbd8768b107d68bfe1ff1672b38a2c38b49777de46d2a592841d58e3bf7c2a"}, ] [package.dependencies] @@ -8979,13 +9048,13 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7 [[package]] name = "storage3" -version = "0.8.1" +version = "0.8.2" description = "Supabase Storage client for Python." optional = false -python-versions = "<4.0,>=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "storage3-0.8.1-py3-none-any.whl", hash = "sha256:0b21205f43eaf0d1dd33bde6c6d0612f88524b7865f017d2ae9827e3f63d9cdc"}, - {file = "storage3-0.8.1.tar.gz", hash = "sha256:ea60b68b2221b3868ccc1a7f1294d57d0d9c51642cdc639d8115fe5d0adc8892"}, + {file = "storage3-0.8.2-py3-none-any.whl", hash = "sha256:f2e995b18c77a2a9265d1a33047d43e4d6abb11eb3ca5067959f68281c305de3"}, + {file = "storage3-0.8.2.tar.gz", hash = "sha256:db05d3fe8fb73bd30c814c4c4749664f37a5dfc78b629e8c058ef558c2b89f5a"}, ] [package.dependencies] @@ -9045,13 +9114,13 @@ typing-extensions = ">=4.12.2,<5.0.0" [[package]] name = "supafunc" -version = "0.6.1" +version = "0.6.2" description = "Library for Supabase Functions" optional = false -python-versions = "<4.0,>=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "supafunc-0.6.1-py3-none-any.whl", hash = "sha256:01aeeeb4bf429977664454a32c86418345140faf6d2e6eb0636d52e4547c5fbb"}, - {file = "supafunc-0.6.1.tar.gz", hash = "sha256:3c8761e3999336ccdb7550498a395fd08afc8469382f55ea56f7f640e5a909aa"}, + {file = "supafunc-0.6.2-py3-none-any.whl", hash = "sha256:101b30616b0a1ce8cf938eca1df362fa4cf1deacb0271f53ebbd674190fb0da5"}, + {file = "supafunc-0.6.2.tar.gz", hash = "sha256:c7dfa20db7182f7fe4ae436e94e05c06cd7ed98d697fed75d68c7b9792822adc"}, ] [package.dependencies] @@ -9131,13 +9200,13 @@ test = ["pytest", "tornado (>=4.5)", "typeguard"] [[package]] name = "tencentcloud-sdk-python-common" -version = "3.0.1250" +version = "3.0.1257" description = "Tencent Cloud Common SDK for Python" optional = false python-versions = "*" files = [ - {file = "tencentcloud-sdk-python-common-3.0.1250.tar.gz", hash = "sha256:97c15c3f2ffbde60550656eab3e9337d9e0ec8958a533f223c5d5caa2762b6e9"}, - {file = "tencentcloud_sdk_python_common-3.0.1250-py2.py3-none-any.whl", hash = "sha256:e369dee2d920ee365a8e2d314d563d243f2e73f5bc6bd2886f96534c9d00c3a7"}, + {file = "tencentcloud-sdk-python-common-3.0.1257.tar.gz", hash = "sha256:e10b155d598a60c43a491be10f40f7dae5774a2187d55f2da83bdb559434f3c4"}, + {file = "tencentcloud_sdk_python_common-3.0.1257-py2.py3-none-any.whl", hash = "sha256:f474a2969f3cbff91f45780f18bfbb90ab53f66c0085c4e9b4e07c2fcf0e71d9"}, ] [package.dependencies] @@ -9145,17 +9214,17 @@ requests = ">=2.16.0" [[package]] name = "tencentcloud-sdk-python-hunyuan" -version = "3.0.1250" +version = "3.0.1257" description = "Tencent Cloud Hunyuan SDK for Python" optional = false python-versions = "*" files = [ - {file = "tencentcloud-sdk-python-hunyuan-3.0.1250.tar.gz", hash = "sha256:ac95085edee2a95c69326b2fd6a0f61116fc5d214d5c8cf14a1b42bbb262dba8"}, - {file = 
"tencentcloud_sdk_python_hunyuan-3.0.1250-py2.py3-none-any.whl", hash = "sha256:caac95c47348639452a78d39cdcb87257f97cec3b52398e3be97a5b8c4c5e496"}, + {file = "tencentcloud-sdk-python-hunyuan-3.0.1257.tar.gz", hash = "sha256:4d38505089bed70dda1f806f8c4835f8a8c520efa86dcecfef444045c21b695d"}, + {file = "tencentcloud_sdk_python_hunyuan-3.0.1257-py2.py3-none-any.whl", hash = "sha256:c9089d3e49304c9c20e7465c82372b2cd234e67f63efdffb6798a4093b3a97c6"}, ] [package.dependencies] -tencentcloud-sdk-python-common = "3.0.1250" +tencentcloud-sdk-python-common = "3.0.1257" [[package]] name = "threadpoolctl" @@ -9414,12 +9483,12 @@ files = [ [[package]] name = "tos" -version = "2.7.1" +version = "2.7.2" description = "Volc TOS (Tinder Object Storage) SDK" optional = false python-versions = "*" files = [ - {file = "tos-2.7.1.tar.gz", hash = "sha256:4bccdbff3cfd63eb44648bb44862903708c4b3e790f0dd55c96305baaeece805"}, + {file = "tos-2.7.2.tar.gz", hash = "sha256:3c31257716785bca7b2cac51474ff32543cda94075a7b7aff70d769c15c7b7ed"}, ] [package.dependencies] @@ -9553,13 +9622,13 @@ typing-extensions = ">=3.7.4.3" [[package]] name = "types-requests" -version = "2.32.0.20240914" +version = "2.32.0.20241016" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.32.0.20240914.tar.gz", hash = "sha256:2850e178db3919d9bf809e434eef65ba49d0e7e33ac92d588f4a5e295fffd405"}, - {file = "types_requests-2.32.0.20240914-py3-none-any.whl", hash = "sha256:59c2f673eb55f32a99b2894faf6020e1a9f4a402ad0f192bfee0b64469054310"}, + {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, + {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, ] [package.dependencies] @@ -9691,13 +9760,13 @@ files = [ [[package]] name = "unstructured" -version = "0.10.30" +version = "0.16.1" description = "A library that prepares raw documents for downstream ML tasks." 
optional = false -python-versions = ">=3.7.0" +python-versions = "<3.13,>=3.9.0" files = [ - {file = "unstructured-0.10.30-py3-none-any.whl", hash = "sha256:0615f14daa37450e9c0fcf3c3fd178c3a06b6b8d006a36d1a5e54dbe487aa6b6"}, - {file = "unstructured-0.10.30.tar.gz", hash = "sha256:a86c3d15c572a28322d83cb5ecf0ac7a24f1c36864fb7c68df096de8a1acc106"}, + {file = "unstructured-0.16.1-py3-none-any.whl", hash = "sha256:7512281a2917809a563cbb186876b77d5a361e1f3089eca61e9219aecd1218f9"}, + {file = "unstructured-0.16.1.tar.gz", hash = "sha256:03608b5189a004412cd618ce2d083ff926c56dbbca41b41c92e08ffa9e2bac3a"}, ] [package.dependencies] @@ -9707,71 +9776,84 @@ chardet = "*" dataclasses-json = "*" emoji = "*" filetype = "*" +html5lib = "*" langdetect = "*" lxml = "*" markdown = {version = "*", optional = true, markers = "extra == \"md\""} -msg-parser = {version = "*", optional = true, markers = "extra == \"msg\""} nltk = "*" -numpy = "*" +numpy = "<2" +psutil = "*" pypandoc = {version = "*", optional = true, markers = "extra == \"epub\""} -python-docx = {version = ">=1.1.0", optional = true, markers = "extra == \"docx\""} +python-docx = {version = ">=1.1.2", optional = true, markers = "extra == \"docx\""} python-iso639 = "*" python-magic = "*" -python-pptx = {version = "<=0.6.23", optional = true, markers = "extra == \"ppt\" or extra == \"pptx\""} +python-oxmsg = "*" +python-pptx = {version = ">=1.0.1", optional = true, markers = "extra == \"ppt\" or extra == \"pptx\""} rapidfuzz = "*" requests = "*" -tabulate = "*" +tqdm = "*" typing-extensions = "*" +unstructured-client = "*" +wrapt = "*" [package.extras] -airtable = ["pyairtable"] -all-docs = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.1.0)", "python-pptx (<=0.6.23)", "unstructured-inference (==0.7.11)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] -azure = ["adlfs", "fsspec (==2023.9.1)"] -azure-cognitive-search = ["azure-search-documents"] -bedrock = ["boto3", "langchain"] -biomed = ["bs4"] -box = ["boxfs", "fsspec (==2023.9.1)"] -confluence = ["atlassian-python-api"] +all-docs = ["effdet", "google-cloud-vision", "markdown", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pi-heif", "pikepdf", "pypandoc", "pypdf", "python-docx (>=1.1.2)", "python-pptx (>=1.0.1)", "unstructured-inference (==0.8.0)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] csv = ["pandas"] -delta-table = ["deltalake", "fsspec (==2023.9.1)"] -discord = ["discord-py"] -doc = ["python-docx (>=1.1.0)"] -docx = ["python-docx (>=1.1.0)"] -dropbox = ["dropboxdrivefs", "fsspec (==2023.9.1)"] -elasticsearch = ["elasticsearch", "jq"] -embed-huggingface = ["huggingface", "langchain", "sentence-transformers"] +doc = ["python-docx (>=1.1.2)"] +docx = ["python-docx (>=1.1.2)"] epub = ["pypandoc"] -gcs = ["bs4", "fsspec (==2023.9.1)", "gcsfs"] -github = ["pygithub (>1.58.0)"] -gitlab = ["python-gitlab"] -google-drive = ["google-api-python-client"] huggingface = ["langdetect", "sacremoses", "sentencepiece", "torch", "transformers"] -image = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.11)", "unstructured.pytesseract (>=0.3.12)"] -jira = ["atlassian-python-api"] -local-inference = ["markdown", "msg-parser", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx (>=1.1.0)", "python-pptx (<=0.6.23)", "unstructured-inference (==0.7.11)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +image = ["effdet", "google-cloud-vision", 
"onnx", "pdf2image", "pdfminer.six", "pi-heif", "pikepdf", "pypdf", "unstructured-inference (==0.8.0)", "unstructured.pytesseract (>=0.3.12)"] +local-inference = ["effdet", "google-cloud-vision", "markdown", "networkx", "onnx", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pi-heif", "pikepdf", "pypandoc", "pypdf", "python-docx (>=1.1.2)", "python-pptx (>=1.0.1)", "unstructured-inference (==0.8.0)", "unstructured.pytesseract (>=0.3.12)", "xlrd"] md = ["markdown"] -msg = ["msg-parser"] -notion = ["htmlBuilder", "notion-client"] -odt = ["pypandoc", "python-docx (>=1.1.0)"] -onedrive = ["Office365-REST-Python-Client (<2.4.3)", "bs4", "msal"] -openai = ["langchain", "openai", "tiktoken"] +odt = ["pypandoc", "python-docx (>=1.1.2)"] org = ["pypandoc"] -outlook = ["Office365-REST-Python-Client (<2.4.3)", "msal"] -paddleocr = ["unstructured.paddleocr (==2.6.1.3)"] -pdf = ["onnx", "pdf2image", "pdfminer.six", "unstructured-inference (==0.7.11)", "unstructured.pytesseract (>=0.3.12)"] -ppt = ["python-pptx (<=0.6.23)"] -pptx = ["python-pptx (<=0.6.23)"] -reddit = ["praw"] +paddleocr = ["paddlepaddle (==3.0.0b1)", "unstructured.paddleocr (==2.8.1.0)"] +pdf = ["effdet", "google-cloud-vision", "onnx", "pdf2image", "pdfminer.six", "pi-heif", "pikepdf", "pypdf", "unstructured-inference (==0.8.0)", "unstructured.pytesseract (>=0.3.12)"] +ppt = ["python-pptx (>=1.0.1)"] +pptx = ["python-pptx (>=1.0.1)"] rst = ["pypandoc"] rtf = ["pypandoc"] -s3 = ["fsspec (==2023.9.1)", "s3fs"] -salesforce = ["simple-salesforce"] -sharepoint = ["Office365-REST-Python-Client (<2.4.3)", "msal"] -slack = ["slack-sdk"] tsv = ["pandas"] -wikipedia = ["wikipedia"] xlsx = ["networkx", "openpyxl", "pandas", "xlrd"] +[[package]] +name = "unstructured-client" +version = "0.26.1" +description = "Python Client SDK for Unstructured API" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "unstructured_client-0.26.1-py3-none-any.whl", hash = "sha256:b8b839d477122bab3f37242cbe44b39f7eb7b564b07b53500321f953710119b6"}, + {file = "unstructured_client-0.26.1.tar.gz", hash = "sha256:907cceb470529b45b0fddb2d0f1bbf4d6568f347c757ab68639a7bb620ec2484"}, +] + +[package.dependencies] +cryptography = ">=3.1" +eval-type-backport = ">=0.2.0,<0.3.0" +httpx = ">=0.27.0" +jsonpath-python = ">=1.0.6,<2.0.0" +nest-asyncio = ">=1.6.0" +pydantic = ">=2.9.0,<2.10.0" +pypdf = ">=4.0" +python-dateutil = "2.8.2" +requests-toolbelt = ">=1.0.0" +typing-inspect = ">=0.9.0,<0.10.0" + +[[package]] +name = "upstash-vector" +version = "0.6.0" +description = "Serverless Vector SDK from Upstash" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "upstash_vector-0.6.0-py3-none-any.whl", hash = "sha256:d0bdad7765b8a7f5c205b7a9c81ca4b9a4cee3ee4952afc7d5ea5fb76c3f3c3c"}, + {file = "upstash_vector-0.6.0.tar.gz", hash = "sha256:a716ed4d0251362208518db8b194158a616d37d1ccbb1155f619df690599e39b"}, +] + +[package.dependencies] +httpx = ">=0.23.0,<1" + [[package]] name = "uritemplate" version = "4.1.1" @@ -9802,13 +9884,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.31.1" +version = "0.32.0" description = "The lightning-fast ASGI server." 
optional = false python-versions = ">=3.8" files = [ - {file = "uvicorn-0.31.1-py3-none-any.whl", hash = "sha256:adc42d9cac80cf3e51af97c1851648066841e7cfb6993a4ca8de29ac1548ed41"}, - {file = "uvicorn-0.31.1.tar.gz", hash = "sha256:f5167919867b161b7bcaf32646c6a94cdbd4c3aa2eb5c17d36bb9aa5cfd8c493"}, + {file = "uvicorn-0.32.0-py3-none-any.whl", hash = "sha256:60b8f3a5ac027dcd31448f411ced12b5ef452c646f76f02f8cc3f25d8d26fd82"}, + {file = "uvicorn-0.32.0.tar.gz", hash = "sha256:f78b36b143c16f54ccdb8190d0a26b5f1901fe5a3c777e1ab29f26391af8551e"}, ] [package.dependencies] @@ -10418,13 +10500,13 @@ files = [ [[package]] name = "xmltodict" -version = "0.14.1" +version = "0.14.2" description = "Makes working with XML feel like you are working with JSON" optional = false python-versions = ">=3.6" files = [ - {file = "xmltodict-0.14.1-py2.py3-none-any.whl", hash = "sha256:3ef4a7b71c08f19047fcbea572e1d7f4207ab269da1565b5d40e9823d3894e63"}, - {file = "xmltodict-0.14.1.tar.gz", hash = "sha256:338c8431e4fc554517651972d62f06958718f6262b04316917008e8fd677a6b0"}, + {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"}, + {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"}, ] [[package]] @@ -10534,13 +10616,13 @@ multidict = ">=4.0" [[package]] name = "yfinance" -version = "0.2.44" +version = "0.2.46" description = "Download market data from Yahoo! Finance API" optional = false python-versions = "*" files = [ - {file = "yfinance-0.2.44-py2.py3-none-any.whl", hash = "sha256:fdc18791662f286539f7a08dccd7e8191b1ca509814f7b0faac264623bebe8a8"}, - {file = "yfinance-0.2.44.tar.gz", hash = "sha256:532ad1644ee9cf4024ec0d9cade0cc073664ec0d140cc6c22a0cce8a9118b523"}, + {file = "yfinance-0.2.46-py2.py3-none-any.whl", hash = "sha256:371860d532cae76605195678a540e29382bfd0607f8aa61695f753e714916ffc"}, + {file = "yfinance-0.2.46.tar.gz", hash = "sha256:a6e2a128915532a54b8f6614cfdb7a8c242d2386e05f95c89b15865b5d9c0352"}, ] [package.dependencies] @@ -10617,48 +10699,48 @@ test = ["zope.testrunner"] [[package]] name = "zope-interface" -version = "7.1.0" +version = "7.1.1" description = "Interfaces for Python" optional = false python-versions = ">=3.8" files = [ - {file = "zope.interface-7.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2bd9e9f366a5df08ebbdc159f8224904c1c5ce63893984abb76954e6fbe4381a"}, - {file = "zope.interface-7.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:661d5df403cd3c5b8699ac480fa7f58047a3253b029db690efa0c3cf209993ef"}, - {file = "zope.interface-7.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91b6c30689cfd87c8f264acb2fc16ad6b3c72caba2aec1bf189314cf1a84ca33"}, - {file = "zope.interface-7.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b6a4924f5bad9fe21d99f66a07da60d75696a136162427951ec3cb223a5570d"}, - {file = "zope.interface-7.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a3c00b35f6170be5454b45abe2719ea65919a2f09e8a6e7b1362312a872cd3"}, - {file = "zope.interface-7.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:b936d61dbe29572fd2cfe13e30b925e5383bed1aba867692670f5a2a2eb7b4e9"}, - {file = "zope.interface-7.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ac20581fc6cd7c754f6dff0ae06fedb060fa0e9ea6309d8be8b2701d9ea51c4"}, - {file = 
"zope.interface-7.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:848b6fa92d7c8143646e64124ed46818a0049a24ecc517958c520081fd147685"}, - {file = "zope.interface-7.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec1ef1fdb6f014d5886b97e52b16d0f852364f447d2ab0f0c6027765777b6667"}, - {file = "zope.interface-7.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bcff5c09d0215f42ba64b49205a278e44413d9bf9fa688fd9e42bfe472b5f4f"}, - {file = "zope.interface-7.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07add15de0cc7e69917f7d286b64d54125c950aeb43efed7a5ea7172f000fbc1"}, - {file = "zope.interface-7.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:9940d5bc441f887c5f375ec62bcf7e7e495a2d5b1da97de1184a88fb567f06af"}, - {file = "zope.interface-7.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f245d039f72e6f802902375755846f5de1ee1e14c3e8736c078565599bcab621"}, - {file = "zope.interface-7.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6159e767d224d8f18deff634a1d3722e68d27488c357f62ebeb5f3e2f5288b1f"}, - {file = "zope.interface-7.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e956b1fd7f3448dd5e00f273072e73e50dfafcb35e4227e6d5af208075593c9"}, - {file = "zope.interface-7.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff115ef91c0eeac69cd92daeba36a9d8e14daee445b504eeea2b1c0b55821984"}, - {file = "zope.interface-7.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec001798ab62c3fc5447162bf48496ae9fba02edc295a9e10a0b0c639a6452e"}, - {file = "zope.interface-7.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:124149e2d42067b9c6597f4dafdc7a0983d0163868f897b7bb5dc850b14f9a87"}, - {file = "zope.interface-7.1.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:9733a9a0f94ef53d7aa64661811b20875b5bc6039034c6e42fb9732170130573"}, - {file = "zope.interface-7.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5fcf379b875c610b5a41bc8a891841533f98de0520287d7f85e25386cd10d3e9"}, - {file = "zope.interface-7.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0a45b5af9f72c805ee668d1479480ca85169312211bed6ed18c343e39307d5f"}, - {file = "zope.interface-7.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af4a12b459a273b0b34679a5c3dc5e34c1847c3dd14a628aa0668e19e638ea2"}, - {file = "zope.interface-7.1.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a735f82d2e3ed47ca01a20dfc4c779b966b16352650a8036ab3955aad151ed8a"}, - {file = "zope.interface-7.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:5501e772aff595e3c54266bc1bfc5858e8f38974ce413a8f1044aae0f32a83a3"}, - {file = "zope.interface-7.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec59fe53db7d32abb96c6d4efeed84aab4a7c38c62d7a901a9b20c09dd936e7a"}, - {file = "zope.interface-7.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e53c291debef523b09e1fe3dffe5f35dde164f1c603d77f770b88a1da34b7ed6"}, - {file = "zope.interface-7.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:711eebc77f2092c6a8b304bad0b81a6ce3cf5490b25574e7309fbc07d881e3af"}, - {file = 
"zope.interface-7.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a00ead2e24c76436e1b457a5132d87f83858330f6c923640b7ef82d668525d1"}, - {file = "zope.interface-7.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e28ea0bc4b084fc93a483877653a033062435317082cdc6388dec3438309faf"}, - {file = "zope.interface-7.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:27cfb5205d68b12682b6e55ab8424662d96e8ead19550aad0796b08dd2c9a45e"}, - {file = "zope.interface-7.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e3e48f3dea21c147e1b10c132016cb79af1159facca9736d231694ef5a740a8"}, - {file = "zope.interface-7.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a99240b1d02dc469f6afbe7da1bf617645e60290c272968f4e53feec18d7dce8"}, - {file = "zope.interface-7.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc8a318162123eddbdf22fcc7b751288ce52e4ad096d3766ff1799244352449d"}, - {file = "zope.interface-7.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7b25db127db3e6b597c5f74af60309c4ad65acd826f89609662f0dc33a54728"}, - {file = "zope.interface-7.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a29ac607e970b5576547f0e3589ec156e04de17af42839eedcf478450687317"}, - {file = "zope.interface-7.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:a14c9decf0eb61e0892631271d500c1e306c7b6901c998c7035e194d9150fdd1"}, - {file = "zope_interface-7.1.0.tar.gz", hash = "sha256:3f005869a1a05e368965adb2075f97f8ee9a26c61898a9e52a9764d93774f237"}, + {file = "zope.interface-7.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6650bd56ef350d37c8baccfd3ee8a0483ed6f8666e641e4b9ae1a1827b79f9e5"}, + {file = "zope.interface-7.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84e87eba6b77a3af187bae82d8de1a7c208c2a04ec9f6bd444fd091b811ad92e"}, + {file = "zope.interface-7.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c4e1b4c06d9abd1037c088dae1566c85f344a3e6ae4350744c3f7f7259d9c67"}, + {file = "zope.interface-7.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7cd5e3d910ac87652a09f6e5db8e41bc3b49cf08ddd2d73d30afc644801492cd"}, + {file = "zope.interface-7.1.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca95594d936ee349620900be5b46c0122a1ff6ce42d7d5cb2cf09dc84071ef16"}, + {file = "zope.interface-7.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:ad339509dcfbbc99bf8e147db6686249c4032f26586699ec4c82f6e5909c9fe2"}, + {file = "zope.interface-7.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3e59f175e868f856a77c0a77ba001385c377df2104fdbda6b9f99456a01e102a"}, + {file = "zope.interface-7.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0de23bcb93401994ea00bc5c677ef06d420340ac0a4e9c10d80e047b9ce5af3f"}, + {file = "zope.interface-7.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdb7e7e5524b76d3ec037c1d81a9e2c7457b240fd4cb0a2476b65c3a5a6c81f"}, + {file = "zope.interface-7.1.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3603ef82a9920bd0bfb505423cb7e937498ad971ad5a6141841e8f76d2fd5446"}, + {file = 
"zope.interface-7.1.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1d52d052355e0c5c89e0630dd2ff7c0b823fd5f56286a663e92444761b35e25"}, + {file = "zope.interface-7.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:179ad46ece518c9084cb272e4a69d266b659f7f8f48e51706746c2d8a426433e"}, + {file = "zope.interface-7.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e6503534b52bb1720ace9366ee30838a58a3413d3e197512f3338c8f34b5d89d"}, + {file = "zope.interface-7.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f85b290e5b8b11814efb0d004d8ce6c9a483c35c462e8d9bf84abb93e79fa770"}, + {file = "zope.interface-7.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d029fac6a80edae80f79c37e5e3abfa92968fe921886139b3ee470a1b177321a"}, + {file = "zope.interface-7.1.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5836b8fb044c6e75ba34dfaabc602493019eadfa0faf6ff25f4c4c356a71a853"}, + {file = "zope.interface-7.1.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7395f13533318f150ee72adb55b29284b16e73b6d5f02ab21f173b3e83f242b8"}, + {file = "zope.interface-7.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:1d0e23c6b746eb8ce04573cc47bcac60961ac138885d207bd6f57e27a1431ae8"}, + {file = "zope.interface-7.1.1-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:9fad9bd5502221ab179f13ea251cb30eef7cf65023156967f86673aff54b53a0"}, + {file = "zope.interface-7.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:55c373becbd36a44d0c9be1d5271422fdaa8562d158fb44b4192297b3c67096c"}, + {file = "zope.interface-7.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed1df8cc01dd1e3970666a7370b8bfc7457371c58ba88c57bd5bca17ab198053"}, + {file = "zope.interface-7.1.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99c14f0727c978639139e6cad7a60e82b7720922678d75aacb90cf4ef74a068c"}, + {file = "zope.interface-7.1.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b1eed7670d564f1025d7cda89f99f216c30210e42e95de466135be0b4a499d9"}, + {file = "zope.interface-7.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:3defc925c4b22ac1272d544a49c6ba04c3eefcce3200319ee1be03d9270306dd"}, + {file = "zope.interface-7.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8d0fe45be57b5219aa4b96e846631c04615d5ef068146de5a02ccd15c185321f"}, + {file = "zope.interface-7.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bcbeb44fc16e0078b3b68a95e43f821ae34dcbf976dde6985141838a5f23dd3d"}, + {file = "zope.interface-7.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8e7b05dc6315a193cceaec071cc3cf1c180cea28808ccded0b1283f1c38ba73"}, + {file = "zope.interface-7.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d553e02b68c0ea5a226855f02edbc9eefd99f6a8886fa9f9bdf999d77f46585"}, + {file = "zope.interface-7.1.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81744a7e61b598ebcf4722ac56a7a4f50502432b5b4dc7eb29075a89cf82d029"}, + {file = "zope.interface-7.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7720322763aceb5e0a7cadcc38c67b839efe599f0887cbf6c003c55b1458c501"}, + {file = "zope.interface-7.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:1a2ed0852c25950cf430067f058f8d98df6288502ac313861d9803fe7691a9b3"}, + {file = "zope.interface-7.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9595e478047ce752b35cfa221d7601a5283ccdaab40422e0dc1d4a334c70f580"}, + {file = "zope.interface-7.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2317e1d4dba68203a5227ea3057f9078ec9376275f9700086b8f0ffc0b358e1b"}, + {file = "zope.interface-7.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6821ef9870f32154da873fcde439274f99814ea452dd16b99fa0b66345c4b6b"}, + {file = "zope.interface-7.1.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:190eeec67e023d5aac54d183fa145db0b898664234234ac54643a441da434616"}, + {file = "zope.interface-7.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:d17e7fc814eaab93409b80819fd6d30342844345c27f3bc3c4b43c2425a8d267"}, + {file = "zope.interface-7.1.1.tar.gz", hash = "sha256:4284d664ef0ff7b709836d4de7b13d80873dc5faeffc073abdb280058bfac5e3"}, ] [package.dependencies] @@ -10784,4 +10866,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "51f048197baebf9ffdc393e5990b9a90185bc5ff515b8b5d2d9b72de900cf6e2" +content-hash = "1b268122d3d4771ba219f0e983322e0454b7b8644dba35da38d7d950d489e1ba" diff --git a/api/pyproject.toml b/api/pyproject.toml index ec5266f926..a549601535 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -87,14 +87,6 @@ ignore = [ "tests/*" = [ "F811", # redefined-while-unused "F401", # unused-import - "PT001", # missing-function-docstring - "PT004", # missing-parameter-docstring -] -"core/rag/extractor/word_extractor.py" = [ - "RUF100", # Unused `noqa` directive -] -"core/tools/provider/builtin/gitlab/tools/gitlab_commits.py" = [ - "PLR1714", # Consider merging multiple comparisons ] [tool.ruff.lint.pyflakes] @@ -158,13 +150,13 @@ nomic = "~3.1.2" novita-client = "~0.5.7" numpy = "~1.26.4" oci = "~2.135.1" -openai = "~1.51.2" +openai = "~1.52.0" openpyxl = "~3.1.5" pandas = { version = "~2.2.2", extras = ["performance", "excel"] } psycopg2-binary = "~2.9.6" pycryptodome = "3.19.1" -pydantic = "~2.8.2" -pydantic-settings = "~2.4.0" +pydantic = "~2.9.2" +pydantic-settings = "~2.6.0" pydantic_extra_types = "~2.9.0" pyjwt = "~2.8.0" pypdfium2 = "~4.17.0" @@ -180,11 +172,12 @@ sagemaker = "2.231.0" scikit-learn = "~1.5.1" sentry-sdk = { version = "~1.44.1", extras = ["flask"] } sqlalchemy = "~2.0.29" +starlette = "0.41.0" tencentcloud-sdk-python-hunyuan = "~3.0.1158" tiktoken = "~0.8.0" tokenizers = "~0.15.0" transformers = "~4.35.0" -unstructured = { version = "~0.10.27", extras = ["docx", "epub", "md", "msg", "ppt", "pptx"] } +unstructured = { version = "~0.16.1", extras = ["docx", "epub", "md", "msg", "ppt", "pptx"] } validators = "0.21.0" volcengine-python-sdk = {extras = ["ark"], version = "~1.0.98"} websocket-client = "~1.7.0" @@ -214,8 +207,9 @@ duckduckgo-search = "~6.3.0" jsonpath-ng = "1.6.1" matplotlib = "~3.8.2" newspaper3k = "0.2.8" -nltk = "3.8.1" +nltk = "3.9.1" numexpr = "~2.9.0" +pydub = "~0.25.1" qrcode = "~7.4.2" twilio = "~9.0.4" vanna = { version = "0.7.3", extras = ["postgres", "mysql", "clickhouse", "duckdb"] } @@ -255,6 +249,7 @@ pymochow = "1.3.1" qdrant-client = "1.7.3" tcvectordb = "1.3.2" tidb-vector = "0.0.9" +upstash-vector = "0.6.0" volcengine-compat = "~1.0.156" weaviate-client = "~3.21.0" diff --git a/api/schedule/create_tidb_serverless_task.py 
b/api/schedule/create_tidb_serverless_task.py new file mode 100644 index 0000000000..42d6c04beb --- /dev/null +++ b/api/schedule/create_tidb_serverless_task.py @@ -0,0 +1,56 @@ +import time + +import click + +import app +from configs import dify_config +from core.rag.datasource.vdb.tidb_on_qdrant.tidb_service import TidbService +from extensions.ext_database import db +from models.dataset import TidbAuthBinding + + +@app.celery.task(queue="dataset") +def create_tidb_serverless_task(): + click.echo(click.style("Start create tidb serverless task.", fg="green")) + tidb_serverless_number = dify_config.TIDB_SERVERLESS_NUMBER + start_at = time.perf_counter() + while True: + try: + # check the number of idle tidb serverless + idle_tidb_serverless_number = TidbAuthBinding.query.filter(TidbAuthBinding.active == False).count() + if idle_tidb_serverless_number >= tidb_serverless_number: + break + # create tidb serverless + iterations_per_thread = 20 + create_clusters(iterations_per_thread) + + except Exception as e: + click.echo(click.style(f"Error: {e}", fg="red")) + break + + end_at = time.perf_counter() + click.echo(click.style("Create tidb serverless task success latency: {}".format(end_at - start_at), fg="green")) + + +def create_clusters(batch_size): + try: + new_clusters = TidbService.batch_create_tidb_serverless_cluster( + batch_size, + dify_config.TIDB_PROJECT_ID, + dify_config.TIDB_API_URL, + dify_config.TIDB_IAM_API_URL, + dify_config.TIDB_PUBLIC_KEY, + dify_config.TIDB_PRIVATE_KEY, + dify_config.TIDB_REGION, + ) + for new_cluster in new_clusters: + tidb_auth_binding = TidbAuthBinding( + cluster_id=new_cluster["cluster_id"], + cluster_name=new_cluster["cluster_name"], + account=new_cluster["account"], + password=new_cluster["password"], + ) + db.session.add(tidb_auth_binding) + db.session.commit() + except Exception as e: + click.echo(click.style(f"Error: {e}", fg="red")) diff --git a/api/schedule/update_tidb_serverless_status_task.py b/api/schedule/update_tidb_serverless_status_task.py new file mode 100644 index 0000000000..07eca3173b --- /dev/null +++ b/api/schedule/update_tidb_serverless_status_task.py @@ -0,0 +1,51 @@ +import time + +import click + +import app +from configs import dify_config +from core.rag.datasource.vdb.tidb_on_qdrant.tidb_service import TidbService +from models.dataset import TidbAuthBinding + + +@app.celery.task(queue="dataset") +def update_tidb_serverless_status_task(): + click.echo(click.style("Update tidb serverless status task.", fg="green")) + start_at = time.perf_counter() + while True: + try: + # check the number of idle tidb serverless + tidb_serverless_list = TidbAuthBinding.query.filter( + TidbAuthBinding.active == False, TidbAuthBinding.status == "CREATING" + ).all() + if len(tidb_serverless_list) == 0: + break + # update tidb serverless status + iterations_per_thread = 20 + update_clusters(tidb_serverless_list) + + except Exception as e: + click.echo(click.style(f"Error: {e}", fg="red")) + break + + end_at = time.perf_counter() + click.echo( + click.style("Update tidb serverless status task success latency: {}".format(end_at - start_at), fg="green") + ) + + +def update_clusters(tidb_serverless_list: list[TidbAuthBinding]): + try: + # batch 20 + for i in range(0, len(tidb_serverless_list), 20): + items = tidb_serverless_list[i : i + 20] + TidbService.batch_update_tidb_serverless_cluster_status( + items, + dify_config.TIDB_PROJECT_ID, + dify_config.TIDB_API_URL, + dify_config.TIDB_IAM_API_URL, + dify_config.TIDB_PUBLIC_KEY, + dify_config.TIDB_PRIVATE_KEY, 
+ ) + except Exception as e: + click.echo(click.style(f"Error: {e}", fg="red")) diff --git a/api/services/account_service.py b/api/services/account_service.py index eda6011aef..27b1540c8d 100644 --- a/api/services/account_service.py +++ b/api/services/account_service.py @@ -1,6 +1,7 @@ import base64 import json import logging +import random import secrets import uuid from datetime import datetime, timedelta, timezone @@ -34,7 +35,9 @@ from models.model import DifySetup from services.errors.account import ( AccountAlreadyInTenantError, AccountLoginError, + AccountNotFoundError, AccountNotLinkTenantError, + AccountPasswordError, AccountRegisterError, CannotOperateSelfError, CurrentPasswordIncorrectError, @@ -42,10 +45,12 @@ from services.errors.account import ( LinkAccountIntegrateError, MemberNotInTenantError, NoPermissionError, - RateLimitExceededError, RoleAlreadyAssignedError, TenantNotFoundError, ) +from services.errors.workspace import WorkSpaceNotAllowedCreateError +from services.feature_service import FeatureService +from tasks.mail_email_code_login import send_email_code_login_mail_task from tasks.mail_invite_member_task import send_invite_member_mail_task from tasks.mail_reset_password_task import send_reset_password_mail_task @@ -61,7 +66,11 @@ REFRESH_TOKEN_EXPIRY = timedelta(days=30) class AccountService: - reset_password_rate_limiter = RateLimiter(prefix="reset_password_rate_limit", max_attempts=5, time_window=60 * 60) + reset_password_rate_limiter = RateLimiter(prefix="reset_password_rate_limit", max_attempts=1, time_window=60 * 1) + email_code_login_rate_limiter = RateLimiter( + prefix="email_code_login_rate_limit", max_attempts=1, time_window=60 * 1 + ) + LOGIN_MAX_ERROR_LIMITS = 5 @staticmethod def _get_refresh_token_key(refresh_token: str) -> str: @@ -89,8 +98,8 @@ class AccountService: if not account: return None - if account.status in {AccountStatus.BANNED.value, AccountStatus.CLOSED.value}: - raise Unauthorized("Account is banned or closed.") + if account.status == AccountStatus.BANNED.value: + raise Unauthorized("Account is banned.") current_tenant = TenantAccountJoin.query.filter_by(account_id=account.id, current=True).first() if current_tenant: @@ -127,23 +136,34 @@ class AccountService: return token @staticmethod - def authenticate(email: str, password: str) -> Account: + def authenticate(email: str, password: str, invite_token: Optional[str] = None) -> Account: """authenticate account with email and password""" account = Account.query.filter_by(email=email).first() if not account: - raise AccountLoginError("Invalid email or password.") + raise AccountNotFoundError() - if account.status in {AccountStatus.BANNED.value, AccountStatus.CLOSED.value}: - raise AccountLoginError("Account is banned or closed.") + if account.status == AccountStatus.BANNED.value: + raise AccountLoginError("Account is banned.") + + if password and invite_token and account.password is None: + # if invite_token is valid, set password and password_salt + salt = secrets.token_bytes(16) + base64_salt = base64.b64encode(salt).decode() + password_hashed = hash_password(password, salt) + base64_password_hashed = base64.b64encode(password_hashed).decode() + account.password = base64_password_hashed + account.password_salt = base64_salt + + if account.password is None or not compare_password(password, account.password, account.password_salt): + raise AccountPasswordError("Invalid email or password.") if account.status == AccountStatus.PENDING.value: account.status = AccountStatus.ACTIVE.value 
account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None) - db.session.commit() - if account.password is None or not compare_password(password, account.password, account.password_salt): - raise AccountLoginError("Invalid email or password.") + db.session.commit() + return account @staticmethod @@ -169,9 +189,18 @@ class AccountService: @staticmethod def create_account( - email: str, name: str, interface_language: str, password: Optional[str] = None, interface_theme: str = "light" + email: str, + name: str, + interface_language: str, + password: Optional[str] = None, + interface_theme: str = "light", + is_setup: Optional[bool] = False, ) -> Account: """create account""" + if not FeatureService.get_system_features().is_allow_register and not is_setup: + from controllers.console.error import NotAllowedRegister + + raise NotAllowedRegister() account = Account() account.email = email account.name = name @@ -198,6 +227,19 @@ class AccountService: db.session.commit() return account + @staticmethod + def create_account_and_tenant( + email: str, name: str, interface_language: str, password: Optional[str] = None + ) -> Account: + """create account""" + account = AccountService.create_account( + email=email, name=name, interface_language=interface_language, password=password + ) + + TenantService.create_owner_tenant_if_not_exist(account=account) + + return account + @staticmethod def link_account_integrate(provider: str, open_id: str, account: Account) -> None: """Link account integrate""" @@ -256,6 +298,10 @@ class AccountService: if ip_address: AccountService.update_login_info(account=account, ip_address=ip_address) + if account.status == AccountStatus.PENDING.value: + account.status = AccountStatus.ACTIVE.value + db.session.commit() + access_token = AccountService.get_account_jwt_token(account=account) refresh_token = _generate_refresh_token() @@ -294,13 +340,29 @@ class AccountService: return AccountService.load_user(account_id) @classmethod - def send_reset_password_email(cls, account): - if cls.reset_password_rate_limiter.is_rate_limited(account.email): - raise RateLimitExceededError(f"Rate limit exceeded for email: {account.email}. 
Please try again later.") + def send_reset_password_email( + cls, + account: Optional[Account] = None, + email: Optional[str] = None, + language: Optional[str] = "en-US", + ): + account_email = account.email if account else email - token = TokenManager.generate_token(account, "reset_password") - send_reset_password_mail_task.delay(language=account.interface_language, to=account.email, token=token) - cls.reset_password_rate_limiter.increment_rate_limit(account.email) + if cls.reset_password_rate_limiter.is_rate_limited(account_email): + from controllers.console.auth.error import PasswordResetRateLimitExceededError + + raise PasswordResetRateLimitExceededError() + + code = "".join([str(random.randint(0, 9)) for _ in range(6)]) + token = TokenManager.generate_token( + account=account, email=email, token_type="reset_password", additional_data={"code": code} + ) + send_reset_password_mail_task.delay( + language=language, + to=account_email, + code=code, + ) + cls.reset_password_rate_limiter.increment_rate_limit(account_email) return token @classmethod @@ -311,11 +373,125 @@ class AccountService: def get_reset_password_data(cls, token: str) -> Optional[dict[str, Any]]: return TokenManager.get_token_data(token, "reset_password") + @classmethod + def send_email_code_login_email( + cls, account: Optional[Account] = None, email: Optional[str] = None, language: Optional[str] = "en-US" + ): + if cls.email_code_login_rate_limiter.is_rate_limited(email): + from controllers.console.auth.error import EmailCodeLoginRateLimitExceededError + + raise EmailCodeLoginRateLimitExceededError() + + code = "".join([str(random.randint(0, 9)) for _ in range(6)]) + token = TokenManager.generate_token( + account=account, email=email, token_type="email_code_login", additional_data={"code": code} + ) + send_email_code_login_mail_task.delay( + language=language, + to=account.email if account else email, + code=code, + ) + cls.email_code_login_rate_limiter.increment_rate_limit(email) + return token + + @classmethod + def get_email_code_login_data(cls, token: str) -> Optional[dict[str, Any]]: + return TokenManager.get_token_data(token, "email_code_login") + + @classmethod + def revoke_email_code_login_token(cls, token: str): + TokenManager.revoke_token(token, "email_code_login") + + @classmethod + def get_user_through_email(cls, email: str): + account = db.session.query(Account).filter(Account.email == email).first() + if not account: + return None + + if account.status == AccountStatus.BANNED.value: + raise Unauthorized("Account is banned.") + + return account + + @staticmethod + def add_login_error_rate_limit(email: str) -> None: + key = f"login_error_rate_limit:{email}" + count = redis_client.get(key) + if count is None: + count = 0 + count = int(count) + 1 + redis_client.setex(key, 60 * 60 * 24, count) + + @staticmethod + def is_login_error_rate_limit(email: str) -> bool: + key = f"login_error_rate_limit:{email}" + count = redis_client.get(key) + if count is None: + return False + + count = int(count) + if count > AccountService.LOGIN_MAX_ERROR_LIMITS: + return True + return False + + @staticmethod + def reset_login_error_rate_limit(email: str): + key = f"login_error_rate_limit:{email}" + redis_client.delete(key) + + @staticmethod + def is_email_send_ip_limit(ip_address: str): + minute_key = f"email_send_ip_limit_minute:{ip_address}" + freeze_key = f"email_send_ip_limit_freeze:{ip_address}" + hour_limit_key = f"email_send_ip_limit_hour:{ip_address}" + + # check ip is frozen + if redis_client.get(freeze_key): + return 
True + + # check current minute count + current_minute_count = redis_client.get(minute_key) + if current_minute_count is None: + current_minute_count = 0 + current_minute_count = int(current_minute_count) + + # check current hour count + if current_minute_count > dify_config.EMAIL_SEND_IP_LIMIT_PER_MINUTE: + hour_limit_count = redis_client.get(hour_limit_key) + if hour_limit_count is None: + hour_limit_count = 0 + hour_limit_count = int(hour_limit_count) + + if hour_limit_count >= 1: + redis_client.setex(freeze_key, 60 * 60, 1) + return True + else: + redis_client.setex(hour_limit_key, 60 * 10, hour_limit_count + 1) # first time limit 10 minutes + + # add hour limit count + redis_client.incr(hour_limit_key) + redis_client.expire(hour_limit_key, 60 * 60) + + return True + + redis_client.setex(minute_key, 60, current_minute_count + 1) + redis_client.expire(minute_key, 60) + + return False + + +def _get_login_cache_key(*, account_id: str, token: str): + return f"account_login:{account_id}:{token}" + class TenantService: @staticmethod - def create_tenant(name: str) -> Tenant: + def create_tenant(name: str, is_setup: Optional[bool] = False) -> Tenant: """Create tenant""" + if not FeatureService.get_system_features().is_allow_create_workspace and not is_setup: + from controllers.console.error import NotAllowedCreateWorkspace + + raise NotAllowedCreateWorkspace() tenant = Tenant(name=name) db.session.add(tenant) @@ -326,8 +502,10 @@ class TenantService: return tenant @staticmethod - def create_owner_tenant_if_not_exist(account: Account, name: Optional[str] = None): - """Create owner tenant if not exist""" + def create_owner_tenant_if_not_exist( + account: Account, name: Optional[str] = None, is_setup: Optional[bool] = False + ): + """Check if user have a workspace or not""" available_ta = ( TenantAccountJoin.query.filter_by(account_id=account.id).order_by(TenantAccountJoin.id.asc()).first() ) @@ -335,10 +513,14 @@ class TenantService: if available_ta: return + """Create owner tenant if not exist""" + if not FeatureService.get_system_features().is_allow_create_workspace and not is_setup: + raise WorkSpaceNotAllowedCreateError() + if name: - tenant = TenantService.create_tenant(name) + tenant = TenantService.create_tenant(name=name, is_setup=is_setup) else: - tenant = TenantService.create_tenant(f"{account.name}'s Workspace") + tenant = TenantService.create_tenant(name=f"{account.name}'s Workspace", is_setup=is_setup) TenantService.create_tenant_member(tenant, account, role="owner") account.current_tenant = tenant db.session.commit() @@ -352,8 +534,13 @@ class TenantService: logging.error(f"Tenant {tenant.id} has already an owner.") raise Exception("Tenant already has an owner.") - ta = TenantAccountJoin(tenant_id=tenant.id, account_id=account.id, role=role) - db.session.add(ta) + ta = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first() + if ta: + ta.role = role + else: + ta = TenantAccountJoin(tenant_id=tenant.id, account_id=account.id, role=role) + db.session.add(ta) + db.session.commit() return ta @@ -570,12 +757,13 @@ class RegisterService: name=name, interface_language=languages[0], password=password, + is_setup=True, ) account.last_login_ip = ip_address account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None) - TenantService.create_owner_tenant_if_not_exist(account) + TenantService.create_owner_tenant_if_not_exist(account=account, is_setup=True) dify_setup = DifySetup(version=dify_config.CURRENT_VERSION) db.session.add(dify_setup) 
@@ -600,27 +788,33 @@ class RegisterService: provider: Optional[str] = None, language: Optional[str] = None, status: Optional[AccountStatus] = None, + is_setup: Optional[bool] = False, ) -> Account: db.session.begin_nested() """Register account""" try: account = AccountService.create_account( - email=email, name=name, interface_language=language or languages[0], password=password + email=email, + name=name, + interface_language=language or languages[0], + password=password, + is_setup=is_setup, ) account.status = AccountStatus.ACTIVE.value if not status else status.value account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None) if open_id is not None or provider is not None: AccountService.link_account_integrate(provider, open_id, account) - if dify_config.EDITION != "SELF_HOSTED": - tenant = TenantService.create_tenant(f"{account.name}'s Workspace") + if FeatureService.get_system_features().is_allow_create_workspace: + tenant = TenantService.create_tenant(f"{account.name}'s Workspace") TenantService.create_tenant_member(tenant, account, role="owner") account.current_tenant = tenant - tenant_was_created.send(tenant) db.session.commit() + except WorkSpaceNotAllowedCreateError: + db.session.rollback() except Exception as e: db.session.rollback() logging.error(f"Register failed: {e}") @@ -639,7 +833,9 @@ class RegisterService: TenantService.check_member_permission(tenant, inviter, None, "add") name = email.split("@")[0] - account = cls.register(email=email, name=name, language=language, status=AccountStatus.PENDING) + account = cls.register( + email=email, name=name, language=language, status=AccountStatus.PENDING, is_setup=True + ) # Create new tenant member for invited tenant TenantService.create_tenant_member(tenant, account, role) TenantService.switch_tenant(account, tenant.id) @@ -679,6 +875,11 @@ class RegisterService: redis_client.setex(cls._get_invitation_token_key(token), expiry_hours * 60 * 60, json.dumps(invitation_data)) return token + @classmethod + def is_valid_invite_token(cls, token: str) -> bool: + data = redis_client.get(cls._get_invitation_token_key(token)) + return data is not None + @classmethod def revoke_token(cls, workspace_id: str, email: str, token: str): if workspace_id and email: @@ -727,7 +928,9 @@ class RegisterService: } @classmethod - def _get_invitation_by_token(cls, token: str, workspace_id: str, email: str) -> Optional[dict[str, str]]: + def _get_invitation_by_token( + cls, token: str, workspace_id: Optional[str] = None, email: Optional[str] = None + ) -> Optional[dict[str, str]]: if workspace_id is not None and email is not None: email_hash = sha256(email.encode()).hexdigest() cache_key = f"member_invite_token:{workspace_id}, {email_hash}:{token}" diff --git a/api/services/agent_service.py b/api/services/agent_service.py index 887fb878b9..c8819535f1 100644 --- a/api/services/agent_service.py +++ b/api/services/agent_service.py @@ -68,7 +68,7 @@ class AgentService: "iterations": len(agent_thoughts), }, "iterations": [], - "files": message.files, + "files": message.message_files, } agent_config = AgentConfigManager.convert(app_model.app_model_config.to_dict()) diff --git a/api/services/annotation_service.py b/api/services/annotation_service.py index 3cc6c51c2d..915d37ec03 100644 --- a/api/services/annotation_service.py +++ b/api/services/annotation_service.py @@ -132,14 +132,14 @@ class AppAnnotationService: MessageAnnotation.content.ilike("%{}%".format(keyword)), ) ) - .order_by(MessageAnnotation.created_at.desc()) + 
.order_by(MessageAnnotation.created_at.desc(), MessageAnnotation.id.desc()) .paginate(page=page, per_page=limit, max_per_page=100, error_out=False) ) else: annotations = ( db.session.query(MessageAnnotation) .filter(MessageAnnotation.app_id == app_id) - .order_by(MessageAnnotation.created_at.desc()) + .order_by(MessageAnnotation.created_at.desc(), MessageAnnotation.id.desc()) .paginate(page=page, per_page=limit, max_per_page=100, error_out=False) ) return annotations.items, annotations.total diff --git a/api/services/app_dsl_service.py b/api/services/app_dsl_service.py index 54594e1175..750d0a8cd2 100644 --- a/api/services/app_dsl_service.py +++ b/api/services/app_dsl_service.py @@ -3,9 +3,9 @@ import logging import httpx import yaml # type: ignore -from core.app.segments import factory from events.app_event import app_model_config_was_updated, app_was_created from extensions.ext_database import db +from factories import variable_factory from models.account import Account from models.model import App, AppMode, AppModelConfig from models.workflow import Workflow @@ -254,14 +254,18 @@ class AppDslService: # init draft workflow environment_variables_list = workflow_data.get("environment_variables") or [] - environment_variables = [factory.build_variable_from_mapping(obj) for obj in environment_variables_list] + environment_variables = [ + variable_factory.build_variable_from_mapping(obj) for obj in environment_variables_list + ] conversation_variables_list = workflow_data.get("conversation_variables") or [] - conversation_variables = [factory.build_variable_from_mapping(obj) for obj in conversation_variables_list] + conversation_variables = [ + variable_factory.build_variable_from_mapping(obj) for obj in conversation_variables_list + ] workflow_service = WorkflowService() draft_workflow = workflow_service.sync_draft_workflow( app_model=app, graph=workflow_data.get("graph", {}), - features=workflow_data.get("../core/app/features", {}), + features=workflow_data.get("features", {}), unique_hash=None, account=account, environment_variables=environment_variables, @@ -295,9 +299,13 @@ class AppDslService: # sync draft workflow environment_variables_list = workflow_data.get("environment_variables") or [] - environment_variables = [factory.build_variable_from_mapping(obj) for obj in environment_variables_list] + environment_variables = [ + variable_factory.build_variable_from_mapping(obj) for obj in environment_variables_list + ] conversation_variables_list = workflow_data.get("conversation_variables") or [] - conversation_variables = [factory.build_variable_from_mapping(obj) for obj in conversation_variables_list] + conversation_variables = [ + variable_factory.build_variable_from_mapping(obj) for obj in conversation_variables_list + ] draft_workflow = workflow_service.sync_draft_workflow( app_model=app_model, graph=workflow_data.get("graph", {}), diff --git a/api/services/app_generate_service.py b/api/services/app_generate_service.py index 26517a05fb..83a9a16904 100644 --- a/api/services/app_generate_service.py +++ b/api/services/app_generate_service.py @@ -1,4 +1,4 @@ -from collections.abc import Generator +from collections.abc import Generator, Mapping from typing import Any, Union from openai._exceptions import RateLimitError @@ -23,7 +23,7 @@ class AppGenerateService: cls, app_model: App, user: Union[Account, EndUser], - args: Any, + args: Mapping[str, Any], invoke_from: InvokeFrom, streaming: bool = True, ): diff --git a/api/services/auth/jina.py b/api/services/auth/jina.py new file mode 
100644
index 0000000000..de898a1f94
--- /dev/null
+++ b/api/services/auth/jina.py
@@ -0,0 +1,44 @@
+import json
+
+import requests
+
+from services.auth.api_key_auth_base import ApiKeyAuthBase
+
+
+class JinaAuth(ApiKeyAuthBase):
+    def __init__(self, credentials: dict):
+        super().__init__(credentials)
+        auth_type = credentials.get("auth_type")
+        if auth_type != "bearer":
+            raise ValueError("Invalid auth type, Jina Reader auth type must be Bearer")
+        self.api_key = credentials.get("config").get("api_key", None)
+
+        if not self.api_key:
+            raise ValueError("No API key provided")
+
+    def validate_credentials(self):
+        headers = self._prepare_headers()
+        options = {
+            "url": "https://example.com",
+        }
+        response = self._post_request("https://r.jina.ai", options, headers)
+        if response.status_code == 200:
+            return True
+        else:
+            self._handle_error(response)
+
+    def _prepare_headers(self):
+        return {"Content-Type": "application/json", "Authorization": f"Bearer {self.api_key}"}
+
+    def _post_request(self, url, data, headers):
+        return requests.post(url, headers=headers, json=data)
+
+    def _handle_error(self, response):
+        if response.status_code in {402, 409, 500}:
+            error_message = response.json().get("error", "Unknown error occurred")
+            raise Exception(f"Failed to authorize. Status code: {response.status_code}. Error: {error_message}")
+        else:
+            if response.text:
+                error_message = json.loads(response.text).get("error", "Unknown error occurred")
+                raise Exception(f"Failed to authorize. Status code: {response.status_code}. Error: {error_message}")
+            raise Exception(f"Unexpected error occurred while trying to authorize. Status code: {response.status_code}")
diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py
index ede8764086..ca084bde56 100644
--- a/api/services/dataset_service.py
+++ b/api/services/dataset_service.py
@@ -140,6 +140,7 @@ class DatasetService:
     def create_empty_dataset(
         tenant_id: str,
         name: str,
+        description: Optional[str],
         indexing_technique: Optional[str],
         account: Account,
         permission: Optional[str] = None,
@@ -158,6 +159,7 @@
         )
         dataset = Dataset(name=name, indexing_technique=indexing_technique)
         # dataset = Dataset(name=name, provider=provider, config=config)
+        dataset.description = description
         dataset.created_by = account.id
         dataset.updated_by = account.id
         dataset.tenant_id = tenant_id
diff --git a/api/services/enterprise/base.py b/api/services/enterprise/base.py
index 7d4fdfd2d0..92098f06cc 100644
--- a/api/services/enterprise/base.py
+++ b/api/services/enterprise/base.py
@@ -15,8 +15,6 @@ class EnterpriseRequest:
     @classmethod
     def send_request(cls, method, endpoint, json=None, params=None):
         headers = {"Content-Type": "application/json", "Enterprise-Api-Secret-Key": cls.secret_key}
-
         url = f"{cls.base_url}{endpoint}"
-
         response = requests.request(method, url, json=json, params=params, headers=headers, proxies=cls.proxies)
         return response.json()
diff --git a/api/services/errors/account.py b/api/services/errors/account.py
index 82dd9f944a..5aca12ffeb 100644
--- a/api/services/errors/account.py
+++ b/api/services/errors/account.py
@@ -13,6 +13,10 @@ class AccountLoginError(BaseServiceError):
     pass
 
 
+class AccountPasswordError(BaseServiceError):
+    pass
+
+
 class AccountNotLinkTenantError(BaseServiceError):
     pass
 
diff --git a/api/services/feature_service.py b/api/services/feature_service.py
index 4d5812c6c6..c321393bc5 100644
--- a/api/services/feature_service.py
+++ b/api/services/feature_service.py
@@ -42,6 +42,11 @@ class
SystemFeatureModel(BaseModel): sso_enforced_for_web: bool = False sso_enforced_for_web_protocol: str = "" enable_web_sso_switch_component: bool = False + enable_email_code_login: bool = False + enable_email_password_login: bool = True + enable_social_oauth_login: bool = False + is_allow_register: bool = False + is_allow_create_workspace: bool = False class FeatureService: @@ -60,12 +65,23 @@ class FeatureService: def get_system_features(cls) -> SystemFeatureModel: system_features = SystemFeatureModel() + cls._fulfill_system_params_from_env(system_features) + if dify_config.ENTERPRISE_ENABLED: system_features.enable_web_sso_switch_component = True + cls._fulfill_params_from_enterprise(system_features) return system_features + @classmethod + def _fulfill_system_params_from_env(cls, system_features: SystemFeatureModel): + system_features.enable_email_code_login = dify_config.ENABLE_EMAIL_CODE_LOGIN + system_features.enable_email_password_login = dify_config.ENABLE_EMAIL_PASSWORD_LOGIN + system_features.enable_social_oauth_login = dify_config.ENABLE_SOCIAL_OAUTH_LOGIN + system_features.is_allow_register = dify_config.ALLOW_REGISTER + system_features.is_allow_create_workspace = dify_config.ALLOW_CREATE_WORKSPACE + @classmethod def _fulfill_params_from_env(cls, features: FeatureModel): features.can_replace_logo = dify_config.CAN_REPLACE_LOGO @@ -113,7 +129,19 @@ class FeatureService: def _fulfill_params_from_enterprise(cls, features): enterprise_info = EnterpriseService.get_info() - features.sso_enforced_for_signin = enterprise_info["sso_enforced_for_signin"] - features.sso_enforced_for_signin_protocol = enterprise_info["sso_enforced_for_signin_protocol"] - features.sso_enforced_for_web = enterprise_info["sso_enforced_for_web"] - features.sso_enforced_for_web_protocol = enterprise_info["sso_enforced_for_web_protocol"] + if "sso_enforced_for_signin" in enterprise_info: + features.sso_enforced_for_signin = enterprise_info["sso_enforced_for_signin"] + if "sso_enforced_for_signin_protocol" in enterprise_info: + features.sso_enforced_for_signin_protocol = enterprise_info["sso_enforced_for_signin_protocol"] + if "sso_enforced_for_web" in enterprise_info: + features.sso_enforced_for_web = enterprise_info["sso_enforced_for_web"] + if "sso_enforced_for_web_protocol" in enterprise_info: + features.sso_enforced_for_web_protocol = enterprise_info["sso_enforced_for_web_protocol"] + if "enable_email_code_login" in enterprise_info: + features.enable_email_code_login = enterprise_info["enable_email_code_login"] + if "enable_email_password_login" in enterprise_info: + features.enable_email_password_login = enterprise_info["enable_email_password_login"] + if "is_allow_register" in enterprise_info: + features.is_allow_register = enterprise_info["is_allow_register"] + if "is_allow_create_workspace" in enterprise_info: + features.is_allow_create_workspace = enterprise_info["is_allow_create_workspace"] diff --git a/api/services/file_service.py b/api/services/file_service.py index bedec76334..6193a39669 100644 --- a/api/services/file_service.py +++ b/api/services/file_service.py @@ -1,80 +1,68 @@ import datetime import hashlib import uuid -from collections.abc import Generator -from typing import Union +from typing import Literal, Union from flask_login import current_user from werkzeug.datastructures import FileStorage from werkzeug.exceptions import NotFound from configs import dify_config -from core.file.upload_file_parser import UploadFileParser +from constants import ( + AUDIO_EXTENSIONS, + DOCUMENT_EXTENSIONS, + 
IMAGE_EXTENSIONS, + VIDEO_EXTENSIONS, +) +from core.file import helpers as file_helpers from core.rag.extractor.extract_processor import ExtractProcessor from extensions.ext_database import db from extensions.ext_storage import storage from models.account import Account +from models.enums import CreatedByRole from models.model import EndUser, UploadFile -from services.errors.file import FileTooLargeError, UnsupportedFileTypeError - -IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "webp", "gif", "svg"] -IMAGE_EXTENSIONS.extend([ext.upper() for ext in IMAGE_EXTENSIONS]) - -ALLOWED_EXTENSIONS = ["txt", "markdown", "md", "pdf", "html", "htm", "xlsx", "xls", "docx", "csv"] -UNSTRUCTURED_ALLOWED_EXTENSIONS = [ - "txt", - "markdown", - "md", - "pdf", - "html", - "htm", - "xlsx", - "xls", - "docx", - "csv", - "eml", - "msg", - "pptx", - "ppt", - "xml", - "epub", -] +from services.errors.file import FileNotExistsError, FileTooLargeError, UnsupportedFileTypeError PREVIEW_WORDS_LIMIT = 3000 class FileService: @staticmethod - def upload_file(file: FileStorage, user: Union[Account, EndUser], only_image: bool = False) -> UploadFile: + def upload_file( + file: FileStorage, user: Union[Account, EndUser], source: Literal["datasets"] | None = None + ) -> UploadFile: + # get file name filename = file.filename - extension = file.filename.split(".")[-1] + if not filename: + raise FileNotExistsError + extension = filename.split(".")[-1] if len(filename) > 200: filename = filename.split(".")[0][:200] + "." + extension - etl_type = dify_config.ETL_TYPE - allowed_extensions = ( - UNSTRUCTURED_ALLOWED_EXTENSIONS + IMAGE_EXTENSIONS - if etl_type == "Unstructured" - else ALLOWED_EXTENSIONS + IMAGE_EXTENSIONS - ) - if extension.lower() not in allowed_extensions or only_image and extension.lower() not in IMAGE_EXTENSIONS: + + if source == "datasets" and extension not in DOCUMENT_EXTENSIONS: raise UnsupportedFileTypeError() - # read file content - file_content = file.read() - - # get file size - file_size = len(file_content) - - if extension.lower() in IMAGE_EXTENSIONS: + # select file size limit + if extension in IMAGE_EXTENSIONS: file_size_limit = dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT * 1024 * 1024 + elif extension in VIDEO_EXTENSIONS: + file_size_limit = dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT * 1024 * 1024 + elif extension in AUDIO_EXTENSIONS: + file_size_limit = dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT * 1024 * 1024 else: file_size_limit = dify_config.UPLOAD_FILE_SIZE_LIMIT * 1024 * 1024 + # read file content + file_content = file.read() + # get file size + file_size = len(file_content) + + # check if the file size is exceeded if file_size > file_size_limit: message = f"File size exceeded. 
{file_size} > {file_size_limit}" raise FileTooLargeError(message) - # user uuid as file name + # generate file key file_uuid = str(uuid.uuid4()) if isinstance(user, Account): @@ -97,7 +85,7 @@ class FileService: size=file_size, extension=extension, mime_type=file.mimetype, - created_by_role=("account" if isinstance(user, Account) else "end_user"), + created_by_role=(CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER), created_by=user.id, created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None), used=False, @@ -130,6 +118,7 @@ class FileService: extension="txt", mime_type="text/plain", created_by=current_user.id, + created_by_role=CreatedByRole.ACCOUNT, created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None), used=True, used_by=current_user.id, @@ -142,7 +131,7 @@ class FileService: return upload_file @staticmethod - def get_file_preview(file_id: str) -> str: + def get_file_preview(file_id: str): upload_file = db.session.query(UploadFile).filter(UploadFile.id == file_id).first() if not upload_file: @@ -150,9 +139,7 @@ class FileService: # extract text from file extension = upload_file.extension - etl_type = dify_config.ETL_TYPE - allowed_extensions = UNSTRUCTURED_ALLOWED_EXTENSIONS if etl_type == "Unstructured" else ALLOWED_EXTENSIONS - if extension.lower() not in allowed_extensions: + if extension.lower() not in DOCUMENT_EXTENSIONS: raise UnsupportedFileTypeError() text = ExtractProcessor.load_from_upload_file(upload_file, return_text=True) @@ -161,8 +148,10 @@ class FileService: return text @staticmethod - def get_image_preview(file_id: str, timestamp: str, nonce: str, sign: str) -> tuple[Generator, str]: - result = UploadFileParser.verify_image_file_signature(file_id, timestamp, nonce, sign) + def get_image_preview(file_id: str, timestamp: str, nonce: str, sign: str): + result = file_helpers.verify_image_signature( + upload_file_id=file_id, timestamp=timestamp, nonce=nonce, sign=sign + ) if not result: raise NotFound("File not found or signature is invalid") @@ -181,7 +170,22 @@ class FileService: return generator, upload_file.mime_type @staticmethod - def get_public_image_preview(file_id: str) -> tuple[Generator, str]: + def get_file_generator_by_file_id(file_id: str, timestamp: str, nonce: str, sign: str): + result = file_helpers.verify_file_signature(upload_file_id=file_id, timestamp=timestamp, nonce=nonce, sign=sign) + if not result: + raise NotFound("File not found or signature is invalid") + + upload_file = db.session.query(UploadFile).filter(UploadFile.id == file_id).first() + + if not upload_file: + raise NotFound("File not found or signature is invalid") + + generator = storage.load(upload_file.key, stream=True) + + return generator, upload_file + + @staticmethod + def get_public_image_preview(file_id: str): upload_file = db.session.query(UploadFile).filter(UploadFile.id == file_id).first() if not upload_file: diff --git a/api/services/tools/workflow_tools_manage_service.py b/api/services/tools/workflow_tools_manage_service.py index 5868ef3755..833881b668 100644 --- a/api/services/tools/workflow_tools_manage_service.py +++ b/api/services/tools/workflow_tools_manage_service.py @@ -1,6 +1,7 @@ import json +from collections.abc import Mapping from datetime import datetime -from typing import Optional +from typing import Any, Optional from sqlalchemy import or_ @@ -21,9 +22,9 @@ class WorkflowToolManageService: Service class for managing workflow tools. 
""" - @classmethod + @staticmethod def create_workflow_tool( - cls, + *, user_id: str, tenant_id: str, workflow_app_id: str, @@ -31,22 +32,10 @@ class WorkflowToolManageService: label: str, icon: dict, description: str, - parameters: list[dict], + parameters: Mapping[str, Any], privacy_policy: str = "", labels: Optional[list[str]] = None, ) -> dict: - """ - Create a workflow tool. - :param user_id: the user id - :param tenant_id: the tenant id - :param name: the name - :param icon: the icon - :param description: the description - :param parameters: the parameters - :param privacy_policy: the privacy policy - :param labels: labels - :return: the created tool - """ WorkflowToolConfigurationUtils.check_parameter_configurations(parameters) # check if the name is unique @@ -63,12 +52,11 @@ class WorkflowToolManageService: if existing_workflow_tool_provider is not None: raise ValueError(f"Tool with name {name} or app_id {workflow_app_id} already exists") - app: App = db.session.query(App).filter(App.id == workflow_app_id, App.tenant_id == tenant_id).first() - + app = db.session.query(App).filter(App.id == workflow_app_id, App.tenant_id == tenant_id).first() if app is None: raise ValueError(f"App {workflow_app_id} not found") - workflow: Workflow = app.workflow + workflow = app.workflow if workflow is None: raise ValueError(f"Workflow not found for app {workflow_app_id}") diff --git a/api/services/workflow/workflow_converter.py b/api/services/workflow/workflow_converter.py index db1a036e68..75c11afa94 100644 --- a/api/services/workflow/workflow_converter.py +++ b/api/services/workflow/workflow_converter.py @@ -13,12 +13,12 @@ from core.app.app_config.entities import ( from core.app.apps.agent_chat.app_config_manager import AgentChatAppConfigManager from core.app.apps.chat.app_config_manager import ChatAppConfigManager from core.app.apps.completion.app_config_manager import CompletionAppConfigManager -from core.file.file_obj import FileExtraConfig +from core.file.models import FileExtraConfig from core.helper import encrypter from core.model_runtime.entities.llm_entities import LLMMode from core.model_runtime.utils.encoders import jsonable_encoder from core.prompt.simple_prompt_transform import SimplePromptTransform -from core.workflow.entities.node_entities import NodeType +from core.workflow.nodes import NodeType from events.app_event import app_was_created from extensions.ext_database import db from models.account import Account @@ -522,7 +522,7 @@ class WorkflowConverter: "vision": { "enabled": file_upload is not None, "variable_selector": ["sys", "files"] if file_upload is not None else None, - "configs": {"detail": file_upload.image_config["detail"]} + "configs": {"detail": file_upload.image_config.detail} if file_upload is not None and file_upload.image_config is not None else None, }, diff --git a/api/services/workflow_app_service.py b/api/services/workflow_app_service.py index b4f0882a3a..f89487415d 100644 --- a/api/services/workflow_app_service.py +++ b/api/services/workflow_app_service.py @@ -4,9 +4,9 @@ from flask_sqlalchemy.pagination import Pagination from sqlalchemy import and_, or_ from extensions.ext_database import db -from models import CreatedByRole -from models.model import App, EndUser -from models.workflow import WorkflowAppLog, WorkflowRun, WorkflowRunStatus +from models import App, EndUser, WorkflowAppLog, WorkflowRun +from models.enums import CreatedByRole +from models.workflow import WorkflowRunStatus class WorkflowAppService: @@ -21,7 +21,7 @@ class WorkflowAppService: 
WorkflowAppLog.tenant_id == app_model.tenant_id, WorkflowAppLog.app_id == app_model.id ) - status = WorkflowRunStatus.value_of(args.get("status")) if args.get("status") else None + status = WorkflowRunStatus.value_of(args.get("status", "")) if args.get("status") else None keyword = args["keyword"] if keyword or status: query = query.join(WorkflowRun, WorkflowRun.id == WorkflowAppLog.workflow_run_id) @@ -42,7 +42,7 @@ class WorkflowAppService: query = query.outerjoin( EndUser, - and_(WorkflowRun.created_by == EndUser.id, WorkflowRun.created_by_role == CreatedByRole.END_USER.value), + and_(WorkflowRun.created_by == EndUser.id, WorkflowRun.created_by_role == CreatedByRole.END_USER), ).filter(or_(*keyword_conditions)) if status: diff --git a/api/services/workflow_run_service.py b/api/services/workflow_run_service.py index b7b3abeaa2..d8ee323908 100644 --- a/api/services/workflow_run_service.py +++ b/api/services/workflow_run_service.py @@ -1,11 +1,11 @@ from extensions.ext_database import db from libs.infinite_scroll_pagination import InfiniteScrollPagination +from models.enums import WorkflowRunTriggeredFrom from models.model import App from models.workflow import ( WorkflowNodeExecution, WorkflowNodeExecutionTriggeredFrom, WorkflowRun, - WorkflowRunTriggeredFrom, ) diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py index 0ff81f1f7e..7187d40517 100644 --- a/api/services/workflow_service.py +++ b/api/services/workflow_service.py @@ -6,19 +6,20 @@ from typing import Optional from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager -from core.app.segments import Variable from core.model_runtime.utils.encoders import jsonable_encoder -from core.workflow.entities.node_entities import NodeRunResult, NodeType +from core.variables import Variable +from core.workflow.entities.node_entities import NodeRunResult from core.workflow.errors import WorkflowNodeRunFailedError +from core.workflow.nodes import NodeType from core.workflow.nodes.event import RunCompletedEvent -from core.workflow.nodes.node_mapping import node_classes +from core.workflow.nodes.node_mapping import node_type_classes_mapping from core.workflow.workflow_entry import WorkflowEntry from events.app_event import app_draft_workflow_was_synced, app_published_workflow_was_updated from extensions.ext_database import db from models.account import Account +from models.enums import CreatedByRole from models.model import App, AppMode from models.workflow import ( - CreatedByRole, Workflow, WorkflowNodeExecution, WorkflowNodeExecutionStatus, @@ -175,7 +176,7 @@ class WorkflowService: """ # return default block config default_block_configs = [] - for node_type, node_class in node_classes.items(): + for node_type, node_class in node_type_classes_mapping.items(): default_config = node_class.get_default_config() if default_config: default_block_configs.append(default_config) @@ -189,10 +190,10 @@ class WorkflowService: :param filters: filter by node config parameters. 
:return: """ - node_type_enum: NodeType = NodeType.value_of(node_type) + node_type_enum: NodeType = NodeType(node_type) # return default block config - node_class = node_classes.get(node_type_enum) + node_class = node_type_classes_mapping.get(node_type_enum) if not node_class: return None @@ -251,7 +252,7 @@ class WorkflowService: workflow_node_execution.triggered_from = WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP.value workflow_node_execution.index = 1 workflow_node_execution.node_id = node_id - workflow_node_execution.node_type = node_instance.node_type.value + workflow_node_execution.node_type = node_instance.node_type workflow_node_execution.title = node_instance.node_data.title workflow_node_execution.elapsed_time = time.perf_counter() - start_at workflow_node_execution.created_by_role = CreatedByRole.ACCOUNT.value diff --git a/api/tasks/mail_reset_password_task.py b/api/tasks/mail_reset_password_task.py index cbb78976ca..8596ca07cf 100644 --- a/api/tasks/mail_reset_password_task.py +++ b/api/tasks/mail_reset_password_task.py @@ -5,17 +5,16 @@ import click from celery import shared_task from flask import render_template -from configs import dify_config from extensions.ext_mail import mail @shared_task(queue="mail") -def send_reset_password_mail_task(language: str, to: str, token: str): +def send_reset_password_mail_task(language: str, to: str, code: str): """ Async Send reset password mail :param language: Language in which the email should be sent (e.g., 'en', 'zh') :param to: Recipient email address - :param token: Reset password token to be included in the email + :param code: Reset password code """ if not mail.is_inited(): return @@ -25,13 +24,12 @@ def send_reset_password_mail_task(language: str, to: str, token: str): # send reset password mail using different languages try: - url = f"{dify_config.CONSOLE_WEB_URL}/forgot-password?token={token}" if language == "zh-Hans": - html_content = render_template("reset_password_mail_template_zh-CN.html", to=to, url=url) - mail.send(to=to, subject="重置您的 Dify 密码", html=html_content) + html_content = render_template("reset_password_mail_template_zh-CN.html", to=to, code=code) + mail.send(to=to, subject="设置您的 Dify 密码", html=html_content) else: - html_content = render_template("reset_password_mail_template_en-US.html", to=to, url=url) - mail.send(to=to, subject="Reset Your Dify Password", html=html_content) + html_content = render_template("reset_password_mail_template_en-US.html", to=to, code=code) + mail.send(to=to, subject="Set Your Dify Password", html=html_content) end_at = time.perf_counter() logging.info( diff --git a/api/templates/invite_member_mail_template_en-US.html b/api/templates/invite_member_mail_template_en-US.html index 80f7d42c20..e8bf7f5a52 100644 --- a/api/templates/invite_member_mail_template_en-US.html +++ b/api/templates/invite_member_mail_template_en-US.html @@ -59,7 +59,7 @@

             <p>Dear {{ to }},</p>
             <p>{{ inviter_name }} is pleased to invite you to join our workspace on Dify, a platform specifically designed for LLM application development. On Dify, you can explore, create, and collaborate to build and operate AI applications.</p>
-            <p>You can now log in to Dify using the GitHub or Google account associated with this email.</p>
+            <p>Click the button below to log in to Dify and join the workspace.</p>
             <a href="{{ url }}">Login Here</a>

diff --git a/web/app/(commonLayout)/apps/Apps.tsx b/web/app/(commonLayout)/apps/Apps.tsx index accf6c67f2..9d6345aa6c 100644 --- a/web/app/(commonLayout)/apps/Apps.tsx +++ b/web/app/(commonLayout)/apps/Apps.tsx @@ -21,7 +21,7 @@ import { NEED_REFRESH_APP_LIST_KEY } from '@/config' import { CheckModal } from '@/hooks/use-pay' import TabSliderNew from '@/app/components/base/tab-slider-new' import { useTabSearchParams } from '@/hooks/use-tab-searchparams' -import SearchInput from '@/app/components/base/search-input' +import Input from '@/app/components/base/input' import { useStore as useTagStore } from '@/app/components/base/tag-management/store' import TagManagementModal from '@/app/components/base/tag-management' import TagFilter from '@/app/components/base/tag-management/filter' @@ -133,7 +133,14 @@ const Apps = () => { />
-          <SearchInput className='w-[200px]' value={keywords} onChange={handleKeywordsChange} />
+          <Input
+            showLeftIcon
+            showClearIcon
+            wrapperClassName='w-[200px]'
+            value={keywords}
+            onChange={e => handleKeywordsChange(e.target.value)}
+            onClear={() => handleKeywordsChange('')}
+          />