diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml
index e424171019..aa7e68dbac 100644
--- a/.github/workflows/api-tests.yml
+++ b/.github/workflows/api-tests.yml
@@ -75,7 +75,7 @@ jobs:
- name: Run Workflow
run: poetry run -C api bash dev/pytest/pytest_workflow.sh
- - name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma)
+ - name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale)
uses: hoverkraft-tech/compose-action@v2.0.0
with:
compose-file: |
@@ -89,5 +89,6 @@ jobs:
pgvecto-rs
pgvector
chroma
+ myscale
- name: Test Vector Stores
run: poetry run -C api bash dev/pytest/pytest_vdb.sh
diff --git a/api/.env.example b/api/.env.example
index 1f6e6f69b7..228218be0d 100644
--- a/api/.env.example
+++ b/api/.env.example
@@ -83,7 +83,7 @@ OCI_REGION=your-region
WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
-# Vector database configuration, support: weaviate, qdrant, milvus, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector
+# Vector database configuration, support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, chroma, opensearch, tidb_vector
VECTOR_STORE=weaviate
# Weaviate configuration
@@ -106,6 +106,14 @@ MILVUS_USER=root
MILVUS_PASSWORD=Milvus
MILVUS_SECURE=false
+# MyScale configuration
+MYSCALE_HOST=127.0.0.1
+MYSCALE_PORT=8123
+MYSCALE_USER=default
+MYSCALE_PASSWORD=
+MYSCALE_DATABASE=default
+MYSCALE_FTS_PARAMS=
+
# Relyt configuration
RELYT_HOST=127.0.0.1
RELYT_PORT=5432
diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py
index 067bcd7af4..a32b70bdc7 100644
--- a/api/configs/middleware/__init__.py
+++ b/api/configs/middleware/__init__.py
@@ -13,6 +13,7 @@ from configs.middleware.storage.tencent_cos_storage_config import TencentCloudCO
from configs.middleware.vdb.analyticdb_config import AnalyticdbConfig
from configs.middleware.vdb.chroma_config import ChromaConfig
from configs.middleware.vdb.milvus_config import MilvusConfig
+from configs.middleware.vdb.myscale_config import MyScaleConfig
from configs.middleware.vdb.opensearch_config import OpenSearchConfig
from configs.middleware.vdb.oracle_config import OracleConfig
from configs.middleware.vdb.pgvector_config import PGVectorConfig
@@ -187,6 +188,7 @@ class MiddlewareConfig(
AnalyticdbConfig,
ChromaConfig,
MilvusConfig,
+ MyScaleConfig,
OpenSearchConfig,
OracleConfig,
PGVectorConfig,
diff --git a/api/configs/middleware/vdb/myscale_config.py b/api/configs/middleware/vdb/myscale_config.py
new file mode 100644
index 0000000000..e513cad0e8
--- /dev/null
+++ b/api/configs/middleware/vdb/myscale_config.py
@@ -0,0 +1,39 @@
+from typing import Optional
+
+from pydantic import BaseModel, Field, PositiveInt
+
+
+class MyScaleConfig(BaseModel):
+ """
+ MyScale configs
+ """
+
+ MYSCALE_HOST: Optional[str] = Field(
+ description='MyScale host',
+ default=None,
+ )
+
+ MYSCALE_PORT: Optional[PositiveInt] = Field(
+ description='MyScale port',
+ default=8123,
+ )
+
+ MYSCALE_USER: Optional[str] = Field(
+ description='MyScale user',
+ default=None,
+ )
+
+ MYSCALE_PASSWORD: Optional[str] = Field(
+ description='MyScale password',
+ default=None,
+ )
+
+ MYSCALE_DATABASE: Optional[str] = Field(
+ description='MyScale database name',
+ default=None,
+ )
+
+ MYSCALE_FTS_PARAMS: Optional[str] = Field(
+ description='MyScale fts index parameters',
+ default=None,
+ )
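
The new config model plugs into `MiddlewareConfig` (see the `__init__.py` hunk above), where the `MYSCALE_*` variables from `api/.env.example` populate these fields. A minimal sketch of that flow, assuming a pydantic-settings base as used elsewhere under `api/configs` (`MyScaleConfigSketch` is a stand-in, not the class above):

```python
# Sketch only: how MYSCALE_* environment variables would populate the
# config fields; BaseSettings stands in for Dify's actual settings base.
import os

from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class MyScaleConfigSketch(BaseSettings):
    MYSCALE_HOST: str = Field(default='127.0.0.1')
    MYSCALE_PORT: PositiveInt = Field(default=8123)
    MYSCALE_DATABASE: str = Field(default='default')


os.environ['MYSCALE_PORT'] = '9000'        # e.g. exported via api/.env
print(MyScaleConfigSketch().MYSCALE_PORT)  # -> 9000, other fields keep defaults
```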
diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py
index 50edec33c3..70c506bb0e 100644
--- a/api/controllers/console/datasets/datasets.py
+++ b/api/controllers/console/datasets/datasets.py
@@ -548,7 +548,7 @@ class DatasetRetrievalSettingApi(Resource):
RetrievalMethod.SEMANTIC_SEARCH
]
}
- case VectorType.QDRANT | VectorType.WEAVIATE | VectorType.OPENSEARCH | VectorType.ANALYTICDB:
+ case VectorType.QDRANT | VectorType.WEAVIATE | VectorType.OPENSEARCH | VectorType.ANALYTICDB | VectorType.MYSCALE:
return {
'retrieval_method': [
RetrievalMethod.SEMANTIC_SEARCH,
@@ -572,7 +572,7 @@ class DatasetRetrievalSettingMockApi(Resource):
RetrievalMethod.SEMANTIC_SEARCH
]
}
- case VectorType.QDRANT | VectorType.WEAVIATE | VectorType.OPENSEARCH| VectorType.ANALYTICDB:
+            case VectorType.QDRANT | VectorType.WEAVIATE | VectorType.OPENSEARCH | VectorType.ANALYTICDB | VectorType.MYSCALE:
return {
'retrieval_method': [
RetrievalMethod.SEMANTIC_SEARCH,
diff --git a/api/controllers/inner_api/wraps.py b/api/controllers/inner_api/wraps.py
index 07cd38bc85..2c3c870bce 100644
--- a/api/controllers/inner_api/wraps.py
+++ b/api/controllers/inner_api/wraps.py
@@ -3,8 +3,9 @@ from functools import wraps
from hashlib import sha1
from hmac import new as hmac_new
-from flask import abort, current_app, request
+from flask import abort, request
+from configs import dify_config
from extensions.ext_database import db
from models.model import EndUser
@@ -12,12 +13,12 @@ from models.model import EndUser
def inner_api_only(view):
@wraps(view)
def decorated(*args, **kwargs):
- if not current_app.config['INNER_API']:
+ if not dify_config.INNER_API:
abort(404)
# get header 'X-Inner-Api-Key'
inner_api_key = request.headers.get('X-Inner-Api-Key')
- if not inner_api_key or inner_api_key != current_app.config['INNER_API_KEY']:
+ if not inner_api_key or inner_api_key != dify_config.INNER_API_KEY:
abort(404)
return view(*args, **kwargs)
@@ -28,7 +29,7 @@ def inner_api_only(view):
def inner_api_user_auth(view):
@wraps(view)
def decorated(*args, **kwargs):
- if not current_app.config['INNER_API']:
+ if not dify_config.INNER_API:
return view(*args, **kwargs)
# get header 'X-Inner-Api-Key'
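
The controller hunks in this patch all apply the same mechanical migration: request-bound `current_app.config[...]` lookups become attribute reads on the importable `dify_config` object, so the code no longer needs an active Flask application context. A self-contained illustration of the before/after shape (`SimpleNamespace` stands in for the real settings object):

```python
# Stand-in for `from configs import dify_config`; plain attribute reads
# replace current_app.config['INNER_API'] / config['INNER_API_KEY'].
from types import SimpleNamespace

dify_config = SimpleNamespace(INNER_API=True, INNER_API_KEY='secret')


def check(inner_api_key: str | None) -> bool:
    # Mirrors the decorator logic above: both checks abort(404) on failure.
    if not dify_config.INNER_API:
        return False
    return bool(inner_api_key) and inner_api_key == dify_config.INNER_API_KEY


print(check('secret'), check('wrong'))  # True False
```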
diff --git a/api/controllers/service_api/app/app.py b/api/controllers/service_api/app/app.py
index bccce9b55b..3b3cf1b026 100644
--- a/api/controllers/service_api/app/app.py
+++ b/api/controllers/service_api/app/app.py
@@ -1,7 +1,7 @@
-from flask import current_app
from flask_restful import Resource, fields, marshal_with
+from configs import dify_config
from controllers.service_api import api
from controllers.service_api.app.error import AppUnavailableError
from controllers.service_api.wraps import validate_app_token
@@ -78,7 +78,7 @@ class AppParameterApi(Resource):
"transfer_methods": ["remote_url", "local_file"]
}}),
'system_parameters': {
- 'image_file_size_limit': current_app.config.get('UPLOAD_IMAGE_FILE_SIZE_LIMIT')
+ 'image_file_size_limit': dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT
}
}
diff --git a/api/controllers/service_api/index.py b/api/controllers/service_api/index.py
index 932388b562..c910063ebd 100644
--- a/api/controllers/service_api/index.py
+++ b/api/controllers/service_api/index.py
@@ -1,6 +1,6 @@
-from flask import current_app
from flask_restful import Resource
+from configs import dify_config
from controllers.service_api import api
@@ -9,7 +9,7 @@ class IndexApi(Resource):
return {
"welcome": "Dify OpenAPI",
"api_version": "v1",
- "server_version": current_app.config['CURRENT_VERSION']
+ "server_version": dify_config.CURRENT_VERSION,
}
diff --git a/api/controllers/web/app.py b/api/controllers/web/app.py
index 91d9015c33..f4db82552c 100644
--- a/api/controllers/web/app.py
+++ b/api/controllers/web/app.py
@@ -1,6 +1,6 @@
-from flask import current_app
from flask_restful import fields, marshal_with
+from configs import dify_config
from controllers.web import api
from controllers.web.error import AppUnavailableError
from controllers.web.wraps import WebApiResource
@@ -75,7 +75,7 @@ class AppParameterApi(WebApiResource):
"transfer_methods": ["remote_url", "local_file"]
}}),
'system_parameters': {
- 'image_file_size_limit': current_app.config.get('UPLOAD_IMAGE_FILE_SIZE_LIMIT')
+ 'image_file_size_limit': dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT
}
}
diff --git a/api/controllers/web/site.py b/api/controllers/web/site.py
index c307959b20..99ec86e935 100644
--- a/api/controllers/web/site.py
+++ b/api/controllers/web/site.py
@@ -1,8 +1,8 @@
-from flask import current_app
from flask_restful import fields, marshal_with
from werkzeug.exceptions import Forbidden
+from configs import dify_config
from controllers.web import api
from controllers.web.wraps import WebApiResource
from extensions.ext_database import db
@@ -84,7 +84,7 @@ class AppSiteInfo:
self.can_replace_logo = can_replace_logo
if can_replace_logo:
- base_url = current_app.config.get('FILES_URL')
+ base_url = dify_config.FILES_URL
remove_webapp_brand = tenant.custom_config_dict.get('remove_webapp_brand', False)
replace_webapp_logo = f'{base_url}/files/workspaces/{tenant.id}/webapp-logo' if tenant.custom_config_dict.get('replace_webapp_logo') else None
self.custom_config = {
diff --git a/api/core/app/apps/advanced_chat/app_runner.py b/api/core/app/apps/advanced_chat/app_runner.py
index 96e9319dda..3856621700 100644
--- a/api/core/app/apps/advanced_chat/app_runner.py
+++ b/api/core/app/apps/advanced_chat/app_runner.py
@@ -255,6 +255,12 @@ class AdvancedChatAppRunner(AppRunner):
)
index += 1
time.sleep(0.01)
+ else:
+ queue_manager.publish(
+ QueueTextChunkEvent(
+ text=text
+ ), PublishFrom.APPLICATION_MANAGER
+ )
queue_manager.publish(
QueueStopEvent(stopped_by=stopped_by),
diff --git a/api/core/model_runtime/model_providers/deepseek/llm/deepseek-chat.yaml b/api/core/model_runtime/model_providers/deepseek/llm/deepseek-chat.yaml
index 80607ca9e5..d50529926b 100644
--- a/api/core/model_runtime/model_providers/deepseek/llm/deepseek-chat.yaml
+++ b/api/core/model_runtime/model_providers/deepseek/llm/deepseek-chat.yaml
@@ -7,7 +7,7 @@ features:
- agent-thought
model_properties:
mode: chat
- context_size: 32000
+ context_size: 128000
parameter_rules:
- name: temperature
use_template: temperature
diff --git a/api/core/model_runtime/model_providers/deepseek/llm/deepseek-coder.yaml b/api/core/model_runtime/model_providers/deepseek/llm/deepseek-coder.yaml
index f6ce775d76..4da75b9aa3 100644
--- a/api/core/model_runtime/model_providers/deepseek/llm/deepseek-coder.yaml
+++ b/api/core/model_runtime/model_providers/deepseek/llm/deepseek-coder.yaml
@@ -7,7 +7,7 @@ features:
- agent-thought
model_properties:
mode: chat
- context_size: 32000
+ context_size: 128000
parameter_rules:
- name: temperature
use_template: temperature
diff --git a/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py b/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py
index b76f460737..e5cc884b6d 100644
--- a/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py
+++ b/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py
@@ -616,30 +616,34 @@ class OAIAPICompatLargeLanguageModel(_CommonOAI_API_Compat, LargeLanguageModel):
message = cast(AssistantPromptMessage, message)
message_dict = {"role": "assistant", "content": message.content}
if message.tool_calls:
- # message_dict["tool_calls"] = [helper.dump_model(PromptMessageFunction(function=tool_call)) for tool_call
- # in
- # message.tool_calls]
-
- function_call = message.tool_calls[0]
- message_dict["function_call"] = {
- "name": function_call.function.name,
- "arguments": function_call.function.arguments,
- }
+ function_calling_type = credentials.get('function_calling_type', 'no_call')
+ if function_calling_type == 'tool_call':
+                    message_dict["tool_calls"] = [tool_call.dict()
+                                                  for tool_call in message.tool_calls]
+ elif function_calling_type == 'function_call':
+ function_call = message.tool_calls[0]
+ message_dict["function_call"] = {
+ "name": function_call.function.name,
+ "arguments": function_call.function.arguments,
+ }
elif isinstance(message, SystemPromptMessage):
message = cast(SystemPromptMessage, message)
message_dict = {"role": "system", "content": message.content}
elif isinstance(message, ToolPromptMessage):
message = cast(ToolPromptMessage, message)
- # message_dict = {
- # "role": "tool",
- # "content": message.content,
- # "tool_call_id": message.tool_call_id
- # }
- message_dict = {
- "role": "tool" if credentials and credentials.get('function_calling_type', 'no_call') == 'tool_call' else "function",
- "content": message.content,
- "name": message.tool_call_id
- }
+ function_calling_type = credentials.get('function_calling_type', 'no_call')
+ if function_calling_type == 'tool_call':
+ message_dict = {
+ "role": "tool",
+ "content": message.content,
+ "tool_call_id": message.tool_call_id
+ }
+ elif function_calling_type == 'function_call':
+ message_dict = {
+ "role": "function",
+ "content": message.content,
+ "name": message.tool_call_id
+ }
else:
raise ValueError(f"Got unknown type {message}")
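
For reference, a hedged sketch of the two assistant-message payloads the rewritten branch now emits, keyed off the `function_calling_type` credential (sample values only; `no_call` emits neither field):

```python
# Example payload shapes produced by the converter above (illustrative data).
tool_call = {
    "id": "call_0",
    "type": "function",
    "function": {"name": "get_weather", "arguments": '{"city": "Berlin"}'},
}

# function_calling_type == 'tool_call' -> OpenAI tools-style field:
message_tool_call = {"role": "assistant", "content": "", "tool_calls": [tool_call]}

# function_calling_type == 'function_call' -> legacy single function_call field:
message_function_call = {
    "role": "assistant",
    "content": "",
    "function_call": tool_call["function"],
}

print(message_tool_call["tool_calls"][0]["function"]["name"])  # get_weather
print(message_function_call["function_call"]["arguments"])     # {"city": "Berlin"}
```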
diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-turbo-8k-preview.yaml b/api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-turbo-8k-preview.yaml
new file mode 100644
index 0000000000..16df540220
--- /dev/null
+++ b/api/core/model_runtime/model_providers/wenxin/llm/ernie-4.0-turbo-8k-preview.yaml
@@ -0,0 +1,40 @@
+model: ernie-4.0-turbo-8k-preview
+label:
+ en_US: Ernie-4.0-turbo-8k-preview
+model_type: llm
+features:
+ - agent-thought
+model_properties:
+ mode: chat
+ context_size: 8192
+parameter_rules:
+ - name: temperature
+ use_template: temperature
+ min: 0.1
+ max: 1.0
+ default: 0.8
+ - name: top_p
+ use_template: top_p
+ - name: max_tokens
+ use_template: max_tokens
+ default: 1024
+ min: 2
+ max: 2048
+ - name: presence_penalty
+ use_template: presence_penalty
+ default: 1.0
+ min: 1.0
+ max: 2.0
+ - name: frequency_penalty
+ use_template: frequency_penalty
+ - name: response_format
+ use_template: response_format
+ - name: disable_search
+ label:
+ zh_Hans: 禁用搜索
+ en_US: Disable Search
+ type: boolean
+ help:
+ zh_Hans: 禁用模型自行进行外部搜索。
+      en_US: Disable the model from performing external searches.
+ required: false
diff --git a/api/core/model_runtime/model_providers/wenxin/llm/ernie_bot.py b/api/core/model_runtime/model_providers/wenxin/llm/ernie_bot.py
index 305769c1c1..9aeab04cd2 100644
--- a/api/core/model_runtime/model_providers/wenxin/llm/ernie_bot.py
+++ b/api/core/model_runtime/model_providers/wenxin/llm/ernie_bot.py
@@ -138,6 +138,7 @@ class ErnieBotModel:
'ernie-lite-8k-0922': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant',
'ernie-lite-8k-0308': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-lite-8k',
'ernie-character-8k-0321': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-char-8k',
+        'ernie-4.0-turbo-8k-preview': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-4.0-turbo-8k-preview',
}
function_calling_supports = [
@@ -149,6 +150,7 @@ class ErnieBotModel:
'ernie-3.5-4k-0205',
'ernie-3.5-128k',
-        'ernie-4.0-8k'
+        'ernie-4.0-8k',
+        'ernie-4.0-turbo-8k-preview',
]
api_key: str = ''
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/images.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/images.py
index 3201426dfa..8eae1216d0 100644
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/images.py
+++ b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/images.py
@@ -5,7 +5,7 @@ from typing import TYPE_CHECKING, Optional
import httpx
from ..core._base_api import BaseAPI
-from ..core._base_type import NOT_GIVEN, Headers, NotGiven
+from ..core._base_type import NOT_GIVEN, Body, Headers, NotGiven
from ..core._http_client import make_user_request_input
from ..types.image import ImagesResponded
@@ -28,7 +28,9 @@ class Images(BaseAPI):
size: Optional[str] | NotGiven = NOT_GIVEN,
style: Optional[str] | NotGiven = NOT_GIVEN,
user: str | NotGiven = NOT_GIVEN,
+ request_id: Optional[str] | NotGiven = NOT_GIVEN,
extra_headers: Headers | None = None,
+ extra_body: Body | None = None,
disable_strict_validation: Optional[bool] | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> ImagesResponded:
@@ -46,9 +48,12 @@ class Images(BaseAPI):
"size": size,
"style": style,
"user": user,
+ "request_id": request_id,
},
options=make_user_request_input(
- extra_headers=extra_headers, timeout=timeout
+ extra_headers=extra_headers,
+ extra_body=extra_body,
+ timeout=timeout
),
cast_type=_cast_type,
enable_stream=False,
diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_http_client.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_http_client.py
index 924d009123..263fe82990 100644
--- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_http_client.py
+++ b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_http_client.py
@@ -11,7 +11,7 @@ from tenacity import retry
from tenacity.stop import stop_after_attempt
from . import _errors
-from ._base_type import NOT_GIVEN, Body, Data, Headers, NotGiven, Query, RequestFiles, ResponseT
+from ._base_type import NOT_GIVEN, AnyMapping, Body, Data, Headers, NotGiven, Query, RequestFiles, ResponseT
from ._errors import APIResponseValidationError, APIStatusError, APITimeoutError
from ._files import make_httpx_files
from ._request_opt import ClientRequestParam, UserRequestInput
@@ -358,6 +358,7 @@ def make_user_request_input(
max_retries: int | None = None,
timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
extra_headers: Headers = None,
+ extra_body: Body | None = None,
query: Query | None = None,
) -> UserRequestInput:
options: UserRequestInput = {}
@@ -370,5 +371,7 @@ def make_user_request_input(
options['timeout'] = timeout
if query is not None:
options["params"] = query
+ if extra_body is not None:
+ options["extra_json"] = cast(AnyMapping, extra_body)
return options
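
A simplified stand-in showing what the new `extra_body` plumbing does: anything passed as `extra_body` ends up under `options["extra_json"]`, which the SDK's HTTP client merges into the request JSON (the real function also handles headers, retries, timeout, and query; the sketch below is not the SDK implementation):

```python
# Simplified sketch of make_user_request_input's extra_body handling.
from typing import Any


def make_user_request_input_sketch(extra_headers: dict | None = None,
                                   extra_body: dict[str, Any] | None = None) -> dict:
    options: dict[str, Any] = {}
    if extra_headers is not None:
        options["headers"] = extra_headers
    if extra_body is not None:
        options["extra_json"] = extra_body  # merged into the request body
    return options


# The CogView tool below uses this path to pass a generation seed through:
print(make_user_request_input_sketch(extra_body={"seed": 42}))
# -> {'extra_json': {'seed': 42}}
```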
diff --git a/api/core/rag/datasource/keyword/keyword_factory.py b/api/core/rag/datasource/keyword/keyword_factory.py
index beb3322aa6..6ac610f82b 100644
--- a/api/core/rag/datasource/keyword/keyword_factory.py
+++ b/api/core/rag/datasource/keyword/keyword_factory.py
@@ -1,7 +1,6 @@
from typing import Any
-from flask import current_app
-
+from configs import dify_config
from core.rag.datasource.keyword.jieba.jieba import Jieba
from core.rag.datasource.keyword.keyword_base import BaseKeyword
from core.rag.models.document import Document
@@ -14,8 +13,8 @@ class Keyword:
self._keyword_processor = self._init_keyword()
def _init_keyword(self) -> BaseKeyword:
- config = current_app.config
- keyword_type = config.get('KEYWORD_STORE')
+ config = dify_config
+ keyword_type = config.KEYWORD_STORE
if not keyword_type:
raise ValueError("Keyword store must be specified.")
diff --git a/api/core/rag/datasource/vdb/myscale/__init__.py b/api/core/rag/datasource/vdb/myscale/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/core/rag/datasource/vdb/myscale/myscale_vector.py b/api/core/rag/datasource/vdb/myscale/myscale_vector.py
new file mode 100644
index 0000000000..811b08818c
--- /dev/null
+++ b/api/core/rag/datasource/vdb/myscale/myscale_vector.py
@@ -0,0 +1,170 @@
+import json
+import logging
+import uuid
+from enum import Enum
+from typing import Any
+
+from clickhouse_connect import get_client
+from flask import current_app
+from pydantic import BaseModel
+
+from core.rag.datasource.entity.embedding import Embeddings
+from core.rag.datasource.vdb.vector_base import BaseVector
+from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
+from core.rag.datasource.vdb.vector_type import VectorType
+from core.rag.models.document import Document
+from models.dataset import Dataset
+
+
+class MyScaleConfig(BaseModel):
+ host: str
+ port: int
+ user: str
+ password: str
+ database: str
+ fts_params: str
+
+
+class SortOrder(Enum):
+ ASC = "ASC"
+ DESC = "DESC"
+
+
+class MyScaleVector(BaseVector):
+
+ def __init__(self, collection_name: str, config: MyScaleConfig, metric: str = "Cosine"):
+ super().__init__(collection_name)
+ self._config = config
+ self._metric = metric
+ self._vec_order = SortOrder.ASC if metric.upper() in ["COSINE", "L2"] else SortOrder.DESC
+ self._client = get_client(
+ host=config.host,
+ port=config.port,
+ username=config.user,
+ password=config.password,
+ )
+ self._client.command("SET allow_experimental_object_type=1")
+
+ def get_type(self) -> str:
+ return VectorType.MYSCALE
+
+ def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
+ dimension = len(embeddings[0])
+ self._create_collection(dimension)
+ return self.add_texts(documents=texts, embeddings=embeddings, **kwargs)
+
+ def _create_collection(self, dimension: int):
+ logging.info(f"create MyScale collection {self._collection_name} with dimension {dimension}")
+ self._client.command(f"CREATE DATABASE IF NOT EXISTS {self._config.database}")
+ fts_params = f"('{self._config.fts_params}')" if self._config.fts_params else ""
+ sql = f"""
+ CREATE TABLE IF NOT EXISTS {self._config.database}.{self._collection_name}(
+ id String,
+ text String,
+ vector Array(Float32),
+ metadata JSON,
+ CONSTRAINT cons_vec_len CHECK length(vector) = {dimension},
+ VECTOR INDEX vidx vector TYPE DEFAULT('metric_type = {self._metric}'),
+ INDEX text_idx text TYPE fts{fts_params}
+ ) ENGINE = MergeTree ORDER BY id
+ """
+ self._client.command(sql)
+
+ def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
+ ids = []
+ columns = ["id", "text", "vector", "metadata"]
+ values = []
+ for i, doc in enumerate(documents):
+ doc_id = doc.metadata.get("doc_id", str(uuid.uuid4()))
+ row = (
+ doc_id,
+ self.escape_str(doc.page_content),
+ embeddings[i],
+                json.dumps(doc.metadata) if doc.metadata else '{}'
+ )
+ values.append(str(row))
+ ids.append(doc_id)
+ sql = f"""
+ INSERT INTO {self._config.database}.{self._collection_name}
+ ({",".join(columns)}) VALUES {",".join(values)}
+ """
+ self._client.command(sql)
+ return ids
+
+ @staticmethod
+ def escape_str(value: Any) -> str:
+ return "".join(f"\\{c}" if c in ("\\", "'") else c for c in str(value))
+
+ def text_exists(self, id: str) -> bool:
+ results = self._client.query(f"SELECT id FROM {self._config.database}.{self._collection_name} WHERE id='{id}'")
+ return results.row_count > 0
+
+ def delete_by_ids(self, ids: list[str]) -> None:
+ self._client.command(
+ f"DELETE FROM {self._config.database}.{self._collection_name} WHERE id IN {str(tuple(ids))}")
+
+ def get_ids_by_metadata_field(self, key: str, value: str):
+ rows = self._client.query(
+ f"SELECT DISTINCT id FROM {self._config.database}.{self._collection_name} WHERE metadata.{key}='{value}'"
+ ).result_rows
+ return [row[0] for row in rows]
+
+ def delete_by_metadata_field(self, key: str, value: str) -> None:
+ self._client.command(
+ f"DELETE FROM {self._config.database}.{self._collection_name} WHERE metadata.{key}='{value}'"
+ )
+
+ def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
+ return self._search(f"distance(vector, {str(query_vector)})", self._vec_order, **kwargs)
+
+ def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
+ return self._search(f"TextSearch(text, '{query}')", SortOrder.DESC, **kwargs)
+
+ def _search(self, dist: str, order: SortOrder, **kwargs: Any) -> list[Document]:
+ top_k = kwargs.get("top_k", 5)
+ score_threshold = kwargs.get("score_threshold", 0.0)
+ where_str = f"WHERE dist < {1 - score_threshold}" if \
+ self._metric.upper() == "COSINE" and order == SortOrder.ASC and score_threshold > 0.0 else ""
+ sql = f"""
+ SELECT text, metadata, {dist} as dist FROM {self._config.database}.{self._collection_name}
+ {where_str} ORDER BY dist {order.value} LIMIT {top_k}
+ """
+ try:
+ return [
+ Document(
+ page_content=r["text"],
+ metadata=r["metadata"],
+ )
+ for r in self._client.query(sql).named_results()
+ ]
+ except Exception as e:
+ logging.error(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m")
+ return []
+
+ def delete(self) -> None:
+ self._client.command(f"DROP TABLE IF EXISTS {self._config.database}.{self._collection_name}")
+
+
+class MyScaleVectorFactory(AbstractVectorFactory):
+ def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> MyScaleVector:
+ if dataset.index_struct_dict:
+ class_prefix: str = dataset.index_struct_dict['vector_store']['class_prefix']
+ collection_name = class_prefix.lower()
+ else:
+ dataset_id = dataset.id
+ collection_name = Dataset.gen_collection_name_by_id(dataset_id).lower()
+ dataset.index_struct = json.dumps(
+ self.gen_index_struct_dict(VectorType.MYSCALE, collection_name))
+
+ config = current_app.config
+ return MyScaleVector(
+ collection_name=collection_name,
+ config=MyScaleConfig(
+ host=config.get("MYSCALE_HOST", "localhost"),
+ port=int(config.get("MYSCALE_PORT", 8123)),
+ user=config.get("MYSCALE_USER", "default"),
+ password=config.get("MYSCALE_PASSWORD", ""),
+ database=config.get("MYSCALE_DATABASE", "default"),
+ fts_params=config.get("MYSCALE_FTS_PARAMS", ""),
+ ),
+ )
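
A hypothetical end-to-end sketch of the new vector store (requires a reachable MyScale/ClickHouse instance; the host/port values mirror the `MYSCALE_*` defaults above, and the three-dimensional embedding is toy data):

```python
# Usage sketch only: exercises create/search on MyScaleVector.
from core.rag.datasource.vdb.myscale.myscale_vector import MyScaleConfig, MyScaleVector
from core.rag.models.document import Document

vector = MyScaleVector(
    collection_name="vector_index_demo",
    config=MyScaleConfig(host="127.0.0.1", port=8123, user="default",
                         password="", database="dify", fts_params=""),
)
docs = [Document(page_content="hello myscale", metadata={"doc_id": "d1"})]
vector.create(texts=docs, embeddings=[[0.1, 0.2, 0.3]])   # creates db + table
print(vector.search_by_full_text("hello", top_k=1))       # fts index path
print(vector.search_by_vector([0.1, 0.2, 0.3], top_k=1))  # vector index path
```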
diff --git a/api/core/rag/datasource/vdb/vector_factory.py b/api/core/rag/datasource/vdb/vector_factory.py
index b7733029f7..f8b58e1b9a 100644
--- a/api/core/rag/datasource/vdb/vector_factory.py
+++ b/api/core/rag/datasource/vdb/vector_factory.py
@@ -57,6 +57,9 @@ class Vector:
case VectorType.MILVUS:
from core.rag.datasource.vdb.milvus.milvus_vector import MilvusVectorFactory
return MilvusVectorFactory
+ case VectorType.MYSCALE:
+ from core.rag.datasource.vdb.myscale.myscale_vector import MyScaleVectorFactory
+ return MyScaleVectorFactory
case VectorType.PGVECTOR:
from core.rag.datasource.vdb.pgvector.pgvector import PGVectorFactory
return PGVectorFactory
diff --git a/api/core/rag/datasource/vdb/vector_type.py b/api/core/rag/datasource/vdb/vector_type.py
index 32c8713fda..77495044df 100644
--- a/api/core/rag/datasource/vdb/vector_type.py
+++ b/api/core/rag/datasource/vdb/vector_type.py
@@ -5,6 +5,7 @@ class VectorType(str, Enum):
ANALYTICDB = 'analyticdb'
CHROMA = 'chroma'
MILVUS = 'milvus'
+ MYSCALE = 'myscale'
PGVECTOR = 'pgvector'
PGVECTO_RS = 'pgvecto-rs'
QDRANT = 'qdrant'
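
These two registrations connect through the factory match above: with `VECTOR_STORE=myscale` in the environment, `Vector.get_vector_factory` returns `MyScaleVectorFactory`, which in turn reads the `MYSCALE_*` settings. A reduced stand-in for that dispatch:

```python
# Reduced sketch of the match-based dispatch in vector_factory.py.
from enum import Enum


class VectorType(str, Enum):
    MILVUS = 'milvus'
    MYSCALE = 'myscale'


def get_vector_factory(vector_type: str) -> str:
    match vector_type:
        case VectorType.MYSCALE:
            return 'MyScaleVectorFactory'  # the real code returns the class
        case _:
            raise ValueError(f'Vector store {vector_type} is not supported.')


print(get_vector_factory('myscale'))  # selected when VECTOR_STORE=myscale
```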
diff --git a/api/core/rag/extractor/firecrawl/firecrawl_app.py b/api/core/rag/extractor/firecrawl/firecrawl_app.py
index af6b568936..2b85ad9739 100644
--- a/api/core/rag/extractor/firecrawl/firecrawl_app.py
+++ b/api/core/rag/extractor/firecrawl/firecrawl_app.py
@@ -46,7 +46,6 @@ class FirecrawlApp:
raise Exception(f'Failed to scrape URL. Status code: {response.status_code}')
def crawl_url(self, url, params=None) -> str:
- start_time = time.time()
headers = self._prepare_headers()
json_data = {'url': url}
if params:
diff --git a/api/core/rag/extractor/markdown_extractor.py b/api/core/rag/extractor/markdown_extractor.py
index 91c687bac9..faa1e64057 100644
--- a/api/core/rag/extractor/markdown_extractor.py
+++ b/api/core/rag/extractor/markdown_extractor.py
@@ -18,8 +18,8 @@ class MarkdownExtractor(BaseExtractor):
def __init__(
self,
file_path: str,
- remove_hyperlinks: bool = True,
- remove_images: bool = True,
+ remove_hyperlinks: bool = False,
+ remove_images: bool = False,
encoding: Optional[str] = None,
autodetect_encoding: bool = True,
):
diff --git a/api/core/tools/docs/en_US/advanced_scale_out.md b/api/core/tools/docs/en_US/advanced_scale_out.md
index 56c8509785..644ad29129 100644
--- a/api/core/tools/docs/en_US/advanced_scale_out.md
+++ b/api/core/tools/docs/en_US/advanced_scale_out.md
@@ -8,7 +8,7 @@ We have defined a series of helper methods in the `Tool` class to help developer
### Message Return
-Dify supports various message types such as `text`, `link`, `image`, and `file BLOB`. You can return different types of messages to the LLM and users through the following interfaces.
+Dify supports various message types such as `text`, `link`, `json`, `image`, and `file BLOB`. You can return different types of messages to the LLM and users through the following interfaces.
Please note, some parameters in the following interfaces will be introduced in later sections.
@@ -67,6 +67,18 @@ If you need to return the raw data of a file, such as images, audio, video, PPT,
"""
```
+#### JSON
+If you need to return formatted JSON, you can use the following interface. This is commonly used for data transmission between nodes in a workflow; in agent mode, most LLMs can also read and understand JSON.
+
+- `object` A Python dictionary object will be automatically serialized into JSON
+
+```python
+ def create_json_message(self, object: dict) -> ToolInvokeMessage:
+ """
+ create a json message
+ """
+```
+
### Shortcut Tools
In large model applications, we have two common needs:
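
A hedged usage sketch for the `create_json_message` interface added in the hunk above; the demo class and its helper body are illustrative stand-ins, not the real `Tool` base class:

```python
# Illustrative only: returning a JSON message from a tool's _invoke.
class DemoTool:
    def create_json_message(self, object: dict) -> dict:
        # Stand-in for Tool.create_json_message, which wraps the dict in a
        # ToolInvokeMessage of type JSON for downstream workflow nodes.
        return {"type": "json", "message": object}

    def _invoke(self, user_id: str, tool_parameters: dict):
        stats = {"query": tool_parameters.get("query"), "hits": 3}
        return self.create_json_message(object=stats)


print(DemoTool()._invoke("u1", {"query": "dify"}))
# -> {'type': 'json', 'message': {'query': 'dify', 'hits': 3}}
```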
diff --git a/api/core/tools/docs/en_US/tool_scale_out.md b/api/core/tools/docs/en_US/tool_scale_out.md
index f75c91cad6..121b7a5a76 100644
--- a/api/core/tools/docs/en_US/tool_scale_out.md
+++ b/api/core/tools/docs/en_US/tool_scale_out.md
@@ -145,19 +145,25 @@ parameters: # Parameter list
- The `identity` field is mandatory, it contains the basic information of the tool, including name, author, label, description, etc.
- `parameters` Parameter list
- - `name` Parameter name, unique, no duplication with other parameters
- - `type` Parameter type, currently supports `string`, `number`, `boolean`, `select`, `secret-input` four types, corresponding to string, number, boolean, drop-down box, and encrypted input box, respectively. For sensitive information, we recommend using `secret-input` type
- - `required` Required or not
+  - `name` (Mandatory) Parameter name; it must be unique and must not duplicate any other parameter's name.
+  - `type` (Mandatory) Parameter type, currently supporting five types: `string`, `number`, `boolean`, `select`, and `secret-input`, corresponding to string, number, boolean, drop-down box, and encrypted input box, respectively. For sensitive information, we recommend using the `secret-input` type
+  - `label` (Mandatory) Parameter label, for frontend display
+  - `form` (Mandatory) Form type, currently supporting two types: `llm` and `form`.
+    - In an agent app, `llm` indicates that the parameter is inferred by the LLM itself, while `form` indicates that the parameter can be pre-set for the tool.
+    - In a workflow app, both `llm` and `form` parameters are filled in on the frontend, but the values of `llm` parameters are used as input variables of the tool node.
+  - `required` Indicates whether the parameter is required
- In `llm` mode, if the parameter is required, the Agent is required to infer this parameter
- In `form` mode, if the parameter is required, the user is required to fill in this parameter on the frontend before the conversation starts
- `options` Parameter options
- In `llm` mode, Dify will pass all options to LLM, LLM can infer based on these options
- In `form` mode, when `type` is `select`, the frontend will display these options
- `default` Default value
- - `label` Parameter label, for frontend display
+  - `min` Minimum value; can be set when the parameter type is `number`.
+  - `max` Maximum value; can be set when the parameter type is `number`.
+  - `placeholder` The prompt text for the input box. It can be set when the form type is `form` and the parameter type is `string`, `number`, or `secret-input`. It supports multiple languages.
- `human_description` Introduction for frontend display, supports multiple languages
- `llm_description` Introduction passed to LLM, in order to make LLM better understand this parameter, we suggest to write as detailed information about this parameter as possible here, so that LLM can understand this parameter
- - `form` Form type, currently supports `llm`, `form` two types, corresponding to Agent self-inference and frontend filling
+
## 4. Add Tool Logic
@@ -196,7 +202,7 @@ The overall logic of the tool is in the `_invoke` method, this method accepts tw
### Return Data
-When the tool returns, you can choose to return one message or multiple messages, here we return one message, using `create_text_message` and `create_link_message` can create a text message or a link message.
+When the tool returns, you can return either a single message or multiple messages. Here we return a single message: `create_text_message` and `create_link_message` create a text message or a link message respectively. To return multiple messages, build a list, for example `[self.create_text_message('msg1'), self.create_text_message('msg2')]`.
## 5. Add Provider Code
@@ -205,8 +211,6 @@ Finally, we need to create a provider class under the provider module to impleme
Create `google.py` under the `google` module, the content is as follows.
```python
-from core.tools.entities.tool_entities import ToolInvokeMessage, ToolProviderType
-from core.tools.tool.tool import Tool
from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController
from core.tools.errors import ToolProviderCredentialValidationError
diff --git a/api/core/tools/docs/zh_Hans/advanced_scale_out.md b/api/core/tools/docs/zh_Hans/advanced_scale_out.md
index 3a760e7a72..93f81b033d 100644
--- a/api/core/tools/docs/zh_Hans/advanced_scale_out.md
+++ b/api/core/tools/docs/zh_Hans/advanced_scale_out.md
@@ -8,7 +8,7 @@
### 消息返回
-Dify支持`文本` `链接` `图片` `文件BLOB` 等多种消息类型,你可以通过以下几个接口返回不同类型的消息给LLM和用户。
+Dify支持`文本` `链接` `图片` `文件BLOB` `JSON` 等多种消息类型,你可以通过以下几个接口返回不同类型的消息给LLM和用户。
注意,在下面的接口中的部分参数将在后面的章节中介绍。
@@ -67,6 +67,18 @@ Dify支持`文本` `链接` `图片` `文件BLOB` 等多种消息类型,你可
"""
```
+#### JSON
+如果你需要返回一个格式化的JSON,可以使用以下接口。这通常用于workflow中的节点间的数据传递,当然agent模式中,大部分大模型也都能够阅读和理解JSON。
+
+- `object` 一个Python的字典对象,会被自动序列化为JSON
+
+```python
+ def create_json_message(self, object: dict) -> ToolInvokeMessage:
+ """
+ create a json message
+ """
+```
+
### 快捷工具
在大模型应用中,我们有两种常见的需求:
@@ -97,8 +109,8 @@ Dify支持`文本` `链接` `图片` `文件BLOB` 等多种消息类型,你可
```python
def get_url(self, url: str, user_agent: str = None) -> str:
"""
- get url
- """ the crawled result
+ get url from the crawled result
+ """
```
### 变量池
diff --git a/api/core/tools/docs/zh_Hans/tool_scale_out.md b/api/core/tools/docs/zh_Hans/tool_scale_out.md
index 20f0f935e8..06a8d9a4f9 100644
--- a/api/core/tools/docs/zh_Hans/tool_scale_out.md
+++ b/api/core/tools/docs/zh_Hans/tool_scale_out.md
@@ -140,8 +140,12 @@ parameters: # 参数列表
- `identity` 字段是必须的,它包含了工具的基本信息,包括名称、作者、标签、描述等
- `parameters` 参数列表
- - `name` 参数名称,唯一,不允许和其他参数重名
- - `type` 参数类型,目前支持`string`、`number`、`boolean`、`select`、`secret-input` 五种类型,分别对应字符串、数字、布尔值、下拉框、加密输入框,对于敏感信息,我们建议使用`secret-input`类型
+ - `name` (必填)参数名称,唯一,不允许和其他参数重名
+ - `type` (必填)参数类型,目前支持`string`、`number`、`boolean`、`select`、`secret-input` 五种类型,分别对应字符串、数字、布尔值、下拉框、加密输入框,对于敏感信息,我们建议使用`secret-input`类型
+ - `label`(必填)参数标签,用于前端展示
+ - `form` (必填)表单类型,目前支持`llm`、`form`两种类型
+ - 在Agent应用中,`llm`表示该参数LLM自行推理,`form`表示要使用该工具可提前设定的参数
+ - 在workflow应用中,`llm`和`form`均需要前端填写,但`llm`的参数会做为工具节点的输入变量
- `required` 是否必填
- 在`llm`模式下,如果参数为必填,则会要求Agent必须要推理出这个参数
- 在`form`模式下,如果参数为必填,则会要求用户在对话开始前在前端填写这个参数
@@ -149,10 +153,12 @@ parameters: # 参数列表
- 在`llm`模式下,Dify会将所有选项传递给LLM,LLM可以根据这些选项进行推理
- 在`form`模式下,`type`为`select`时,前端会展示这些选项
- `default` 默认值
- - `label` 参数标签,用于前端展示
+ - `min` 最小值,当参数类型为`number`时可以设定
+ - `max` 最大值,当参数类型为`number`时可以设定
- `human_description` 用于前端展示的介绍,支持多语言
+ - `placeholder` 字段输入框的提示文字,在表单类型为`form`,参数类型为`string`、`number`、`secret-input`时,可以设定,支持多语言
- `llm_description` 传递给LLM的介绍,为了使得LLM更好理解这个参数,我们建议在这里写上关于这个参数尽可能详细的信息,让LLM能够理解这个参数
- - `form` 表单类型,目前支持`llm`、`form`两种类型,分别对应Agent自行推理和前端填写
+
## 4. 准备工具代码
当完成工具的配置以后,我们就可以开始编写工具代码了,主要用于实现工具的逻辑。
@@ -176,7 +182,6 @@ class GoogleSearchTool(BuiltinTool):
query = tool_parameters['query']
result_type = tool_parameters['result_type']
api_key = self.runtime.credentials['serpapi_api_key']
- # TODO: search with serpapi
result = SerpAPI(api_key).run(query, result_type=result_type)
if result_type == 'text':
@@ -188,7 +193,7 @@ class GoogleSearchTool(BuiltinTool):
工具的整体逻辑都在`_invoke`方法中,这个方法接收两个参数:`user_id`和`tool_parameters`,分别表示用户ID和工具参数
### 返回数据
-在工具返回时,你可以选择返回一个消息或者多个消息,这里我们返回一个消息,使用`create_text_message`和`create_link_message`可以创建一个文本消息或者一个链接消息。
+在工具返回时,你可以选择返回一条消息或者多个消息,这里我们返回一条消息,使用`create_text_message`和`create_link_message`可以创建一条文本消息或者一条链接消息。如需返回多条消息,可以使用列表构建,例如`[self.create_text_message('msg1'), self.create_text_message('msg2')]`
## 5. 准备供应商代码
最后,我们需要在供应商模块下创建一个供应商类,用于实现供应商的凭据验证逻辑,如果凭据验证失败,将会抛出`ToolProviderCredentialValidationError`异常。
@@ -196,8 +201,6 @@ class GoogleSearchTool(BuiltinTool):
在`google`模块下创建`google.py`,内容如下。
```python
-from core.tools.entities.tool_entities import ToolInvokeMessage, ToolProviderType
-from core.tools.tool.tool import Tool
from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController
from core.tools.errors import ToolProviderCredentialValidationError
diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py
index d00e89d5cd..569a1d3238 100644
--- a/api/core/tools/entities/tool_entities.py
+++ b/api/core/tools/entities/tool_entities.py
@@ -142,7 +142,8 @@ class ToolParameter(BaseModel):
name: str = Field(..., description="The name of the parameter")
label: I18nObject = Field(..., description="The label presented to the user")
- human_description: I18nObject = Field(..., description="The description presented to the user")
+ human_description: Optional[I18nObject] = Field(None, description="The description presented to the user")
+ placeholder: Optional[I18nObject] = Field(None, description="The placeholder presented to the user")
type: ToolParameterType = Field(..., description="The type of the parameter")
form: ToolParameterForm = Field(..., description="The form of the parameter, schema/form/llm")
llm_description: Optional[str] = None
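
With `human_description` now optional and `placeholder` added, a parameter can be declared with just a placeholder hint. A hedged construction sketch (the nested enum names and the `I18nObject` fields are assumed from the surrounding entities module):

```python
# Sketch: ToolParameter with the new optional placeholder field.
from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_entities import ToolParameter

param = ToolParameter(
    name='query',
    label=I18nObject(en_US='Query', zh_Hans='查询'),
    placeholder=I18nObject(en_US='Enter a keyword', zh_Hans='输入关键词'),
    type=ToolParameter.ToolParameterType.STRING,
    form=ToolParameter.ToolParameterForm.LLM,
)
print(param.placeholder.en_US)  # human_description may now be omitted
```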
diff --git a/api/core/tools/provider/builtin/cogview/__init__.py b/api/core/tools/provider/builtin/cogview/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/core/tools/provider/builtin/cogview/_assets/icon.png b/api/core/tools/provider/builtin/cogview/_assets/icon.png
new file mode 100644
index 0000000000..f0c1c24a02
Binary files /dev/null and b/api/core/tools/provider/builtin/cogview/_assets/icon.png differ
diff --git a/api/core/tools/provider/builtin/cogview/cogview.py b/api/core/tools/provider/builtin/cogview/cogview.py
new file mode 100644
index 0000000000..801817ec06
--- /dev/null
+++ b/api/core/tools/provider/builtin/cogview/cogview.py
@@ -0,0 +1,27 @@
+""" Provide the input parameters type for the cogview provider class """
+from typing import Any
+
+from core.tools.errors import ToolProviderCredentialValidationError
+from core.tools.provider.builtin.cogview.tools.cogview3 import CogView3Tool
+from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController
+
+
+class COGVIEWProvider(BuiltinToolProviderController):
+ """ cogview provider """
+ def _validate_credentials(self, credentials: dict[str, Any]) -> None:
+ try:
+ CogView3Tool().fork_tool_runtime(
+ runtime={
+ "credentials": credentials,
+ }
+ ).invoke(
+ user_id='',
+ tool_parameters={
+ "prompt": "一个城市在水晶瓶中欢快生活的场景,水彩画风格,展现出微观与珠宝般的美丽。",
+ "size": "square",
+ "n": 1
+ },
+ )
+ except Exception as e:
+ raise ToolProviderCredentialValidationError(str(e)) from e
+
\ No newline at end of file
diff --git a/api/core/tools/provider/builtin/cogview/cogview.yaml b/api/core/tools/provider/builtin/cogview/cogview.yaml
new file mode 100644
index 0000000000..374b0e98d9
--- /dev/null
+++ b/api/core/tools/provider/builtin/cogview/cogview.yaml
@@ -0,0 +1,61 @@
+identity:
+ author: Waffle
+ name: cogview
+ label:
+ en_US: CogView
+ zh_Hans: CogView 绘画
+ pt_BR: CogView
+ description:
+ en_US: CogView art
+ zh_Hans: CogView 绘画
+ pt_BR: CogView art
+ icon: icon.png
+ tags:
+ - image
+ - productivity
+credentials_for_provider:
+ zhipuai_api_key:
+ type: secret-input
+ required: true
+ label:
+ en_US: ZhipuAI API key
+ zh_Hans: ZhipuAI API key
+ pt_BR: ZhipuAI API key
+ help:
+ en_US: Please input your ZhipuAI API key
+ zh_Hans: 请输入你的 ZhipuAI API key
+ pt_BR: Please input your ZhipuAI API key
+ placeholder:
+ en_US: Please input your ZhipuAI API key
+ zh_Hans: 请输入你的 ZhipuAI API key
+ pt_BR: Please input your ZhipuAI API key
+  zhipuai_organization_id:
+ type: text-input
+ required: false
+ label:
+ en_US: ZhipuAI organization ID
+ zh_Hans: ZhipuAI organization ID
+ pt_BR: ZhipuAI organization ID
+ help:
+ en_US: Please input your ZhipuAI organization ID
+ zh_Hans: 请输入你的 ZhipuAI organization ID
+ pt_BR: Please input your ZhipuAI organization ID
+ placeholder:
+ en_US: Please input your ZhipuAI organization ID
+ zh_Hans: 请输入你的 ZhipuAI organization ID
+ pt_BR: Please input your ZhipuAI organization ID
+ zhipuai_base_url:
+ type: text-input
+ required: false
+ label:
+ en_US: ZhipuAI base URL
+ zh_Hans: ZhipuAI base URL
+ pt_BR: ZhipuAI base URL
+ help:
+ en_US: Please input your ZhipuAI base URL
+ zh_Hans: 请输入你的 ZhipuAI base URL
+ pt_BR: Please input your ZhipuAI base URL
+ placeholder:
+ en_US: Please input your ZhipuAI base URL
+ zh_Hans: 请输入你的 ZhipuAI base URL
+ pt_BR: Please input your ZhipuAI base URL
diff --git a/api/core/tools/provider/builtin/cogview/tools/cogview3.py b/api/core/tools/provider/builtin/cogview/tools/cogview3.py
new file mode 100644
index 0000000000..bb2720196f
--- /dev/null
+++ b/api/core/tools/provider/builtin/cogview/tools/cogview3.py
@@ -0,0 +1,68 @@
+import random
+from typing import Any, Union
+
+from core.model_runtime.model_providers.zhipuai.zhipuai_sdk._client import ZhipuAI
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+
+
+class CogView3Tool(BuiltinTool):
+ """ CogView3 Tool """
+
+ def _invoke(self,
+ user_id: str,
+ tool_parameters: dict[str, Any]
+ ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]:
+ """
+ Invoke CogView3 tool
+ """
+ client = ZhipuAI(
+ base_url=self.runtime.credentials['zhipuai_base_url'],
+ api_key=self.runtime.credentials['zhipuai_api_key'],
+ )
+ size_mapping = {
+ 'square': '1024x1024',
+ 'vertical': '1024x1792',
+ 'horizontal': '1792x1024',
+ }
+ # prompt
+ prompt = tool_parameters.get('prompt', '')
+ if not prompt:
+ return self.create_text_message('Please input prompt')
+ # get size
+ size = size_mapping[tool_parameters.get('size', 'square')]
+ # get n
+ n = tool_parameters.get('n', 1)
+ # get quality
+ quality = tool_parameters.get('quality', 'standard')
+ if quality not in ['standard', 'hd']:
+ return self.create_text_message('Invalid quality')
+ # get style
+ style = tool_parameters.get('style', 'vivid')
+ if style not in ['natural', 'vivid']:
+ return self.create_text_message('Invalid style')
+ # set extra body
+ seed_id = tool_parameters.get('seed_id', self._generate_random_id(8))
+ extra_body = {'seed': seed_id}
+ response = client.images.generations(
+ prompt=prompt,
+ model="cogview-3",
+ size=size,
+ n=n,
+ extra_body=extra_body,
+ style=style,
+ quality=quality,
+ response_format='b64_json'
+ )
+ result = []
+ for image in response.data:
+ result.append(self.create_image_message(image=image.url))
+            result.append(self.create_text_message(
+                f'\nGenerated image with seed ID: {seed_id}'))
+ return result
+
+ @staticmethod
+ def _generate_random_id(length=8):
+ characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'
+ random_id = ''.join(random.choices(characters, k=length))
+ return random_id
diff --git a/api/core/tools/provider/builtin/cogview/tools/cogview3.yaml b/api/core/tools/provider/builtin/cogview/tools/cogview3.yaml
new file mode 100644
index 0000000000..ba0b271a1c
--- /dev/null
+++ b/api/core/tools/provider/builtin/cogview/tools/cogview3.yaml
@@ -0,0 +1,123 @@
+identity:
+ name: cogview3
+ author: Waffle
+ label:
+ en_US: CogView 3
+ zh_Hans: CogView 3 绘画
+ pt_BR: CogView 3
+ description:
+ en_US: CogView 3 is a powerful drawing tool that can draw the image you want based on your prompt
+ zh_Hans: CogView 3 是一个强大的绘画工具,它可以根据您的提示词绘制出您想要的图像
+ pt_BR: CogView 3 is a powerful drawing tool that can draw the image you want based on your prompt
+description:
+ human:
+    en_US: CogView 3 is a text-to-image tool
+    zh_Hans: CogView 3 是一个文本到图像的工具
+    pt_BR: CogView 3 is a text-to-image tool
+ llm: CogView 3 is a tool used to generate images from text
+parameters:
+ - name: prompt
+ type: string
+ required: true
+ label:
+ en_US: Prompt
+ zh_Hans: 提示词
+ pt_BR: Prompt
+ human_description:
+ en_US: Image prompt, you can check the official documentation of CogView 3
+ zh_Hans: 图像提示词,您可以查看CogView 3 的官方文档
+ pt_BR: Image prompt, you can check the official documentation of CogView 3
+    llm_description: Image prompt of CogView 3, you should describe the image you want to generate as a list of words, as detailed as possible
+ form: llm
+ - name: size
+ type: select
+ required: true
+ human_description:
+      en_US: Select the image size
+      zh_Hans: 选择图像大小
+      pt_BR: Select the image size
+ label:
+ en_US: Image size
+ zh_Hans: 图像大小
+ pt_BR: Image size
+ form: form
+ options:
+ - value: square
+ label:
+          en_US: Square(1024x1024)
+          zh_Hans: 方(1024x1024)
+          pt_BR: Square(1024x1024)
+ - value: vertical
+ label:
+ en_US: Vertical(1024x1792)
+ zh_Hans: 竖屏(1024x1792)
+ pt_BR: Vertical(1024x1792)
+ - value: horizontal
+ label:
+ en_US: Horizontal(1792x1024)
+ zh_Hans: 横屏(1792x1024)
+ pt_BR: Horizontal(1792x1024)
+ default: square
+ - name: n
+ type: number
+ required: true
+ human_description:
+      en_US: Select the number of images
+      zh_Hans: 选择图像数量
+      pt_BR: Select the number of images
+ label:
+ en_US: Number of images
+ zh_Hans: 图像数量
+ pt_BR: Number of images
+ form: form
+ min: 1
+ max: 1
+ default: 1
+ - name: quality
+ type: select
+ required: true
+ human_description:
+      en_US: Select the image quality
+      zh_Hans: 选择图像质量
+      pt_BR: Select the image quality
+ label:
+ en_US: Image quality
+ zh_Hans: 图像质量
+ pt_BR: Image quality
+ form: form
+ options:
+ - value: standard
+ label:
+ en_US: Standard
+ zh_Hans: 标准
+ pt_BR: Standard
+ - value: hd
+ label:
+ en_US: HD
+ zh_Hans: 高清
+ pt_BR: HD
+ default: standard
+ - name: style
+ type: select
+ required: true
+ human_description:
+      en_US: Select the image style
+      zh_Hans: 选择图像风格
+      pt_BR: Select the image style
+ label:
+ en_US: Image style
+ zh_Hans: 图像风格
+ pt_BR: Image style
+ form: form
+ options:
+ - value: vivid
+ label:
+ en_US: Vivid
+ zh_Hans: 生动
+ pt_BR: Vivid
+ - value: natural
+ label:
+ en_US: Natural
+ zh_Hans: 自然
+ pt_BR: Natural
+ default: vivid
diff --git a/api/core/tools/provider/builtin/firecrawl/firecrawl_appx.py b/api/core/tools/provider/builtin/firecrawl/firecrawl_appx.py
index 23cb659652..bfe3e7999d 100644
--- a/api/core/tools/provider/builtin/firecrawl/firecrawl_appx.py
+++ b/api/core/tools/provider/builtin/firecrawl/firecrawl_appx.py
@@ -1,3 +1,4 @@
+import logging
import time
from collections.abc import Mapping
from typing import Any
@@ -5,6 +6,7 @@ from typing import Any
import requests
from requests.exceptions import HTTPError
+logger = logging.getLogger(__name__)
class FirecrawlApp:
def __init__(self, api_key: str | None = None, base_url: str | None = None):
@@ -48,6 +50,7 @@ class FirecrawlApp:
headers = self._prepare_headers()
data = {'url': url, **kwargs}
response = self._request('POST', endpoint, data, headers)
+ logger.debug(f"Sent request to {endpoint=} body={data}")
if response is None:
raise HTTPError("Failed to scrape URL after multiple retries")
return response
@@ -57,6 +60,7 @@ class FirecrawlApp:
headers = self._prepare_headers()
data = {'query': query, **kwargs}
response = self._request('POST', endpoint, data, headers)
+ logger.debug(f"Sent request to {endpoint=} body={data}")
if response is None:
raise HTTPError("Failed to perform search after multiple retries")
return response
@@ -66,8 +70,9 @@ class FirecrawlApp:
):
endpoint = f'{self.base_url}/v0/crawl'
headers = self._prepare_headers(idempotency_key)
- data = {'url': url, **kwargs}
+ data = {'url': url, **kwargs['params']}
response = self._request('POST', endpoint, data, headers)
+ logger.debug(f"Sent request to {endpoint=} body={data}")
if response is None:
raise HTTPError("Failed to initiate crawl after multiple retries")
job_id: str = response['jobId']
diff --git a/api/core/tools/tool/api_tool.py b/api/core/tools/tool/api_tool.py
index c8b683f9ef..69e3dfa061 100644
--- a/api/core/tools/tool/api_tool.py
+++ b/api/core/tools/tool/api_tool.py
@@ -238,7 +238,7 @@ class ApiTool(Tool):
return int(value)
elif property['type'] == 'number':
# check if it is a float
- if '.' in value:
+ if '.' in str(value):
return float(value)
else:
return int(value)
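
A quick illustration of why the guard changed to `'.' in str(value)`: parsed parameter values may already arrive as numbers rather than strings, and a membership test on an `int` or `float` raises `TypeError` (toy values):

```python
# '.' in 3.14 raises TypeError; '.' in str(3.14) does not.
def to_number(value):
    if '.' in str(value):
        return float(value)
    return int(value)


print(to_number('3.14'), to_number(3.14), to_number(42))  # 3.14 3.14 42
```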
diff --git a/api/core/workflow/nodes/if_else/if_else_node.py b/api/core/workflow/nodes/if_else/if_else_node.py
index 95927d11e3..6176a75201 100644
--- a/api/core/workflow/nodes/if_else/if_else_node.py
+++ b/api/core/workflow/nodes/if_else/if_else_node.py
@@ -60,6 +60,8 @@ class IfElseNode(BaseNode):
final_result = all(group_result) if node_data.logical_operator == "and" else any(group_result)
+ selected_case_id = "true" if final_result else "false"
+
process_datas["condition_results"].append(
{
"group": "default",
@@ -78,11 +80,7 @@ class IfElseNode(BaseNode):
error=str(e)
)
- outputs = {
- "result": final_result
- }
- if node_data.cases:
- outputs["selected_case_id"] = selected_case_id
+ outputs = {"result": final_result, "selected_case_id": selected_case_id}
data = NodeRunResult(
status=WorkflowNodeExecutionStatus.SUCCEEDED,
diff --git a/api/extensions/storage/azure_storage.py b/api/extensions/storage/azure_storage.py
index 3403bc6171..af3e7ef849 100644
--- a/api/extensions/storage/azure_storage.py
+++ b/api/extensions/storage/azure_storage.py
@@ -38,9 +38,8 @@ class AzureStorage(BaseStorage):
def generate(filename: str = filename) -> Generator:
blob = client.get_blob_client(container=self.bucket_name, blob=filename)
blob_data = blob.download_blob()
- for chunk in blob_data.chunks():
- yield from chunk
- return generate()
+ yield from blob_data.chunks()
+ return generate(filename)
def download(self, filename, target_filepath):
client = self._sync_client()
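
The hunk above is a behavior fix, not just style: the old inner loop flattened each chunk into individual byte values (ints), while `yield from blob_data.chunks()` forwards whole chunks, which is what a streamed download should produce. A toy comparison, assuming `chunks()` yields `bytes` objects as Azure's blob client does:

```python
def chunks():
    yield b'ab'
    yield b'cd'


def old_generate():
    for chunk in chunks():
        yield from chunk  # yields ints 97, 98, 99, 100 (bytes of each chunk)


def new_generate():
    yield from chunks()   # yields b'ab', b'cd' (whole chunks)


print(list(old_generate()), list(new_generate()))
```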
diff --git a/api/poetry.lock b/api/poetry.lock
index 6acc65b58d..ae7cdbb32d 100644
--- a/api/poetry.lock
+++ b/api/poetry.lock
@@ -1398,6 +1398,96 @@ prompt-toolkit = ">=3.0.36"
[package.extras]
testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"]
+[[package]]
+name = "clickhouse-connect"
+version = "0.7.16"
+description = "ClickHouse Database Core Driver for Python, Pandas, and Superset"
+optional = false
+python-versions = "~=3.8"
+files = [
+ {file = "clickhouse-connect-0.7.16.tar.gz", hash = "sha256:253a2089efad5729903d00382f73fa8da2cbbfdb118db498cf708ee9f4a2134f"},
+ {file = "clickhouse_connect-0.7.16-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00413deb9e086aabf661d18ac3a3539f25eb773c3675f49353e0d7e6ef1205fc"},
+ {file = "clickhouse_connect-0.7.16-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:faadaf206ea7753782db017daedbf592e4edc7c71cb985aad787eb9dc516bf21"},
+ {file = "clickhouse_connect-0.7.16-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1db8f1168f33fda78adddb733913b211ddf648984d8fef8d934e30df876e5f23"},
+ {file = "clickhouse_connect-0.7.16-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fa630bf50fb064cc53b7ea5d862066476d3c6074003f6d39d2594fb1a7abf67"},
+ {file = "clickhouse_connect-0.7.16-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2cba9547dad41b2d333458615208a3c7db6f56a63473ffea2c05c44225ffa020"},
+ {file = "clickhouse_connect-0.7.16-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:480f7856fcf42a21f17886e0b42d70499067c865fc2a0ea7c0eb5c0bdca281a8"},
+ {file = "clickhouse_connect-0.7.16-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b65f3eb570cbcf9fa383b4e0925d1ceb3efd3deba42a435625cad75b3a9ff7f3"},
+ {file = "clickhouse_connect-0.7.16-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b78d3cc0fe42374bb9d5a05ba71578dc69f7e4b4c771e86dcf292ae0412265cc"},
+ {file = "clickhouse_connect-0.7.16-cp310-cp310-win32.whl", hash = "sha256:1cb76b26fcde1ba6a8ae68e1db1f9e42d458879a0d4d2c9843cc998f42f445ac"},
+ {file = "clickhouse_connect-0.7.16-cp310-cp310-win_amd64.whl", hash = "sha256:9298b344168271e952ea41021963ca1b81b9b3c38be8b036cb64a2556edbb4b7"},
+ {file = "clickhouse_connect-0.7.16-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ae39a765735cc6e786e5f9a0dba799e7f8ee0bbd5dfc5d5ff755dfa9dd13855"},
+ {file = "clickhouse_connect-0.7.16-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f32546f65dd234a49310cda454713a5f7fbc8ba978744e070355c7ea8819a5a"},
+ {file = "clickhouse_connect-0.7.16-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20865c81a5b378625a528ac8960e08cdca316147f87fad6deb9f16c0d5e5f62f"},
+ {file = "clickhouse_connect-0.7.16-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609c076261d779703bf29e7a27dafc8283153403ceab1ec23d50eb2acabc4b9d"},
+ {file = "clickhouse_connect-0.7.16-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e07862e75ac7419c5671384055f11ca5e76dc2c0be4a6f3aed7bf419997184bc"},
+ {file = "clickhouse_connect-0.7.16-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d5db7da6f20b9a49b288063de9b3224a56634f8cb94d19d435af518ed81872c3"},
+ {file = "clickhouse_connect-0.7.16-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:955c567ede68a10325045bb2adf1314ff569dfb7e52f6074c18182f3803279f6"},
+ {file = "clickhouse_connect-0.7.16-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df517bfe23d85f5aeeb17b262c06d0a5c24e0baea09688a96d02dc8589ef8b07"},
+ {file = "clickhouse_connect-0.7.16-cp311-cp311-win32.whl", hash = "sha256:7f2c6132fc90df6a8318abb9f257c2b777404908b7d168ac08235d516f65a663"},
+ {file = "clickhouse_connect-0.7.16-cp311-cp311-win_amd64.whl", hash = "sha256:ca1dba53da86691a11671d846988dc4f6ad02a66f5a0df9a87a46dc4ec9bb0a1"},
+ {file = "clickhouse_connect-0.7.16-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f8f7260073b6ee63e19d442ebb6954bc7741a5ce4ed563eb8074c8c6a0158eca"},
+ {file = "clickhouse_connect-0.7.16-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9b3dd93ada1099cb6df244d79973c811e90a4590685e78e60e8846914b3c261e"},
+ {file = "clickhouse_connect-0.7.16-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d3c3458bce25fe9c10e1dbf82dbeeeb2f04e382130f9811cc3bedf44c2028ca"},
+ {file = "clickhouse_connect-0.7.16-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcc302390b4ea975efd8d2ca53d295d40dc766179dd5e9fc158e808f01d9280d"},
+ {file = "clickhouse_connect-0.7.16-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a94f6d095d7174c55825e0b5c04b77897a1b2a8a8bbb38f3f773fd3113a7be27"},
+ {file = "clickhouse_connect-0.7.16-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6b7e2572993ef2e1dee5012875a7a2d08cede319e32ccdd2db90ed26a0d0c037"},
+ {file = "clickhouse_connect-0.7.16-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e9c35ee425309ed8ef63bae31e1d3c5f35706fa27ae2836e61e7cb9bbe7f00cb"},
+ {file = "clickhouse_connect-0.7.16-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:eb0471d5a32d07eaa37772871ee9e6b5eb37ab907c3c154833824ed68ee4795b"},
+ {file = "clickhouse_connect-0.7.16-cp312-cp312-win32.whl", hash = "sha256:b531ee18b4ce16f1d2b8f6249859cbd600f7e0f312f80dda8deb969791a90f17"},
+ {file = "clickhouse_connect-0.7.16-cp312-cp312-win_amd64.whl", hash = "sha256:38392308344770864843f7f8b914799684c13ce4b272d5a3a55e5512ff8a3ae0"},
+ {file = "clickhouse_connect-0.7.16-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:052ca80d66e49c94d103c9842d2a5b0ebf4610981b79164660ef6b1bdc4b5e85"},
+ {file = "clickhouse_connect-0.7.16-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b496059d145c68e956aa10cd04e5c7cb4e97312eb3f7829cec8f4f7024f8ced6"},
+ {file = "clickhouse_connect-0.7.16-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de1e423fc9c415b9fdcbb6f23eccae981e3f0f0cf142e518efec709bda7c1394"},
+ {file = "clickhouse_connect-0.7.16-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:555c64719cbc72675d58ea6dfc144fa8064ea1d673a54afd2d54e34c58f17c6b"},
+ {file = "clickhouse_connect-0.7.16-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0c3c063ab23df8f71a36505880bf5de6c18aee246938d787447e52b4d9d5531"},
+ {file = "clickhouse_connect-0.7.16-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5ed62e08cfe445d0430b91c26fb276e2a5175e456e9786594fb6e67c9ebd8c6c"},
+ {file = "clickhouse_connect-0.7.16-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9eb056bd14ca3c1d7e3edd7ca79ea970d45e5e536930dbb6179aeb965d5bc3d"},
+ {file = "clickhouse_connect-0.7.16-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:54e0a03b685ee6c138954846dafb6ec0e0baf8257f2587c61e34c017f3dc9d63"},
+ {file = "clickhouse_connect-0.7.16-cp38-cp38-win32.whl", hash = "sha256:d8402c3145387726bd19f916ca2890576be70c4493f030c068f6f03a75addff7"},
+ {file = "clickhouse_connect-0.7.16-cp38-cp38-win_amd64.whl", hash = "sha256:70e376d2ebc0f092fae35f7b50ff7296ee8ffd2dda3536238f6c39a5c949d115"},
+ {file = "clickhouse_connect-0.7.16-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cee4f91ad22401c3b96f5df3f3149ef2894e7c2d00b5abd9da80119e7b6592f7"},
+ {file = "clickhouse_connect-0.7.16-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a3009145f35e9ac2535dbd8fdbdc218abfe0971c9bc9b730eb5c3f6c40faeb5f"},
+ {file = "clickhouse_connect-0.7.16-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d0ef9f877ffbcb0f526ce9c35c657fc54930d043e45c077d9d886c0f1add727"},
+ {file = "clickhouse_connect-0.7.16-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc437b3ff2f7991b209b861a89c003ac1971c890775190178438780e967a9d3"},
+ {file = "clickhouse_connect-0.7.16-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ed836dcee4ac097bd83714abe0af987b1ef767675a555e7643d793164c3f1cc"},
+ {file = "clickhouse_connect-0.7.16-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4c4e0d173239c0b4594c8703fae5c8ba3241c4e0763a8cf436b94564692671f9"},
+ {file = "clickhouse_connect-0.7.16-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a17a348dd8c00df343a01128497e8c3a6ae431f13c7a88e363ac12c035316ce0"},
+ {file = "clickhouse_connect-0.7.16-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:805ae7ad39c043af13e2b5af45abb70330f0907749dc87ad4a2481a4ac209cc6"},
+ {file = "clickhouse_connect-0.7.16-cp39-cp39-win32.whl", hash = "sha256:38fc6ca1bd73cf4dcebd22fbb8dceda267908ff674fc57fbc23c3b5df9c21ac1"},
+ {file = "clickhouse_connect-0.7.16-cp39-cp39-win_amd64.whl", hash = "sha256:3dc67e99e40b5a8bc493a21016830b0f3800006a6038c1fd881f7cae6246cc44"},
+ {file = "clickhouse_connect-0.7.16-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b7f526fef71bd5265f47915340a6369a5b5685278b72b5aff281cc521a8ec376"},
+ {file = "clickhouse_connect-0.7.16-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e00f87ba68bbc63dd32d7a304fd629b759f24b09f88fbc2bac0a9ed1fe7b2938"},
+ {file = "clickhouse_connect-0.7.16-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09c84f3b64d6bebedcfbbd19e8369b3df2cb7d313afb2a0d64a3e151d344c1c1"},
+ {file = "clickhouse_connect-0.7.16-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d104ab78edee26e8cef056e2db83f03e1da918df0946e1ef1ad9a27a024dd0"},
+ {file = "clickhouse_connect-0.7.16-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cc1ad53e282ff5b4288fdfcf6df72cda542d9d997de5889d66a1f8e2b9f477f0"},
+ {file = "clickhouse_connect-0.7.16-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fddc99322054f5d3df8715ab3724bd36ac636f8ceaed4f5f3f60d377abd22d22"},
+ {file = "clickhouse_connect-0.7.16-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:765a2de98197d1b4f6424611ceaca2ae896a1d7093b943403973888cb7c144e6"},
+ {file = "clickhouse_connect-0.7.16-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1540e0a93e5f2147400f644606a399c91705066f05d5a91429616ee9812f4521"},
+ {file = "clickhouse_connect-0.7.16-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba928c4178b0d4a513e1b0ad32a464ab56cb1bc27736a7f41b32e4eb70eb08d6"},
+ {file = "clickhouse_connect-0.7.16-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a17ffc22e905081f002173b30959089de6987fd40c87e7794da9d978d723e610"},
+ {file = "clickhouse_connect-0.7.16-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:26df09787232b495285d8358db145b9770f472e2e30147912634c5b56392e73f"},
+ {file = "clickhouse_connect-0.7.16-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2a3ce33241441dc7c718c19e31645323e6c5da793d46bbb670fd4e8557b8605"},
+ {file = "clickhouse_connect-0.7.16-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29f9dc9cc1f4ec4a333bf119abb5cee13563e89bc990d4d77b8f43cf630e9fb1"},
+ {file = "clickhouse_connect-0.7.16-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a16a7ada11996a6fa0959c83e2e46ff32773e57eca40eff86176fd62a30054ca"},
+ {file = "clickhouse_connect-0.7.16-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ead20e1d4f3c5493dd075b7dc81b5d21be4b876aca6952e1c155824876c621f3"},
+]
+
+[package.dependencies]
+certifi = "*"
+lz4 = "*"
+pytz = "*"
+urllib3 = ">=1.26"
+zstandard = "*"
+
+[package.extras]
+arrow = ["pyarrow"]
+numpy = ["numpy"]
+orjson = ["orjson"]
+pandas = ["pandas"]
+sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"]
+tzlocal = ["tzlocal (>=4.0)"]
+
[[package]]
name = "clickhouse-driver"
version = "0.2.8"
@@ -4259,6 +4349,56 @@ html5 = ["html5lib"]
htmlsoup = ["BeautifulSoup4"]
source = ["Cython (>=3.0.7)"]
+[[package]]
+name = "lz4"
+version = "4.3.3"
+description = "LZ4 Bindings for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "lz4-4.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b891880c187e96339474af2a3b2bfb11a8e4732ff5034be919aa9029484cd201"},
+ {file = "lz4-4.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:222a7e35137d7539c9c33bb53fcbb26510c5748779364014235afc62b0ec797f"},
+ {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f76176492ff082657ada0d0f10c794b6da5800249ef1692b35cf49b1e93e8ef7"},
+ {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1d18718f9d78182c6b60f568c9a9cec8a7204d7cb6fad4e511a2ef279e4cb05"},
+ {file = "lz4-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cdc60e21ec70266947a48839b437d46025076eb4b12c76bd47f8e5eb8a75dcc"},
+ {file = "lz4-4.3.3-cp310-cp310-win32.whl", hash = "sha256:c81703b12475da73a5d66618856d04b1307e43428a7e59d98cfe5a5d608a74c6"},
+ {file = "lz4-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:43cf03059c0f941b772c8aeb42a0813d68d7081c009542301637e5782f8a33e2"},
+ {file = "lz4-4.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:30e8c20b8857adef7be045c65f47ab1e2c4fabba86a9fa9a997d7674a31ea6b6"},
+ {file = "lz4-4.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7b1839f795315e480fb87d9bc60b186a98e3e5d17203c6e757611ef7dcef61"},
+ {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edfd858985c23523f4e5a7526ca6ee65ff930207a7ec8a8f57a01eae506aaee7"},
+ {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e9c410b11a31dbdc94c05ac3c480cb4b222460faf9231f12538d0074e56c563"},
+ {file = "lz4-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2507ee9c99dbddd191c86f0e0c8b724c76d26b0602db9ea23232304382e1f21"},
+ {file = "lz4-4.3.3-cp311-cp311-win32.whl", hash = "sha256:f180904f33bdd1e92967923a43c22899e303906d19b2cf8bb547db6653ea6e7d"},
+ {file = "lz4-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b14d948e6dce389f9a7afc666d60dd1e35fa2138a8ec5306d30cd2e30d36b40c"},
+ {file = "lz4-4.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e36cd7b9d4d920d3bfc2369840da506fa68258f7bb176b8743189793c055e43d"},
+ {file = "lz4-4.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:31ea4be9d0059c00b2572d700bf2c1bc82f241f2c3282034a759c9a4d6ca4dc2"},
+ {file = "lz4-4.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c9a6fd20767ccaf70649982f8f3eeb0884035c150c0b818ea660152cf3c809"},
+ {file = "lz4-4.3.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca8fccc15e3add173da91be8f34121578dc777711ffd98d399be35487c934bf"},
+ {file = "lz4-4.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d84b479ddf39fe3ea05387f10b779155fc0990125f4fb35d636114e1c63a2e"},
+ {file = "lz4-4.3.3-cp312-cp312-win32.whl", hash = "sha256:337cb94488a1b060ef1685187d6ad4ba8bc61d26d631d7ba909ee984ea736be1"},
+ {file = "lz4-4.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:5d35533bf2cee56f38ced91f766cd0038b6abf46f438a80d50c52750088be93f"},
+ {file = "lz4-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:363ab65bf31338eb364062a15f302fc0fab0a49426051429866d71c793c23394"},
+ {file = "lz4-4.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a136e44a16fc98b1abc404fbabf7f1fada2bdab6a7e970974fb81cf55b636d0"},
+ {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abc197e4aca8b63f5ae200af03eb95fb4b5055a8f990079b5bdf042f568469dd"},
+ {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56f4fe9c6327adb97406f27a66420b22ce02d71a5c365c48d6b656b4aaeb7775"},
+ {file = "lz4-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0e822cd7644995d9ba248cb4b67859701748a93e2ab7fc9bc18c599a52e4604"},
+ {file = "lz4-4.3.3-cp38-cp38-win32.whl", hash = "sha256:24b3206de56b7a537eda3a8123c644a2b7bf111f0af53bc14bed90ce5562d1aa"},
+ {file = "lz4-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:b47839b53956e2737229d70714f1d75f33e8ac26e52c267f0197b3189ca6de24"},
+ {file = "lz4-4.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6756212507405f270b66b3ff7f564618de0606395c0fe10a7ae2ffcbbe0b1fba"},
+ {file = "lz4-4.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee9ff50557a942d187ec85462bb0960207e7ec5b19b3b48949263993771c6205"},
+ {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b901c7784caac9a1ded4555258207d9e9697e746cc8532129f150ffe1f6ba0d"},
+ {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d9ec061b9eca86e4dcc003d93334b95d53909afd5a32c6e4f222157b50c071"},
+ {file = "lz4-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4c7bf687303ca47d69f9f0133274958fd672efaa33fb5bcde467862d6c621f0"},
+ {file = "lz4-4.3.3-cp39-cp39-win32.whl", hash = "sha256:054b4631a355606e99a42396f5db4d22046a3397ffc3269a348ec41eaebd69d2"},
+ {file = "lz4-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:eac9af361e0d98335a02ff12fb56caeb7ea1196cf1a49dbf6f17828a131da807"},
+ {file = "lz4-4.3.3.tar.gz", hash = "sha256:01fe674ef2889dbb9899d8a67361e0c4a2c833af5aeb37dd505727cf5d2a131e"},
+]
+
+[package.extras]
+docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"]
+flake8 = ["flake8"]
+tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"]
+
[[package]]
name = "mailchimp-transactional"
version = "1.0.56"
@@ -9190,7 +9330,68 @@ docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx-rtd-theme"]
test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
+[[package]]
+name = "zstandard"
+version = "0.22.0"
+description = "Zstandard bindings for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "zstandard-0.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:275df437ab03f8c033b8a2c181e51716c32d831082d93ce48002a5227ec93019"},
+ {file = "zstandard-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ac9957bc6d2403c4772c890916bf181b2653640da98f32e04b96e4d6fb3252a"},
+ {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe3390c538f12437b859d815040763abc728955a52ca6ff9c5d4ac707c4ad98e"},
+ {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1958100b8a1cc3f27fa21071a55cb2ed32e9e5df4c3c6e661c193437f171cba2"},
+ {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e1856c8313bc688d5df069e106a4bc962eef3d13372020cc6e3ebf5e045202"},
+ {file = "zstandard-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1a90ba9a4c9c884bb876a14be2b1d216609385efb180393df40e5172e7ecf356"},
+ {file = "zstandard-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3db41c5e49ef73641d5111554e1d1d3af106410a6c1fb52cf68912ba7a343a0d"},
+ {file = "zstandard-0.22.0-cp310-cp310-win32.whl", hash = "sha256:d8593f8464fb64d58e8cb0b905b272d40184eac9a18d83cf8c10749c3eafcd7e"},
+ {file = "zstandard-0.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:f1a4b358947a65b94e2501ce3e078bbc929b039ede4679ddb0460829b12f7375"},
+ {file = "zstandard-0.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:589402548251056878d2e7c8859286eb91bd841af117dbe4ab000e6450987e08"},
+ {file = "zstandard-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a97079b955b00b732c6f280d5023e0eefe359045e8b83b08cf0333af9ec78f26"},
+ {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:445b47bc32de69d990ad0f34da0e20f535914623d1e506e74d6bc5c9dc40bb09"},
+ {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33591d59f4956c9812f8063eff2e2c0065bc02050837f152574069f5f9f17775"},
+ {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:888196c9c8893a1e8ff5e89b8f894e7f4f0e64a5af4d8f3c410f0319128bb2f8"},
+ {file = "zstandard-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:53866a9d8ab363271c9e80c7c2e9441814961d47f88c9bc3b248142c32141d94"},
+ {file = "zstandard-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ac59d5d6910b220141c1737b79d4a5aa9e57466e7469a012ed42ce2d3995e88"},
+ {file = "zstandard-0.22.0-cp311-cp311-win32.whl", hash = "sha256:2b11ea433db22e720758cba584c9d661077121fcf60ab43351950ded20283440"},
+ {file = "zstandard-0.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:11f0d1aab9516a497137b41e3d3ed4bbf7b2ee2abc79e5c8b010ad286d7464bd"},
+ {file = "zstandard-0.22.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6c25b8eb733d4e741246151d895dd0308137532737f337411160ff69ca24f93a"},
+ {file = "zstandard-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f9b2cde1cd1b2a10246dbc143ba49d942d14fb3d2b4bccf4618d475c65464912"},
+ {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a88b7df61a292603e7cd662d92565d915796b094ffb3d206579aaebac6b85d5f"},
+ {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466e6ad8caefb589ed281c076deb6f0cd330e8bc13c5035854ffb9c2014b118c"},
+ {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1d67d0d53d2a138f9e29d8acdabe11310c185e36f0a848efa104d4e40b808e4"},
+ {file = "zstandard-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:39b2853efc9403927f9065cc48c9980649462acbdf81cd4f0cb773af2fd734bc"},
+ {file = "zstandard-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8a1b2effa96a5f019e72874969394edd393e2fbd6414a8208fea363a22803b45"},
+ {file = "zstandard-0.22.0-cp312-cp312-win32.whl", hash = "sha256:88c5b4b47a8a138338a07fc94e2ba3b1535f69247670abfe422de4e0b344aae2"},
+ {file = "zstandard-0.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:de20a212ef3d00d609d0b22eb7cc798d5a69035e81839f549b538eff4105d01c"},
+ {file = "zstandard-0.22.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d75f693bb4e92c335e0645e8845e553cd09dc91616412d1d4650da835b5449df"},
+ {file = "zstandard-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:36a47636c3de227cd765e25a21dc5dace00539b82ddd99ee36abae38178eff9e"},
+ {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68953dc84b244b053c0d5f137a21ae8287ecf51b20872eccf8eaac0302d3e3b0"},
+ {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2612e9bb4977381184bb2463150336d0f7e014d6bb5d4a370f9a372d21916f69"},
+ {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23d2b3c2b8e7e5a6cb7922f7c27d73a9a615f0a5ab5d0e03dd533c477de23004"},
+ {file = "zstandard-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d43501f5f31e22baf822720d82b5547f8a08f5386a883b32584a185675c8fbf"},
+ {file = "zstandard-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a493d470183ee620a3df1e6e55b3e4de8143c0ba1b16f3ded83208ea8ddfd91d"},
+ {file = "zstandard-0.22.0-cp38-cp38-win32.whl", hash = "sha256:7034d381789f45576ec3f1fa0e15d741828146439228dc3f7c59856c5bcd3292"},
+ {file = "zstandard-0.22.0-cp38-cp38-win_amd64.whl", hash = "sha256:d8fff0f0c1d8bc5d866762ae95bd99d53282337af1be9dc0d88506b340e74b73"},
+ {file = "zstandard-0.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2fdd53b806786bd6112d97c1f1e7841e5e4daa06810ab4b284026a1a0e484c0b"},
+ {file = "zstandard-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:73a1d6bd01961e9fd447162e137ed949c01bdb830dfca487c4a14e9742dccc93"},
+ {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9501f36fac6b875c124243a379267d879262480bf85b1dbda61f5ad4d01b75a3"},
+ {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48f260e4c7294ef275744210a4010f116048e0c95857befb7462e033f09442fe"},
+ {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959665072bd60f45c5b6b5d711f15bdefc9849dd5da9fb6c873e35f5d34d8cfb"},
+ {file = "zstandard-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d22fdef58976457c65e2796e6730a3ea4a254f3ba83777ecfc8592ff8d77d303"},
+ {file = "zstandard-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a7ccf5825fd71d4542c8ab28d4d482aace885f5ebe4b40faaa290eed8e095a4c"},
+ {file = "zstandard-0.22.0-cp39-cp39-win32.whl", hash = "sha256:f058a77ef0ece4e210bb0450e68408d4223f728b109764676e1a13537d056bb0"},
+ {file = "zstandard-0.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:e9e9d4e2e336c529d4c435baad846a181e39a982f823f7e4495ec0b0ec8538d2"},
+ {file = "zstandard-0.22.0.tar.gz", hash = "sha256:8226a33c542bcb54cd6bd0a366067b610b41713b64c9abec1bc4533d69f51e70"},
+]
+
+[package.dependencies]
+cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""}
+
+[package.extras]
+cffi = ["cffi (>=1.11)"]
+
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
-content-hash = "7dc35227a8e2545597f7a9660850e9adb2569d38f97d72dbfdcdff88f3a38bdb"
+content-hash = "76374a3483905c3219821ec31cefd75c64e0ccb3f0c5424acf1b8a1322587411"
diff --git a/api/pyproject.toml b/api/pyproject.toml
index 74fad2054a..88721b5a35 100644
--- a/api/pyproject.toml
+++ b/api/pyproject.toml
@@ -211,6 +211,7 @@ qdrant-client = "1.7.3"
weaviate-client = "~3.21.0"
alibabacloud_gpdb20160503 = "~3.8.0"
alibabacloud_tea_openapi = "~0.3.9"
+clickhouse-connect = "~0.7.16"
############################################################
# Transparent dependencies required by main dependencies
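For context on how this new dependency is exercised: MyScale speaks the ClickHouse HTTP protocol, so the vector store reaches it through `clickhouse_connect`. A minimal connectivity sketch, assuming the `MYSCALE_*` defaults from `api/.env.example` (the actual client wiring lives in the PR's `MyScaleVector` implementation, which is not part of this hunk):

```python
import clickhouse_connect

# Sketch only: these literals mirror the MYSCALE_* defaults above;
# the PR builds the client from MyScaleConfig rather than hard-coding values.
client = clickhouse_connect.get_client(
    host='127.0.0.1',
    port=8123,
    username='default',
    password='',
    database='default',
)
# ClickHouse-compatible servers accept plain SQL over the same client.
print(client.command('SELECT version()'))
```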
diff --git a/api/schedule/clean_embedding_cache_task.py b/api/schedule/clean_embedding_cache_task.py
index 0daf651d2f..3d49b487c6 100644
--- a/api/schedule/clean_embedding_cache_task.py
+++ b/api/schedule/clean_embedding_cache_task.py
@@ -2,10 +2,10 @@ import datetime
import time
import click
-from flask import current_app
from werkzeug.exceptions import NotFound
import app
+from configs import dify_config
from extensions.ext_database import db
from models.dataset import Embedding
@@ -13,7 +13,7 @@ from models.dataset import Embedding
@app.celery.task(queue='dataset')
def clean_embedding_cache_task():
click.echo(click.style('Start clean embedding cache.', fg='green'))
- clean_days = int(current_app.config.get('CLEAN_DAY_SETTING'))
+ clean_days = int(dify_config.CLEAN_DAY_SETTING)
start_at = time.perf_counter()
thirty_days_ago = datetime.datetime.now() - datetime.timedelta(days=clean_days)
page = 1
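The remaining hunks below repeat this same migration: the module-level `dify_config` (a typed Pydantic settings object) replaces `current_app.config` lookups, removing the dependency on a Flask app context along with the defensive `int(...)` casts and `.get()` fallbacks. A minimal sketch of the pattern under assumed names (the real settings class lives in `api/configs` and defines many more fields):

```python
# Sketch of the pattern with one assumed field; the real class in
# api/configs declares validated types and defaults for every setting.
from pydantic_settings import BaseSettings


class DifyConfigSketch(BaseSettings):
    CLEAN_DAY_SETTING: int = 30  # assumed default, for illustration only


dify_config = DifyConfigSketch()

# Attribute access is already typed, so no cast and no app context needed.
clean_days = dify_config.CLEAN_DAY_SETTING
```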
diff --git a/api/schedule/clean_unused_datasets_task.py b/api/schedule/clean_unused_datasets_task.py
index cdcb3121b9..2033791ace 100644
--- a/api/schedule/clean_unused_datasets_task.py
+++ b/api/schedule/clean_unused_datasets_task.py
@@ -2,10 +2,10 @@ import datetime
import time
import click
-from flask import current_app
from werkzeug.exceptions import NotFound
import app
+from configs import dify_config
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from models.dataset import Dataset, DatasetQuery, Document
@@ -14,7 +14,7 @@ from models.dataset import Dataset, DatasetQuery, Document
@app.celery.task(queue='dataset')
def clean_unused_datasets_task():
click.echo(click.style('Start clean unused datasets indexes.', fg='green'))
- clean_days = int(current_app.config.get('CLEAN_DAY_SETTING'))
+ clean_days = int(dify_config.CLEAN_DAY_SETTING)
start_at = time.perf_counter()
thirty_days_ago = datetime.datetime.now() - datetime.timedelta(days=clean_days)
page = 1
diff --git a/api/services/account_service.py b/api/services/account_service.py
index 3fd2b5c627..0bcbe8b2c0 100644
--- a/api/services/account_service.py
+++ b/api/services/account_service.py
@@ -6,10 +6,10 @@ from datetime import datetime, timedelta, timezone
from hashlib import sha256
from typing import Any, Optional
-from flask import current_app
from sqlalchemy import func
from werkzeug.exceptions import Unauthorized
+from configs import dify_config
from constants.languages import language_timezone_mapping, languages
from events.tenant_event import tenant_was_created
from extensions.ext_redis import redis_client
@@ -80,7 +80,7 @@ class AccountService:
payload = {
"user_id": account.id,
"exp": datetime.now(timezone.utc).replace(tzinfo=None) + exp,
- "iss": current_app.config['EDITION'],
+ "iss": dify_config.EDITION,
"sub": 'Console API Passport',
}
@@ -524,7 +524,7 @@ class RegisterService:
TenantService.create_owner_tenant_if_not_exist(account)
dify_setup = DifySetup(
- version=current_app.config['CURRENT_VERSION']
+ version=dify_config.CURRENT_VERSION
)
db.session.add(dify_setup)
db.session.commit()
@@ -559,7 +559,7 @@ class RegisterService:
if open_id is not None or provider is not None:
AccountService.link_account_integrate(provider, open_id, account)
- if current_app.config['EDITION'] != 'SELF_HOSTED':
+ if dify_config.EDITION != 'SELF_HOSTED':
tenant = TenantService.create_tenant(f"{account.name}'s Workspace")
TenantService.create_tenant_member(tenant, account, role='owner')
@@ -623,7 +623,7 @@ class RegisterService:
'email': account.email,
'workspace_id': tenant.id,
}
- expiryHours = current_app.config['INVITE_EXPIRY_HOURS']
+ expiryHours = dify_config.INVITE_EXPIRY_HOURS
redis_client.setex(
cls._get_invitation_token_key(token),
expiryHours * 60 * 60,
diff --git a/api/services/app_generate_service.py b/api/services/app_generate_service.py
index 3acd3becdb..e894570b97 100644
--- a/api/services/app_generate_service.py
+++ b/api/services/app_generate_service.py
@@ -1,6 +1,7 @@
from collections.abc import Generator
from typing import Any, Union
+from configs import dify_config
from core.app.apps.advanced_chat.app_generator import AdvancedChatAppGenerator
from core.app.apps.agent_chat.app_generator import AgentChatAppGenerator
from core.app.apps.chat.app_generator import ChatAppGenerator
@@ -89,8 +90,7 @@ class AppGenerateService:
def _get_max_active_requests(app_model: App) -> int:
max_active_requests = app_model.max_active_requests
if app_model.max_active_requests is None:
- from flask import current_app
- max_active_requests = int(current_app.config['APP_MAX_ACTIVE_REQUESTS'])
+ max_active_requests = int(dify_config.APP_MAX_ACTIVE_REQUESTS)
return max_active_requests
@classmethod
diff --git a/api/services/app_service.py b/api/services/app_service.py
index 03986db2ae..ca3c8d4fdc 100644
--- a/api/services/app_service.py
+++ b/api/services/app_service.py
@@ -4,10 +4,10 @@ from datetime import datetime, timezone
from typing import cast
import yaml
-from flask import current_app
from flask_login import current_user
from flask_sqlalchemy.pagination import Pagination
+from configs import dify_config
from constants.model_template import default_app_templates
from core.agent.entities import AgentToolEntity
from core.app.features.rate_limiting import RateLimit
@@ -446,7 +446,7 @@ class AppService:
# get all tools
tools = agent_config.get('tools', [])
- url_prefix = (current_app.config.get("CONSOLE_API_URL")
+ url_prefix = (dify_config.CONSOLE_API_URL
+ "/console/api/workspaces/current/tool-provider/builtin/")
for tool in tools:
diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py
index fa0a1bbc58..fbaf44c9a4 100644
--- a/api/services/dataset_service.py
+++ b/api/services/dataset_service.py
@@ -6,10 +6,10 @@ import time
import uuid
from typing import Optional
-from flask import current_app
from flask_login import current_user
from sqlalchemy import func
+from configs import dify_config
from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
from core.model_manager import ModelManager
from core.model_runtime.entities.model_entities import ModelType
@@ -650,7 +650,7 @@ class DocumentService:
elif document_data["data_source"]["type"] == "website_crawl":
website_info = document_data["data_source"]['info_list']['website_info_list']
count = len(website_info['urls'])
- batch_upload_limit = int(current_app.config['BATCH_UPLOAD_LIMIT'])
+ batch_upload_limit = int(dify_config.BATCH_UPLOAD_LIMIT)
if count > batch_upload_limit:
raise ValueError(f"You have reached the batch upload limit of {batch_upload_limit}.")
@@ -1028,7 +1028,7 @@ class DocumentService:
elif document_data["data_source"]["type"] == "website_crawl":
website_info = document_data["data_source"]['info_list']['website_info_list']
count = len(website_info['urls'])
- batch_upload_limit = int(current_app.config['BATCH_UPLOAD_LIMIT'])
+ batch_upload_limit = int(dify_config.BATCH_UPLOAD_LIMIT)
if count > batch_upload_limit:
raise ValueError(f"You have reached the batch upload limit of {batch_upload_limit}.")
diff --git a/api/services/entities/model_provider_entities.py b/api/services/entities/model_provider_entities.py
index 853172ea13..e5e4d7e235 100644
--- a/api/services/entities/model_provider_entities.py
+++ b/api/services/entities/model_provider_entities.py
@@ -1,9 +1,9 @@
from enum import Enum
from typing import Optional
-from flask import current_app
from pydantic import BaseModel, ConfigDict
+from configs import dify_config
from core.entities.model_entities import ModelWithProviderEntity, ProviderModelWithStatusEntity
from core.entities.provider_entities import QuotaConfiguration
from core.model_runtime.entities.common_entities import I18nObject
@@ -67,7 +67,7 @@ class ProviderResponse(BaseModel):
def __init__(self, **data) -> None:
super().__init__(**data)
- url_prefix = (current_app.config.get("CONSOLE_API_URL")
+ url_prefix = (dify_config.CONSOLE_API_URL
+ f"/console/api/workspaces/current/model-providers/{self.provider}")
if self.icon_small is not None:
self.icon_small = I18nObject(
@@ -96,7 +96,7 @@ class ProviderWithModelsResponse(BaseModel):
def __init__(self, **data) -> None:
super().__init__(**data)
- url_prefix = (current_app.config.get("CONSOLE_API_URL")
+ url_prefix = (dify_config.CONSOLE_API_URL
+ f"/console/api/workspaces/current/model-providers/{self.provider}")
if self.icon_small is not None:
self.icon_small = I18nObject(
@@ -119,7 +119,7 @@ class SimpleProviderEntityResponse(SimpleProviderEntity):
def __init__(self, **data) -> None:
super().__init__(**data)
- url_prefix = (current_app.config.get("CONSOLE_API_URL")
+ url_prefix = (dify_config.CONSOLE_API_URL
+ f"/console/api/workspaces/current/model-providers/{self.provider}")
if self.icon_small is not None:
self.icon_small = I18nObject(
diff --git a/api/services/feature_service.py b/api/services/feature_service.py
index 7375554156..83e675a9d2 100644
--- a/api/services/feature_service.py
+++ b/api/services/feature_service.py
@@ -1,6 +1,6 @@
-from flask import current_app
from pydantic import BaseModel, ConfigDict
+from configs import dify_config
from services.billing_service import BillingService
from services.enterprise.enterprise_service import EnterpriseService
@@ -51,7 +51,7 @@ class FeatureService:
cls._fulfill_params_from_env(features)
- if current_app.config['BILLING_ENABLED']:
+ if dify_config.BILLING_ENABLED:
cls._fulfill_params_from_billing_api(features, tenant_id)
return features
@@ -60,16 +60,16 @@ class FeatureService:
def get_system_features(cls) -> SystemFeatureModel:
system_features = SystemFeatureModel()
- if current_app.config['ENTERPRISE_ENABLED']:
+ if dify_config.ENTERPRISE_ENABLED:
cls._fulfill_params_from_enterprise(system_features)
return system_features
@classmethod
def _fulfill_params_from_env(cls, features: FeatureModel):
- features.can_replace_logo = current_app.config['CAN_REPLACE_LOGO']
- features.model_load_balancing_enabled = current_app.config['MODEL_LB_ENABLED']
- features.dataset_operator_enabled = current_app.config['DATASET_OPERATOR_ENABLED']
+ features.can_replace_logo = dify_config.CAN_REPLACE_LOGO
+ features.model_load_balancing_enabled = dify_config.MODEL_LB_ENABLED
+ features.dataset_operator_enabled = dify_config.DATASET_OPERATOR_ENABLED
@classmethod
def _fulfill_params_from_billing_api(cls, features: FeatureModel, tenant_id: str):
diff --git a/api/services/file_service.py b/api/services/file_service.py
index 6c308a09df..c686b190fe 100644
--- a/api/services/file_service.py
+++ b/api/services/file_service.py
@@ -4,11 +4,11 @@ import uuid
from collections.abc import Generator
from typing import Union
-from flask import current_app
from flask_login import current_user
from werkzeug.datastructures import FileStorage
from werkzeug.exceptions import NotFound
+from configs import dify_config
from core.file.upload_file_parser import UploadFileParser
from core.rag.extractor.extract_processor import ExtractProcessor
from extensions.ext_database import db
@@ -35,7 +35,7 @@ class FileService:
extension = file.filename.split('.')[-1]
if len(filename) > 200:
filename = filename.split('.')[0][:200] + '.' + extension
- etl_type = current_app.config['ETL_TYPE']
+ etl_type = dify_config.ETL_TYPE
allowed_extensions = UNSTRUCTURED_ALLOWED_EXTENSIONS + IMAGE_EXTENSIONS if etl_type == 'Unstructured' \
else ALLOWED_EXTENSIONS + IMAGE_EXTENSIONS
if extension.lower() not in allowed_extensions:
@@ -50,9 +50,9 @@ class FileService:
file_size = len(file_content)
if extension.lower() in IMAGE_EXTENSIONS:
- file_size_limit = current_app.config.get("UPLOAD_IMAGE_FILE_SIZE_LIMIT") * 1024 * 1024
+ file_size_limit = dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT * 1024 * 1024
else:
- file_size_limit = current_app.config.get("UPLOAD_FILE_SIZE_LIMIT") * 1024 * 1024
+ file_size_limit = dify_config.UPLOAD_FILE_SIZE_LIMIT * 1024 * 1024
if file_size > file_size_limit:
message = f'File size exceeded. {file_size} > {file_size_limit}'
@@ -73,10 +73,9 @@ class FileService:
storage.save(file_key, file_content)
# save file to db
- config = current_app.config
upload_file = UploadFile(
tenant_id=current_tenant_id,
- storage_type=config['STORAGE_TYPE'],
+ storage_type=dify_config.STORAGE_TYPE,
key=file_key,
name=filename,
size=file_size,
@@ -106,10 +105,9 @@ class FileService:
storage.save(file_key, text.encode('utf-8'))
# save file to db
- config = current_app.config
upload_file = UploadFile(
tenant_id=current_user.current_tenant_id,
- storage_type=config['STORAGE_TYPE'],
+ storage_type=dify_config.STORAGE_TYPE,
key=file_key,
name=text_name + '.txt',
size=len(text),
@@ -138,7 +136,7 @@ class FileService:
# extract text from file
extension = upload_file.extension
- etl_type = current_app.config['ETL_TYPE']
+ etl_type = dify_config.ETL_TYPE
allowed_extensions = UNSTRUCTURED_ALLOWED_EXTENSIONS if etl_type == 'Unstructured' else ALLOWED_EXTENSIONS
if extension.lower() not in allowed_extensions:
raise UnsupportedFileTypeError()
diff --git a/api/services/recommended_app_service.py b/api/services/recommended_app_service.py
index d32ab2af33..c4733b6d3f 100644
--- a/api/services/recommended_app_service.py
+++ b/api/services/recommended_app_service.py
@@ -4,8 +4,8 @@ from os import path
from typing import Optional
import requests
-from flask import current_app
+from configs import dify_config
from constants.languages import languages
from extensions.ext_database import db
from models.model import App, RecommendedApp
@@ -25,7 +25,7 @@ class RecommendedAppService:
:param language: language
:return:
"""
- mode = current_app.config.get('HOSTED_FETCH_APP_TEMPLATES_MODE', 'remote')
+ mode = dify_config.HOSTED_FETCH_APP_TEMPLATES_MODE
if mode == 'remote':
try:
result = cls._fetch_recommended_apps_from_dify_official(language)
@@ -104,7 +104,7 @@ class RecommendedAppService:
:param language: language
:return:
"""
- domain = current_app.config.get('HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN', 'https://tmpl.dify.ai')
+ domain = dify_config.HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN
url = f'{domain}/apps?language={language}'
response = requests.get(url, timeout=(3, 10))
if response.status_code != 200:
@@ -134,7 +134,7 @@ class RecommendedAppService:
:param app_id: app id
:return:
"""
- mode = current_app.config.get('HOSTED_FETCH_APP_TEMPLATES_MODE', 'remote')
+ mode = dify_config.HOSTED_FETCH_APP_TEMPLATES_MODE
if mode == 'remote':
try:
result = cls._fetch_recommended_app_detail_from_dify_official(app_id)
@@ -157,7 +157,7 @@ class RecommendedAppService:
:param app_id: App ID
:return:
"""
- domain = current_app.config.get('HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN', 'https://tmpl.dify.ai')
+ domain = dify_config.HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN
url = f'{domain}/apps/{app_id}'
response = requests.get(url, timeout=(3, 10))
if response.status_code != 200:
diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py
index 9a0d6ca8d9..ecc065d521 100644
--- a/api/services/tools/api_tools_manage_service.py
+++ b/api/services/tools/api_tools_manage_service.py
@@ -429,6 +429,7 @@ class ApiToolManageService:
db_provider=provider,
decrypt_credentials=True
)
+ user_provider.labels = labels
# add icon
ToolTransformService.repack_provider(user_provider)
diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py
index 5c77732468..cfce3fbd01 100644
--- a/api/services/tools/tools_transform_service.py
+++ b/api/services/tools/tools_transform_service.py
@@ -2,8 +2,7 @@ import json
import logging
from typing import Optional, Union
-from flask import current_app
-
+from configs import dify_config
from core.tools.entities.api_entities import UserTool, UserToolProvider
from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_bundle import ApiToolBundle
@@ -29,7 +28,7 @@ class ToolTransformService:
"""
get tool provider icon url
"""
- url_prefix = (current_app.config.get("CONSOLE_API_URL")
+ url_prefix = (dify_config.CONSOLE_API_URL
+ "/console/api/workspaces/current/tool-provider/")
if provider_type == ToolProviderType.BUILT_IN.value:
diff --git a/api/services/workspace_service.py b/api/services/workspace_service.py
index 778b4e51d3..2bcbe5c6f6 100644
--- a/api/services/workspace_service.py
+++ b/api/services/workspace_service.py
@@ -1,7 +1,7 @@
-from flask import current_app
from flask_login import current_user
+from configs import dify_config
from extensions.ext_database import db
from models.account import Tenant, TenantAccountJoin, TenantAccountJoinRole
from services.account_service import TenantService
@@ -35,7 +35,7 @@ class WorkspaceService:
if can_replace_logo and TenantService.has_roles(tenant,
[TenantAccountJoinRole.OWNER, TenantAccountJoinRole.ADMIN]):
- base_url = current_app.config.get('FILES_URL')
+ base_url = dify_config.FILES_URL
replace_webapp_logo = f'{base_url}/files/workspaces/{tenant.id}/webapp-logo' if tenant.custom_config_dict.get('replace_webapp_logo') else None
remove_webapp_brand = tenant.custom_config_dict.get('remove_webapp_brand', False)
diff --git a/api/tasks/document_indexing_task.py b/api/tasks/document_indexing_task.py
index 43d1cc13f9..cc93a1341e 100644
--- a/api/tasks/document_indexing_task.py
+++ b/api/tasks/document_indexing_task.py
@@ -4,8 +4,8 @@ import time
import click
from celery import shared_task
-from flask import current_app
+from configs import dify_config
from core.indexing_runner import DocumentIsPausedException, IndexingRunner
from extensions.ext_database import db
from models.dataset import Dataset, Document
@@ -32,7 +32,7 @@ def document_indexing_task(dataset_id: str, document_ids: list):
if features.billing.enabled:
vector_space = features.vector_space
count = len(document_ids)
- batch_upload_limit = int(current_app.config['BATCH_UPLOAD_LIMIT'])
+ batch_upload_limit = int(dify_config.BATCH_UPLOAD_LIMIT)
if count > batch_upload_limit:
raise ValueError(f"You have reached the batch upload limit of {batch_upload_limit}.")
if 0 < vector_space.limit <= vector_space.size:
diff --git a/api/tasks/duplicate_document_indexing_task.py b/api/tasks/duplicate_document_indexing_task.py
index 1854589e7f..884e222d1b 100644
--- a/api/tasks/duplicate_document_indexing_task.py
+++ b/api/tasks/duplicate_document_indexing_task.py
@@ -4,8 +4,8 @@ import time
import click
from celery import shared_task
-from flask import current_app
+from configs import dify_config
from core.indexing_runner import DocumentIsPausedException, IndexingRunner
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
@@ -33,7 +33,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
if features.billing.enabled:
vector_space = features.vector_space
count = len(document_ids)
- batch_upload_limit = int(current_app.config['BATCH_UPLOAD_LIMIT'])
+ batch_upload_limit = int(dify_config.BATCH_UPLOAD_LIMIT)
if count > batch_upload_limit:
raise ValueError(f"You have reached the batch upload limit of {batch_upload_limit}.")
if 0 < vector_space.limit <= vector_space.size:
diff --git a/api/tasks/mail_invite_member_task.py b/api/tasks/mail_invite_member_task.py
index 1f40c05077..a46eafa797 100644
--- a/api/tasks/mail_invite_member_task.py
+++ b/api/tasks/mail_invite_member_task.py
@@ -3,8 +3,9 @@ import time
import click
from celery import shared_task
-from flask import current_app, render_template
+from flask import render_template
+from configs import dify_config
from extensions.ext_mail import mail
@@ -29,7 +30,7 @@ def send_invite_member_mail_task(language: str, to: str, token: str, inviter_nam
# send invite member mail using different languages
try:
- url = f'{current_app.config.get("CONSOLE_WEB_URL")}/activate?token={token}'
+ url = f'{dify_config.CONSOLE_WEB_URL}/activate?token={token}'
if language == 'zh-Hans':
html_content = render_template('invite_member_mail_template_zh-CN.html',
to=to,
diff --git a/api/tasks/mail_reset_password_task.py b/api/tasks/mail_reset_password_task.py
index 0e64c6f163..4e1b8a8913 100644
--- a/api/tasks/mail_reset_password_task.py
+++ b/api/tasks/mail_reset_password_task.py
@@ -3,8 +3,9 @@ import time
import click
from celery import shared_task
-from flask import current_app, render_template
+from flask import render_template
+from configs import dify_config
from extensions.ext_mail import mail
@@ -24,7 +25,7 @@ def send_reset_password_mail_task(language: str, to: str, token: str):
# send reset password mail using different languages
try:
- url = f'{current_app.config.get("CONSOLE_WEB_URL")}/forgot-password?token={token}'
+ url = f'{dify_config.CONSOLE_WEB_URL}/forgot-password?token={token}'
if language == 'zh-Hans':
html_content = render_template('reset_password_mail_template_zh-CN.html',
to=to,
diff --git a/api/tests/integration_tests/vdb/myscale/__init__.py b/api/tests/integration_tests/vdb/myscale/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/tests/integration_tests/vdb/myscale/test_myscale.py b/api/tests/integration_tests/vdb/myscale/test_myscale.py
new file mode 100644
index 0000000000..b6260d549a
--- /dev/null
+++ b/api/tests/integration_tests/vdb/myscale/test_myscale.py
@@ -0,0 +1,29 @@
+from core.rag.datasource.vdb.myscale.myscale_vector import MyScaleConfig, MyScaleVector
+from tests.integration_tests.vdb.test_vector_store import (
+ AbstractVectorTest,
+ setup_mock_redis,
+)
+
+
+class MyScaleVectorTest(AbstractVectorTest):
+ def __init__(self):
+ super().__init__()
+ self.vector = MyScaleVector(
+ collection_name=self.collection_name,
+ config=MyScaleConfig(
+ host="localhost",
+ port=8123,
+ user="default",
+ password="",
+ database="dify",
+ fts_params="",
+ ),
+ )
+
+ def get_ids_by_metadata_field(self):
+ ids = self.vector.get_ids_by_metadata_field(key='document_id', value=self.example_doc_id)
+ assert len(ids) == 1
+
+
+def test_myscale_vector(setup_mock_redis):
+ MyScaleVectorTest().run_all_tests()
diff --git a/dev/pytest/pytest_vdb.sh b/dev/pytest/pytest_vdb.sh
index c954c528fb..cb8aae6740 100755
--- a/dev/pytest/pytest_vdb.sh
+++ b/dev/pytest/pytest_vdb.sh
@@ -3,6 +3,7 @@ set -x
pytest api/tests/integration_tests/vdb/chroma \
api/tests/integration_tests/vdb/milvus \
+ api/tests/integration_tests/vdb/myscale \
api/tests/integration_tests/vdb/pgvecto_rs \
api/tests/integration_tests/vdb/pgvector \
api/tests/integration_tests/vdb/qdrant \
diff --git a/docker-legacy/docker-compose.yaml b/docker-legacy/docker-compose.yaml
index 30f505db41..416a1b8c52 100644
--- a/docker-legacy/docker-compose.yaml
+++ b/docker-legacy/docker-compose.yaml
@@ -40,7 +40,7 @@ services:
# The default value is 300 seconds.
FILES_ACCESS_TIMEOUT: 300
# The maximum number of active requests for the application, where 0 means unlimited, should be a non-negative integer.
- APP_MAX_ACTIVE_REQUESTS: ${FILES_ACCESS_TIMEOUT:-0}
+ APP_MAX_ACTIVE_REQUESTS: 0
# When enabled, migrations will be executed prior to application startup and the application will start after the migrations have completed.
MIGRATION_ENABLED: 'true'
# The configurations of postgres database connection.
diff --git a/docker/.env.example b/docker/.env.example
index 3e132c1b5d..4f7e13e823 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -243,7 +243,7 @@ TENCENT_COS_SCHEME=your-scheme
# ------------------------------
# The type of vector store to use.
-# Supported values are `weaviate`, `qdrant`, `milvus`, `relyt`, `pgvector`, `chroma`, `opensearch`, `tidb_vector`, `oracle`, `tencent`.
+# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `chroma`, `opensearch`, `tidb_vector`, `oracle`, `tencent`.
VECTOR_STORE=weaviate
# The Weaviate endpoint URL. Only available when VECTOR_STORE is `weaviate`.
@@ -274,6 +274,16 @@ MILVUS_PASSWORD=Milvus
# The milvus tls switch.
MILVUS_SECURE=false
+# MyScale configuration, only available when VECTOR_STORE is `myscale`
+# For multi-language support, set MYSCALE_FTS_PARAMS by referring to:
+# https://myscale.com/docs/en/text-search/#understanding-fts-index-parameters
+MYSCALE_HOST=myscale
+MYSCALE_PORT=8123
+MYSCALE_USER=default
+MYSCALE_PASSWORD=
+MYSCALE_DATABASE=dify
+MYSCALE_FTS_PARAMS=
+
# pgvector configurations, only available when VECTOR_STORE is `pgvecto-rs or pgvector`
PGVECTOR_HOST=pgvector
PGVECTOR_PORT=5432
diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml
index c34b50505f..beca104a68 100644
--- a/docker/docker-compose.yaml
+++ b/docker/docker-compose.yaml
@@ -12,7 +12,7 @@ x-shared-env: &shared-api-worker-env
OPENAI_API_BASE: ${OPENAI_API_BASE:-https://api.openai.com/v1}
FILES_URL: ${FILES_URL:-}
FILES_ACCESS_TIMEOUT: ${FILES_ACCESS_TIMEOUT:-300}
- APP_MAX_ACTIVE_REQUESTS: ${FILES_ACCESS_TIMEOUT:-0}
+ APP_MAX_ACTIVE_REQUESTS: ${APP_MAX_ACTIVE_REQUESTS:-0}
MIGRATION_ENABLED: ${MIGRATION_ENABLED:-true}
DEPLOY_ENV: ${DEPLOY_ENV:-PRODUCTION}
DIFY_BIND_ADDRESS: ${DIFY_BIND_ADDRESS:-0.0.0.0}
@@ -83,6 +83,12 @@ x-shared-env: &shared-api-worker-env
MILVUS_USER: ${MILVUS_USER:-root}
MILVUS_PASSWORD: ${MILVUS_PASSWORD:-Milvus}
MILVUS_SECURE: ${MILVUS_SECURE:-false}
+ MYSCALE_HOST: ${MYSCALE_HOST:-myscale}
+ MYSCALE_PORT: ${MYSCALE_PORT:-8123}
+ MYSCALE_USER: ${MYSCALE_USER:-default}
+ MYSCALE_PASSWORD: ${MYSCALE_PASSWORD:-}
+ MYSCALE_DATABASE: ${MYSCALE_DATABASE:-dify}
+ MYSCALE_FTS_PARAMS: ${MYSCALE_FTS_PARAMS:-}
RELYT_HOST: ${RELYT_HOST:-db}
RELYT_PORT: ${RELYT_PORT:-5432}
RELYT_USER: ${RELYT_USER:-postgres}
@@ -532,6 +538,21 @@ services:
depends_on:
- opensearch
+ # MyScale vector database
+ myscale:
+ container_name: myscale
+ image: myscale/myscaledb:1.6
+ profiles:
+ - myscale
+ restart: always
+ tty: true
+ volumes:
+ - ./volumes/myscale/data:/var/lib/clickhouse
+ - ./volumes/myscale/log:/var/log/clickhouse-server
+ - ./volumes/myscale/config/users.d/custom_users_config.xml:/etc/clickhouse-server/users.d/custom_users_config.xml
+ ports:
+ - "${MYSCALE_PORT:-8123}:${MYSCALE_PORT:-8123}"
+
networks:
# create a network between sandbox, api and ssrf_proxy, and can not access outside.
ssrf_proxy_network:
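Once the `myscale` profile is up, the service exposes the ClickHouse-compatible HTTP interface on port 8123. A quick liveness probe, assuming the default port mapping above:

```python
# Probe the myscale container over its HTTP interface (assumes the
# default 8123 port mapping from the compose file above).
import urllib.request

with urllib.request.urlopen('http://localhost:8123/ping', timeout=3) as resp:
    print(resp.read().decode().strip())  # a healthy server replies "Ok."
```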
diff --git a/docker/volumes/myscale/config/users.d/custom_users_config.xml b/docker/volumes/myscale/config/users.d/custom_users_config.xml
new file mode 100644
index 0000000000..67f24b69ee
--- /dev/null
+++ b/docker/volumes/myscale/config/users.d/custom_users_config.xml
@@ -0,0 +1,17 @@
+<clickhouse>
+    <users>
+        <default>
+            <password></password>
+            <networks>
+                <ip>::1</ip>
+                <ip>127.0.0.1</ip>
+                <ip>10.0.0.0/8</ip>
+                <ip>172.16.0.0/12</ip>
+                <ip>192.168.0.0/16</ip>
+            </networks>
+            <profile>default</profile>
+            <quota>default</quota>
+            <access_management>1</access_management>
+        </default>
+    </users>
+</clickhouse>
\ No newline at end of file
diff --git a/web/app/components/app/configuration/config-var/config-modal/index.tsx b/web/app/components/app/configuration/config-var/config-modal/index.tsx
index becd5085a1..20fcf49de1 100644
--- a/web/app/components/app/configuration/config-var/config-modal/index.tsx
+++ b/web/app/components/app/configuration/config-var/config-modal/index.tsx
@@ -17,7 +17,6 @@ import Switch from '@/app/components/base/switch'
import { ChangeType, InputVarType } from '@/app/components/workflow/types'
const TEXT_MAX_LENGTH = 256
-const PARAGRAPH_MAX_LENGTH = 1032 * 32
export type IConfigModalProps = {
isCreate?: boolean
@@ -167,7 +166,7 @@ const ConfigModal: FC<IConfigModalProps> = ({
{isStringInput && (
-
+
)}
diff --git a/web/app/components/base/mermaid/index.tsx b/web/app/components/base/mermaid/index.tsx
index bef26b7a36..dc01338a8c 100644
--- a/web/app/components/base/mermaid/index.tsx
+++ b/web/app/components/base/mermaid/index.tsx
@@ -1,6 +1,8 @@
import React, { useEffect, useRef, useState } from 'react'
import mermaid from 'mermaid'
+import { usePrevious } from 'ahooks'
import CryptoJS from 'crypto-js'
+import { ExclamationTriangleIcon } from '@heroicons/react/24/outline'
import LoadingAnim from '@/app/components/base/chat/chat/loading-anim'
let mermaidAPI: any
@@ -40,32 +42,15 @@ const Flowchart = React.forwardRef((props: {
}, ref) => {
const [svgCode, setSvgCode] = useState(null)
const chartId = useRef(`flowchart_${CryptoJS.MD5(props.PrimitiveCode).toString()}`)
- const [isRender, setIsRender] = useState(false)
+ const prevPrimitiveCode = usePrevious(props.PrimitiveCode)
const [isLoading, setIsLoading] = useState(true)
-
- const clearFlowchartCache = () => {
- for (let i = localStorage.length - 1; i >= 0; --i) {
- const key = localStorage.key(i)
- if (key && key.startsWith('flowchart_'))
- localStorage.removeItem(key)
- }
- }
+ const timeRef = useRef<NodeJS.Timeout>()
+ const [errMsg, setErrMsg] = useState('')
const renderFlowchart = async (PrimitiveCode: string) => {
try {
- const cachedSvg: any = localStorage.getItem(chartId.current)
- if (cachedSvg) {
- setSvgCode(cachedSvg)
- setIsLoading(false)
- return
- }
-
if (typeof window !== 'undefined' && mermaidAPI) {
const svgGraph = await mermaidAPI.render(chartId.current, PrimitiveCode)
- const dom = new DOMParser().parseFromString(svgGraph.svg, 'text/xml')
- if (!dom.querySelector('g.main'))
- throw new Error('empty svg')
-
const base64Svg: any = await svgToBase64(svgGraph.svg)
setSvgCode(base64Svg)
setIsLoading(false)
@@ -74,30 +59,26 @@ const Flowchart = React.forwardRef((props: {
}
}
catch (error) {
- clearFlowchartCache()
- // eslint-disable-next-line @typescript-eslint/no-use-before-define
- handleReRender()
+ if (prevPrimitiveCode === props.PrimitiveCode) {
+ setIsLoading(false)
+ setErrMsg((error as Error).message)
+ }
}
}
- const handleReRender = () => {
- setIsRender(false)
- setSvgCode(null)
- if (chartId.current)
- localStorage.removeItem(chartId.current)
-
- setTimeout(() => {
- setIsRender(true)
- renderFlowchart(props.PrimitiveCode)
- }, 100)
- }
-
useEffect(() => {
- setIsRender(false)
- setTimeout(() => {
- setIsRender(true)
+ const cachedSvg: any = localStorage.getItem(chartId.current)
+ if (cachedSvg) {
+ setSvgCode(cachedSvg)
+ setIsLoading(false)
+ return
+ }
+ if (timeRef.current)
+ clearTimeout(timeRef.current)
+
+ timeRef.current = setTimeout(() => {
renderFlowchart(props.PrimitiveCode)
- }, 100)
+ }, 300)
}, [props.PrimitiveCode])
return (
@@ -105,16 +86,24 @@ const Flowchart = React.forwardRef((props: {
// @ts-expect-error
{
- isRender
- &&
- {svgCode && }
-
+ svgCode
+ &&
+ {svgCode && }
+
}
{isLoading
&&
}
+ {
+ errMsg
+ &&
+
+
+ {errMsg}
+
+ }
)
})
diff --git a/web/app/components/develop/template/template_workflow.en.mdx b/web/app/components/develop/template/template_workflow.en.mdx
index 806cf992e0..9580c2aa0b 100644
--- a/web/app/components/develop/template/template_workflow.en.mdx
+++ b/web/app/components/develop/template/template_workflow.en.mdx
@@ -213,7 +213,7 @@ Workflow applications offers non-session support and is ideal for translation, a
---