merge main

This commit is contained in:
takatost 2024-03-28 14:38:21 +08:00
commit 858ab8c8c4
32 changed files with 533 additions and 107 deletions

View File

@ -100,10 +100,12 @@ docker compose up -d
After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization installation process.
### Helm Chart
#### Deploy with Helm Chart
Big thanks to @BorisPolonsky for providing us with a [Helm Chart](https://helm.sh/) version, which allows Dify to be deployed on Kubernetes.
You can go to https://github.com/BorisPolonsky/dify-helm for deployment information.
[Helm Chart](https://helm.sh/) version, which allows Dify to be deployed on Kubernetes.
- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
### Configuration
@ -120,6 +122,7 @@ For those who'd like to contribute code, see our [Contribution Guide](https://gi
At the same time, please consider supporting Dify by sharing it on social media and at events and conferences.
### Contributors
<a href="https://github.com/langgenius/dify/graphs/contributors">

View File

@ -94,10 +94,12 @@ docker compose up -d
运行后,可以在浏览器上访问 [http://localhost/install](http://localhost/install) 进入 Dify 控制台并开始初始化安装操作。
### Helm Chart
#### 使用 Helm Chart 部署
非常感谢 @BorisPolonsky 为我们提供了一个 [Helm Chart](https://helm.sh/) 版本,可以在 Kubernetes 上部署 Dify。
您可以前往 https://github.com/BorisPolonsky/dify-helm 来获取部署信息。
使用 [Helm Chart](https://helm.sh/) 版本,可以在 Kubernetes 上部署 Dify。
- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
### 配置

View File

@ -1,5 +1,4 @@
import enum
import importlib.util
import json
import logging
import os
@ -7,6 +6,7 @@ from typing import Any, Optional
from pydantic import BaseModel
from core.utils.module_import_helper import load_single_subclass_from_source
from core.utils.position_helper import sort_to_dict_by_position_map
@ -73,17 +73,9 @@ class Extensible:
# Dynamic loading {subdir_name}.py file and find the subclass of Extensible
py_path = os.path.join(subdir_path, extension_name + '.py')
spec = importlib.util.spec_from_file_location(extension_name, py_path)
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
extension_class = None
for name, obj in vars(mod).items():
if isinstance(obj, type) and issubclass(obj, cls) and obj != cls:
extension_class = obj
break
if not extension_class:
try:
extension_class = load_single_subclass_from_source(extension_name, py_path, cls)
except Exception:
logging.warning(f"Missing subclass of {cls.__name__} in {py_path}, Skip.")
continue

View File

@ -1,4 +1,3 @@
import importlib
import os
from abc import ABC, abstractmethod
@ -7,6 +6,7 @@ import yaml
from core.model_runtime.entities.model_entities import AIModelEntity, ModelType
from core.model_runtime.entities.provider_entities import ProviderEntity
from core.model_runtime.model_providers.__base.ai_model import AIModel
from core.utils.module_import_helper import get_subclasses_from_module, import_module_from_source
class ModelProvider(ABC):
@ -104,17 +104,10 @@ class ModelProvider(ABC):
# Dynamic loading {model_type_name}.py file and find the subclass of AIModel
parent_module = '.'.join(self.__class__.__module__.split('.')[:-1])
spec = importlib.util.spec_from_file_location(f"{parent_module}.{model_type_name}.{model_type_name}", model_type_py_path)
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
model_class = None
for name, obj in vars(mod).items():
if (isinstance(obj, type) and issubclass(obj, AIModel) and not obj.__abstractmethods__
and obj != AIModel and obj.__module__ == mod.__name__):
model_class = obj
break
mod = import_module_from_source(
f'{parent_module}.{model_type_name}.{model_type_name}', model_type_py_path)
model_class = next(filter(lambda x: x.__module__ == mod.__name__ and not x.__abstractmethods__,
get_subclasses_from_module(mod, AIModel)), None)
if not model_class:
raise Exception(f'Missing AIModel Class for model type {model_type} in {model_type_py_path}')

View File

@ -1,4 +1,3 @@
import importlib
import logging
import os
from typing import Optional
@ -10,6 +9,7 @@ from core.model_runtime.entities.provider_entities import ProviderConfig, Provid
from core.model_runtime.model_providers.__base.model_provider import ModelProvider
from core.model_runtime.schema_validators.model_credential_schema_validator import ModelCredentialSchemaValidator
from core.model_runtime.schema_validators.provider_credential_schema_validator import ProviderCredentialSchemaValidator
from core.utils.module_import_helper import load_single_subclass_from_source
from core.utils.position_helper import get_position_map, sort_to_dict_by_position_map
logger = logging.getLogger(__name__)
@ -229,15 +229,10 @@ class ModelProviderFactory:
# Dynamic loading {model_provider_name}.py file and find the subclass of ModelProvider
py_path = os.path.join(model_provider_dir_path, model_provider_name + '.py')
spec = importlib.util.spec_from_file_location(f'core.model_runtime.model_providers.{model_provider_name}.{model_provider_name}', py_path)
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
model_provider_class = None
for name, obj in vars(mod).items():
if isinstance(obj, type) and issubclass(obj, ModelProvider) and obj != ModelProvider:
model_provider_class = obj
break
model_provider_class = load_single_subclass_from_source(
module_name=f'core.model_runtime.model_providers.{model_provider_name}.{model_provider_name}',
script_path=py_path,
parent_type=ModelProvider)
if not model_provider_class:
logger.warning(f"Missing Model Provider Class that extends ModelProvider in {py_path}, Skip.")

View File

@ -0,0 +1,37 @@
model: ernie-3.5-4k-0205
label:
en_US: Ernie-3.5-4K-0205
model_type: llm
features:
- agent-thought
model_properties:
mode: chat
context_size: 4096
parameter_rules:
- name: temperature
use_template: temperature
min: 0.1
max: 1.0
default: 0.8
- name: top_p
use_template: top_p
- name: max_tokens
use_template: max_tokens
default: 1024
min: 2
max: 2048
- name: presence_penalty
use_template: presence_penalty
- name: frequency_penalty
use_template: frequency_penalty
- name: response_format
use_template: response_format
- name: disable_search
label:
zh_Hans: 禁用搜索
en_US: Disable Search
type: boolean
help:
zh_Hans: 禁用模型自行进行外部搜索。
en_US: Disable the model to perform external search.
required: false

View File

@ -0,0 +1,37 @@
model: ernie-3.5-8k-0205
label:
en_US: Ernie-3.5-8K-0205
model_type: llm
features:
- agent-thought
model_properties:
mode: chat
context_size: 8192
parameter_rules:
- name: temperature
use_template: temperature
min: 0.1
max: 1.0
default: 0.8
- name: top_p
use_template: top_p
- name: max_tokens
use_template: max_tokens
default: 1024
min: 2
max: 2048
- name: presence_penalty
use_template: presence_penalty
- name: frequency_penalty
use_template: frequency_penalty
- name: response_format
use_template: response_format
- name: disable_search
label:
zh_Hans: 禁用搜索
en_US: Disable Search
type: boolean
help:
zh_Hans: 禁用模型自行进行外部搜索。
en_US: Disable the model to perform external search.
required: false

View File

@ -0,0 +1,37 @@
model: ernie-3.5-8k-1222
label:
en_US: Ernie-3.5-8K-1222
model_type: llm
features:
- agent-thought
model_properties:
mode: chat
context_size: 8192
parameter_rules:
- name: temperature
use_template: temperature
min: 0.1
max: 1.0
default: 0.8
- name: top_p
use_template: top_p
- name: max_tokens
use_template: max_tokens
default: 1024
min: 2
max: 2048
- name: presence_penalty
use_template: presence_penalty
- name: frequency_penalty
use_template: frequency_penalty
- name: response_format
use_template: response_format
- name: disable_search
label:
zh_Hans: 禁用搜索
en_US: Disable Search
type: boolean
help:
zh_Hans: 禁用模型自行进行外部搜索。
en_US: Disable the model to perform external search.
required: false

View File

@ -0,0 +1,37 @@
model: ernie-3.5-8k
label:
en_US: Ernie-3.5-8K
model_type: llm
features:
- agent-thought
model_properties:
mode: chat
context_size: 8192
parameter_rules:
- name: temperature
use_template: temperature
min: 0.1
max: 1.0
default: 0.8
- name: top_p
use_template: top_p
- name: max_tokens
use_template: max_tokens
default: 1024
min: 2
max: 2048
- name: presence_penalty
use_template: presence_penalty
- name: frequency_penalty
use_template: frequency_penalty
- name: response_format
use_template: response_format
- name: disable_search
label:
zh_Hans: 禁用搜索
en_US: Disable Search
type: boolean
help:
zh_Hans: 禁用模型自行进行外部搜索。
en_US: Disable the model to perform external search.
required: false

View File

@ -0,0 +1,37 @@
model: ernie-4.0-8k
label:
en_US: Ernie-4.0-8K
model_type: llm
features:
- agent-thought
model_properties:
mode: chat
context_size: 8192
parameter_rules:
- name: temperature
use_template: temperature
min: 0.1
max: 1.0
default: 0.8
- name: top_p
use_template: top_p
- name: max_tokens
use_template: max_tokens
default: 1024
min: 2
max: 2048
- name: presence_penalty
use_template: presence_penalty
- name: frequency_penalty
use_template: frequency_penalty
- name: response_format
use_template: response_format
- name: disable_search
label:
zh_Hans: 禁用搜索
en_US: Disable Search
type: boolean
help:
zh_Hans: 禁用模型自行进行外部搜索。
en_US: Disable the model to perform external search.
required: false

View File

@ -36,3 +36,4 @@ parameter_rules:
zh_Hans: 禁用模型自行进行外部搜索。
en_US: Disable the model to perform external search.
required: false
deprecated: true

View File

@ -36,3 +36,4 @@ parameter_rules:
zh_Hans: 禁用模型自行进行外部搜索。
en_US: Disable the model to perform external search.
required: false
deprecated: true

View File

@ -27,3 +27,4 @@ parameter_rules:
use_template: frequency_penalty
- name: response_format
use_template: response_format
deprecated: true

View File

@ -36,3 +36,4 @@ parameter_rules:
required: false
- name: response_format
use_template: response_format
deprecated: true

View File

@ -0,0 +1,30 @@
model: ernie-lite-8k-0308
label:
en_US: ERNIE-Lite-8K-0308
model_type: llm
features:
- agent-thought
model_properties:
mode: chat
context_size: 8192
parameter_rules:
- name: temperature
use_template: temperature
min: 0.1
max: 1.0
default: 0.95
- name: top_p
use_template: top_p
min: 0
max: 1.0
default: 0.7
- name: max_tokens
use_template: max_tokens
default: 1024
min: 2
max: 1024
- name: presence_penalty
use_template: presence_penalty
default: 1.0
min: 1.0
max: 2.0

View File

@ -0,0 +1,30 @@
model: ernie-lite-8k-0922
label:
en_US: ERNIE-Lite-8K-0922
model_type: llm
features:
- agent-thought
model_properties:
mode: chat
context_size: 8192
parameter_rules:
- name: temperature
use_template: temperature
min: 0.1
max: 1.0
default: 0.95
- name: top_p
use_template: top_p
min: 0
max: 1.0
default: 0.7
- name: max_tokens
use_template: max_tokens
default: 1024
min: 2
max: 1024
- name: presence_penalty
use_template: presence_penalty
default: 1.0
min: 1.0
max: 2.0

View File

@ -0,0 +1,30 @@
model: ernie-speed-128k
label:
en_US: ERNIE-Speed-128K
model_type: llm
features:
- agent-thought
model_properties:
mode: chat
context_size: 128000
parameter_rules:
- name: temperature
use_template: temperature
min: 0.1
max: 1.0
default: 0.95
- name: top_p
use_template: top_p
min: 0
max: 1.0
default: 0.7
- name: max_tokens
use_template: max_tokens
default: 1024
min: 2
max: 1024
- name: presence_penalty
use_template: presence_penalty
default: 1.0
min: 1.0
max: 2.0

View File

@ -0,0 +1,30 @@
model: ernie-speed-8k
label:
en_US: ERNIE-Speed-8K
model_type: llm
features:
- agent-thought
model_properties:
mode: chat
context_size: 8192
parameter_rules:
- name: temperature
use_template: temperature
min: 0.1
max: 1.0
default: 0.95
- name: top_p
use_template: top_p
min: 0
max: 1.0
default: 0.7
- name: max_tokens
use_template: max_tokens
default: 1024
min: 2
max: 1024
- name: presence_penalty
use_template: presence_penalty
default: 1.0
min: 1.0
max: 2.0

View File

@ -0,0 +1,25 @@
model: ernie-speed-appbuilder
label:
en_US: ERNIE-Speed-AppBuilder
model_type: llm
features:
- agent-thought
model_properties:
mode: chat
context_size: 8192
parameter_rules:
- name: temperature
use_template: temperature
min: 0.1
max: 1.0
default: 0.95
- name: top_p
use_template: top_p
min: 0
max: 1.0
default: 0.7
- name: presence_penalty
use_template: presence_penalty
default: 1.0
min: 1.0
max: 2.0

View File

@ -121,15 +121,29 @@ class ErnieMessage:
class ErnieBotModel:
api_bases = {
'ernie-bot': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions',
'ernie-bot': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-3.5-4k-0205',
'ernie-bot-4': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions_pro',
'ernie-bot-8k': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie_bot_8k',
'ernie-bot-8k': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions',
'ernie-bot-turbo': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant',
'ernie-3.5-8k': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions',
'ernie-3.5-8k-0205': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-3.5-8k-0205',
'ernie-3.5-8k-1222': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-3.5-8k-1222',
'ernie-3.5-4k-0205': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-3.5-4k-0205',
'ernie-4.0-8k': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions_pro',
'ernie-speed-8k': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie_speed',
'ernie-speed-128k': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-speed-128k',
'ernie-speed-appbuilder': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ai_apaas',
'ernie-lite-8k-0922': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant',
'ernie-lite-8k-0308': 'https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-lite-8k',
}
function_calling_supports = [
'ernie-bot',
'ernie-bot-8k',
'ernie-3.5-8k',
'ernie-3.5-8k-0205',
'ernie-3.5-8k-1222',
'ernie-3.5-4k-0205'
]
api_key: str = ''
@ -285,6 +299,12 @@ class ErnieBotModel:
**parameters
}
if 'max_tokens' in parameters and type(parameters['max_tokens']) == int:
body['max_output_tokens'] = parameters['max_tokens']
if 'presence_penalty' in parameters and type(parameters['presence_penalty']) == float:
body['penalty_score'] = parameters['presence_penalty']
if system_message:
body['system'] = system_message

View File

@ -1,4 +1,3 @@
import importlib
from abc import abstractmethod
from os import listdir, path
from typing import Any
@ -16,6 +15,7 @@ from core.tools.errors import (
from core.tools.provider.tool_provider import ToolProviderController
from core.tools.tool.builtin_tool import BuiltinTool
from core.tools.tool.tool import Tool
from core.utils.module_import_helper import load_single_subclass_from_source
class BuiltinToolProviderController(ToolProviderController):
@ -63,16 +63,11 @@ class BuiltinToolProviderController(ToolProviderController):
tool_name = tool_file.split(".")[0]
tool = load(f.read(), FullLoader)
# get tool class, import the module
py_path = path.join(path.dirname(path.realpath(__file__)), 'builtin', provider, 'tools', f'{tool_name}.py')
spec = importlib.util.spec_from_file_location(f'core.tools.provider.builtin.{provider}.tools.{tool_name}', py_path)
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
# get all the classes in the module
classes = [x for _, x in vars(mod).items()
if isinstance(x, type) and x not in [BuiltinTool, Tool] and issubclass(x, BuiltinTool)
]
assistant_tool_class = classes[0]
assistant_tool_class = load_single_subclass_from_source(
module_name=f'core.tools.provider.builtin.{provider}.tools.{tool_name}',
script_path=path.join(path.dirname(path.realpath(__file__)),
'builtin', provider, 'tools', f'{tool_name}.py'),
parent_type=BuiltinTool)
tools.append(assistant_tool_class(**tool))
self.tools = tools

View File

@ -1,4 +1,3 @@
import importlib
import json
import logging
import mimetypes
@ -33,6 +32,7 @@ from core.tools.utils.configuration import (
ToolParameterConfigurationManager,
)
from core.tools.utils.encoder import serialize_base_model_dict
from core.utils.module_import_helper import load_single_subclass_from_source
from core.workflow.nodes.tool.entities import ToolEntity
from extensions.ext_database import db
from models.tools import ApiToolProvider, BuiltinToolProvider
@ -73,21 +73,11 @@ class ToolManager:
if provider_entity is None:
# fetch the provider from .provider.builtin
py_path = path.join(path.dirname(path.realpath(__file__)), 'builtin', provider, f'{provider}.py')
spec = importlib.util.spec_from_file_location(f'core.tools.provider.builtin.{provider}.{provider}', py_path)
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
# get all the classes in the module
classes = [ x for _, x in vars(mod).items()
if isinstance(x, type) and x != ToolProviderController and issubclass(x, ToolProviderController)
]
if len(classes) == 0:
raise ToolProviderNotFoundError(f'provider {provider} not found')
if len(classes) > 1:
raise ToolProviderNotFoundError(f'multiple providers found for {provider}')
provider_entity = classes[0]()
provider_class = load_single_subclass_from_source(
module_name=f'core.tools.provider.builtin.{provider}.{provider}',
script_path=path.join(path.dirname(path.realpath(__file__)), 'builtin', provider, f'{provider}.py'),
parent_type=ToolProviderController)
provider_entity = provider_class()
return provider_entity.invoke(tool_id, tool_name, tool_parameters, credentials, prompt_messages)
@ -237,13 +227,13 @@ class ToolManager:
parameter_value = parameter_rule.default
if not parameter_value and parameter_rule.required:
raise ValueError(f"tool parameter {parameter_rule.name} not found in tool config")
if parameter_rule.type == ToolParameter.ToolParameterType.SELECT:
# check if tool_parameter_config in options
options = list(map(lambda x: x.value, parameter_rule.options))
if parameter_value not in options:
raise ValueError(f"tool parameter {parameter_rule.name} value {parameter_value} not in options {options}")
# convert tool parameter config to correct type
try:
if parameter_rule.type == ToolParameter.ToolParameterType.NUMBER:
@ -265,7 +255,7 @@ class ToolManager:
parameter_value = str(parameter_value)
except Exception as e:
raise ValueError(f"tool parameter {parameter_rule.name} value {parameter_value} is not correct type")
return parameter_value
@staticmethod
@ -297,16 +287,16 @@ class ToolManager:
tool_entity.runtime.runtime_parameters.update(runtime_parameters)
return tool_entity
@staticmethod
def get_workflow_tool_runtime(tenant_id: str, workflow_tool: ToolEntity, agent_callback: DifyAgentCallbackHandler):
"""
get the workflow tool runtime
"""
tool_entity = ToolManager.get_tool_runtime(
provider_type=workflow_tool.provider_type,
provider_name=workflow_tool.provider_id,
tool_name=workflow_tool.tool_name,
provider_type=workflow_tool.provider_type,
provider_name=workflow_tool.provider_id,
tool_name=workflow_tool.tool_name,
tenant_id=tenant_id,
agent_callback=agent_callback
)
@ -318,7 +308,7 @@ class ToolManager:
if parameter.form == ToolParameter.ToolParameterForm.FORM:
value = ToolManager._init_runtime_parameter(parameter, workflow_tool.tool_configurations)
runtime_parameters[parameter.name] = value
# decrypt runtime parameters
encryption_manager = ToolParameterConfigurationManager(
tenant_id=tenant_id,
@ -326,7 +316,7 @@ class ToolManager:
provider_name=workflow_tool.provider_id,
provider_type=workflow_tool.provider_type,
)
if runtime_parameters:
runtime_parameters = encryption_manager.decrypt_tool_parameters(runtime_parameters)
@ -373,23 +363,12 @@ class ToolManager:
if provider.startswith('__'):
continue
py_path = path.join(path.dirname(path.realpath(__file__)), 'provider', 'builtin', provider, f'{provider}.py')
spec = importlib.util.spec_from_file_location(f'core.tools.provider.builtin.{provider}.{provider}', py_path)
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
# load all classes
classes = [
obj for name, obj in vars(mod).items()
if isinstance(obj, type) and obj != BuiltinToolProviderController and issubclass(obj, BuiltinToolProviderController)
]
if len(classes) == 0:
raise ToolProviderNotFoundError(f'provider {provider} not found')
if len(classes) > 1:
raise ToolProviderNotFoundError(f'multiple providers found for {provider}')
# init provider
provider_class = classes[0]
provider_class = load_single_subclass_from_source(
module_name=f'core.tools.provider.builtin.{provider}.{provider}',
script_path=path.join(path.dirname(path.realpath(__file__)),
'provider', 'builtin', provider, f'{provider}.py'),
parent_type=BuiltinToolProviderController)
builtin_providers.append(provider_class())
# cache the builtin providers
@ -468,16 +447,16 @@ class ToolManager:
tenant_id: str,
) -> list[UserToolProvider]:
result_providers: dict[str, UserToolProvider] = {}
# get builtin providers
builtin_providers = ToolManager.list_builtin_providers()
# get db builtin providers
db_builtin_providers: list[BuiltinToolProvider] = db.session.query(BuiltinToolProvider). \
filter(BuiltinToolProvider.tenant_id == tenant_id).all()
find_db_builtin_provider = lambda provider: next((x for x in db_builtin_providers if x.provider == provider), None)
# append builtin providers
for provider in builtin_providers:
user_provider = ToolTransformService.builtin_provider_to_user_provider(

View File

@ -0,0 +1,62 @@
import importlib.util
import logging
import sys
from types import ModuleType
from typing import AnyStr
def import_module_from_source(
        module_name: str,
        py_file_path: AnyStr,
        use_lazy_loader: bool = False
) -> ModuleType:
    """
    Import a module directly from a source file.

    If a spec for ``module_name`` is already discoverable (e.g. the module is
    importable from ``sys.path`` or already loaded), that spec is reused and
    ``py_file_path`` is ignored. Otherwise the module is loaded from
    ``py_file_path`` and registered in ``sys.modules`` under ``module_name``.

    :param module_name: fully qualified name to register the module under
    :param py_file_path: path to the ``.py`` source file to load from
    :param use_lazy_loader: wrap the loader in ``importlib.util.LazyLoader``
        so module execution is deferred until first attribute access
        (only applies when loading from ``py_file_path``)
    :return: the loaded module object
    :raises Exception: re-raises any error from spec resolution or module
        execution, after logging it with full traceback
    """
    try:
        existed_spec = importlib.util.find_spec(module_name)
        if existed_spec:
            spec = existed_spec
        else:
            # Refer to: https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly
            spec = importlib.util.spec_from_file_location(module_name, py_file_path)
            if use_lazy_loader:
                # Refer to: https://docs.python.org/3/library/importlib.html#implementing-lazy-imports
                spec.loader = importlib.util.LazyLoader(spec.loader)
        module = importlib.util.module_from_spec(spec)
        if not existed_spec:
            # Register before exec_module so imports inside the module that
            # reference it by name resolve correctly.
            sys.modules[module_name] = module
        spec.loader.exec_module(module)
        return module
    except Exception as e:
        logging.exception(f'Failed to load module {module_name} from {py_file_path}: {str(e)}')
        # Bare raise preserves the original traceback without re-chaining.
        raise
def get_subclasses_from_module(mod: ModuleType, parent_type: type) -> list[type]:
    """
    Return every class bound in ``mod``'s namespace that is a strict
    subclass of ``parent_type`` (``parent_type`` itself is excluded).
    """
    subclasses = []
    for attribute in vars(mod).values():
        if not isinstance(attribute, type):
            continue
        if attribute == parent_type:
            continue
        if issubclass(attribute, parent_type):
            subclasses.append(attribute)
    return subclasses
def load_single_subclass_from_source(
        module_name: str,
        script_path: AnyStr,
        parent_type: type,
        use_lazy_loader: bool = False,
) -> type:
    """
    Import ``script_path`` as ``module_name`` and return the one subclass of
    ``parent_type`` it defines; raise if zero or several are found.
    """
    module = import_module_from_source(module_name, script_path, use_lazy_loader)
    candidates = get_subclasses_from_module(module, parent_type)
    if len(candidates) == 1:
        return candidates[0]
    if not candidates:
        raise Exception(f'Missing subclass of {parent_type.__name__} in {script_path}')
    raise Exception(f'Multiple subclasses of {parent_type.__name__} in {script_path}')

View File

@ -28,8 +28,8 @@ scikit-learn==1.2.2
sentry-sdk[flask]~=1.39.2
sympy==1.12
jieba==0.42.1
celery==5.2.7
redis~=4.5.4
celery~=5.3.6
redis[hiredis]~=5.0.3
openpyxl==3.1.2
chardet~=5.1.0
python-docx~=1.1.0

View File

@ -154,7 +154,7 @@ def test_invoke_stream_model():
model = ErnieBotLargeLanguageModel()
response = model.invoke(
model='ernie-bot',
model='ernie-3.5-8k',
credentials={
'api_key': os.environ.get('WENXIN_API_KEY'),
'secret_key': os.environ.get('WENXIN_SECRET_KEY')

View File

@ -0,0 +1,7 @@
from tests.integration_tests.utils.parent_class import ParentClass
class ChildClass(ParentClass):
    # Test fixture: the single ParentClass subclass in this file, intended to
    # be discovered by load_single_subclass_from_source.

    def __init__(self, name: str):
        super().__init__(name)
        # ParentClass.__init__ already assigns self.name (see parent_class.py);
        # this re-assignment is redundant but harmless.
        self.name = name

View File

@ -0,0 +1,7 @@
from tests.integration_tests.utils.parent_class import ParentClass
class LazyLoadChildClass(ParentClass):
    # Test fixture: ParentClass subclass loaded via the lazy-loader path of
    # load_single_subclass_from_source (use_lazy_loader=True).

    def __init__(self, name: str):
        super().__init__(name)
        # ParentClass.__init__ already assigns self.name (see parent_class.py);
        # this re-assignment is redundant but harmless.
        self.name = name

View File

@ -0,0 +1,6 @@
class ParentClass:
    # Minimal named fixture used by the module-import-helper integration tests.

    def __init__(self, name):
        self.name = name

    def get_name(self):
        """Return the name given at construction."""
        return self.name

View File

@ -0,0 +1,32 @@
import os
from core.utils.module_import_helper import load_single_subclass_from_source, import_module_from_source
from tests.integration_tests.utils.parent_class import ParentClass
def test_loading_subclass_from_source():
    """load_single_subclass_from_source returns the ChildClass type from child_class.py."""
    # Resolve relative to this test file, not the process CWD, so the test
    # passes regardless of the directory pytest is invoked from.
    current_path = os.path.dirname(os.path.abspath(__file__))
    clz = load_single_subclass_from_source(
        module_name='ChildClass',
        script_path=os.path.join(current_path, 'child_class.py'),
        parent_type=ParentClass)
    # The helper returns a class (not a module); check the class name.
    assert clz and clz.__name__ == 'ChildClass'
def test_load_import_module_from_source():
    """import_module_from_source executes child_class.py and returns the module object."""
    # Resolve relative to this test file, not the process CWD, so the test
    # passes regardless of the directory pytest is invoked from.
    current_path = os.path.dirname(os.path.abspath(__file__))
    module = import_module_from_source(
        module_name='ChildClass',
        py_file_path=os.path.join(current_path, 'child_class.py'))
    assert module and module.__name__ == 'ChildClass'
def test_lazy_loading_subclass_from_source():
    """Lazy-loaded subclass is still instantiable and behaves like a normal class."""
    # Resolve relative to this test file, not the process CWD, so the test
    # passes regardless of the directory pytest is invoked from.
    current_path = os.path.dirname(os.path.abspath(__file__))
    clz = load_single_subclass_from_source(
        module_name='LazyLoadChildClass',
        script_path=os.path.join(current_path, 'lazy_load_class.py'),
        parent_type=ParentClass,
        use_lazy_loader=True)
    # Instantiating forces execution of the lazily loaded module.
    instance = clz('dify')
    assert instance.get_name() == 'dify'

View File

@ -130,7 +130,6 @@ const FileUploader = ({
}
}
const fileListCopy = fileListRef.current
return upload({
xhr: new XMLHttpRequest(),
data: formData,
@ -142,14 +141,14 @@ const FileUploader = ({
file: res,
progress: -1,
}
const index = fileListCopy.findIndex(item => item.fileID === fileItem.fileID)
fileListCopy[index] = completeFile
onFileUpdate(completeFile, 100, fileListCopy)
const index = fileListRef.current.findIndex(item => item.fileID === fileItem.fileID)
fileListRef.current[index] = completeFile
onFileUpdate(completeFile, 100, fileListRef.current)
return Promise.resolve({ ...completeFile })
})
.catch((e) => {
notify({ type: 'error', message: e?.response?.code === 'forbidden' ? e?.response?.message : t('datasetCreation.stepOne.uploader.failed') })
onFileUpdate(fileItem, -2, fileListCopy)
onFileUpdate(fileItem, -2, fileListRef.current)
return Promise.resolve({ ...fileItem })
})
.finally()

View File

@ -138,7 +138,7 @@ The text generation application offers non-session support and is ideal for tran
</Col>
<Col sticky>
<CodeGroup title="Request" tag="POST" label="/completion-messages" targetCode={`curl -X POST '${props.appDetail.api_base_url}/completion-messages' \\\n--header 'Authorization: Bearer {api_key}' \\\n--header 'Content-Type: application/json' \\\n--data-raw '{\n "inputs": {"query": "Hello, world!"},\n "response_mode": "streaming"\n "user": "abc-123"\n}'\n`}>
<CodeGroup title="Request" tag="POST" label="/completion-messages" targetCode={`curl -X POST '${props.appDetail.api_base_url}/completion-messages' \\\n--header 'Authorization: Bearer {api_key}' \\\n--header 'Content-Type: application/json' \\\n--data-raw '{\n "inputs": {"query": "Hello, world!"},\n "response_mode": "streaming",\n "user": "abc-123"\n}'\n`}>
```bash {{ title: 'cURL' }}
curl -X POST '${props.appDetail.api_base_url}/completion-messages' \

View File

@ -139,7 +139,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
</Col>
<Col sticky>
<CodeGroup title="Request" tag="POST" label="/completion-messages" targetCode={`curl -X POST '${props.appDetail.api_base_url}/completion-messages' \\\n--header 'Authorization: Bearer {api_key}' \\\n--header 'Content-Type: application/json' \\\n--data-raw '{\n "inputs": {"query": "Hello, world!"},\n "response_mode": "streaming"\n "user": "abc-123"\n}'\n`}>
<CodeGroup title="Request" tag="POST" label="/completion-messages" targetCode={`curl -X POST '${props.appDetail.api_base_url}/completion-messages' \\\n--header 'Authorization: Bearer {api_key}' \\\n--header 'Content-Type: application/json' \\\n--data-raw '{\n "inputs": {"query": "Hello, world!"},\n "response_mode": "streaming",\n "user": "abc-123"\n}'\n`}>
```bash {{ title: 'cURL' }}
curl -X POST '${props.appDetail.api_base_url}/completion-messages' \