diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml index fd98db24b9..5ca6a4397d 100644 --- a/.github/workflows/api-tests.yml +++ b/.github/workflows/api-tests.yml @@ -4,6 +4,7 @@ on: pull_request: branches: - main + - plugins/beta paths: - api/** - docker/** @@ -47,15 +48,9 @@ jobs: - name: Run Unit tests run: poetry run -C api bash dev/pytest/pytest_unit_tests.sh - - name: Run ModelRuntime - run: poetry run -C api bash dev/pytest/pytest_model_runtime.sh - - name: Run dify config tests run: poetry run -C api python dev/pytest/pytest_config_tests.py - - name: Run Tool - run: poetry run -C api bash dev/pytest/pytest_tools.sh - - name: Run mypy run: | pushd api diff --git a/.github/workflows/build-push.yml b/.github/workflows/build-push.yml index 3240830277..fab3624a50 100644 --- a/.github/workflows/build-push.yml +++ b/.github/workflows/build-push.yml @@ -5,7 +5,7 @@ on: branches: - "main" - "deploy/dev" - - "dev/plugin-deploy" + - "plugins/beta" release: types: [published] diff --git a/.github/workflows/db-migration-test.yml b/.github/workflows/db-migration-test.yml index 3d881c4c3d..7c8263b10a 100644 --- a/.github/workflows/db-migration-test.yml +++ b/.github/workflows/db-migration-test.yml @@ -4,6 +4,7 @@ on: pull_request: branches: - main + - plugins/beta paths: - api/migrations/** - .github/workflows/db-migration-test.yml diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index e8cd0a7fe4..d874bb6ea7 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -4,6 +4,7 @@ on: pull_request: branches: - main + - plugins/beta concurrency: group: style-${{ github.head_ref || github.run_id }} @@ -66,6 +67,12 @@ jobs: with: files: web/** + - name: Install pnpm + uses: pnpm/action-setup@v4 + with: + version: 10 + run_install: false + - name: Setup NodeJS uses: actions/setup-node@v4 if: steps.changed-files.outputs.any_changed == 'true' @@ -80,7 +87,8 @@ jobs: - name: Web style check if: steps.changed-files.outputs.any_changed == 'true' - run: pnpm run lint + run: echo "${{ steps.changed-files.outputs.all_changed_files }}" | sed 's|web/||g' | xargs pnpm eslint # wait for next lint support eslint v9 + docker-compose-template: name: Docker Compose Template @@ -134,7 +142,7 @@ jobs: if: steps.changed-files.outputs.any_changed == 'true' env: BASH_SEVERITY: warning - DEFAULT_BRANCH: main + DEFAULT_BRANCH: plugins/beta GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} IGNORE_GENERATED_FILES: true IGNORE_GITIGNORED_FILES: true diff --git a/api/.env.example b/api/.env.example index c0962f06d6..06e1c51043 100644 --- a/api/.env.example +++ b/api/.env.example @@ -422,8 +422,9 @@ POSITION_PROVIDER_INCLUDES= POSITION_PROVIDER_EXCLUDES= # Plugin configuration -PLUGIN_API_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1 +PLUGIN_API_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi PLUGIN_API_URL=http://127.0.0.1:5002 +PLUGIN_DAEMON_URL=http://127.0.0.1:5002 PLUGIN_REMOTE_INSTALL_PORT=5003 PLUGIN_REMOTE_INSTALL_HOST=localhost PLUGIN_MAX_PACKAGE_SIZE=15728640 diff --git a/api/configs/packaging/__init__.py b/api/configs/packaging/__init__.py index 278b1d3b8f..91d55c1252 100644 --- a/api/configs/packaging/__init__.py +++ b/api/configs/packaging/__init__.py @@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings): CURRENT_VERSION: str = Field( description="Dify version", - default="0.15.0", + default="1.0.0-beta.1", ) COMMIT_SHA: str = Field( diff --git 
a/api/core/model_runtime/model_providers/__base/large_language_model.py b/api/core/model_runtime/model_providers/__base/large_language_model.py index e833322f27..7f8de7cbab 100644 --- a/api/core/model_runtime/model_providers/__base/large_language_model.py +++ b/api/core/model_runtime/model_providers/__base/large_language_model.py @@ -107,11 +107,46 @@ class LargeLanguageModel(AIModel): content_list = [] usage = LLMUsage.empty_usage() system_fingerprint = None + tools_calls: list[AssistantPromptMessage.ToolCall] = [] + + def increase_tool_call(new_tool_calls: list[AssistantPromptMessage.ToolCall]): + def get_tool_call(tool_name: str): + if not tool_name: + return tools_calls[-1] + + tool_call = next( + (tool_call for tool_call in tools_calls if tool_call.function.name == tool_name), None + ) + if tool_call is None: + tool_call = AssistantPromptMessage.ToolCall( + id="", + type="", + function=AssistantPromptMessage.ToolCall.ToolCallFunction(name=tool_name, arguments=""), + ) + tools_calls.append(tool_call) + + return tool_call + + for new_tool_call in new_tool_calls: + # get tool call + tool_call = get_tool_call(new_tool_call.function.name) + # update tool call + if new_tool_call.id: + tool_call.id = new_tool_call.id + if new_tool_call.type: + tool_call.type = new_tool_call.type + if new_tool_call.function.name: + tool_call.function.name = new_tool_call.function.name + if new_tool_call.function.arguments: + tool_call.function.arguments += new_tool_call.function.arguments + for chunk in result: if isinstance(chunk.delta.message.content, str): content += chunk.delta.message.content elif isinstance(chunk.delta.message.content, list): content_list.extend(chunk.delta.message.content) + if chunk.delta.message.tool_calls: + increase_tool_call(chunk.delta.message.tool_calls) usage = chunk.delta.usage or LLMUsage.empty_usage() system_fingerprint = chunk.system_fingerprint @@ -120,7 +155,10 @@ class LargeLanguageModel(AIModel): result = LLMResult( model=model, prompt_messages=prompt_messages, - message=AssistantPromptMessage(content=content or content_list), + message=AssistantPromptMessage( + content=content or content_list, + tool_calls=tools_calls, + ), usage=usage, system_fingerprint=system_fingerprint, ) diff --git a/api/core/rag/extractor/word_extractor.py b/api/core/rag/extractor/word_extractor.py index 12f0cd182a..0a6ffaa1dd 100644 --- a/api/core/rag/extractor/word_extractor.py +++ b/api/core/rag/extractor/word_extractor.py @@ -50,7 +50,7 @@ class WordExtractor(BaseExtractor): self.web_path = self.file_path # TODO: use a better way to handle the file - self.temp_file = tempfile.NamedTemporaryFile() # noqa: SIM115 + self.temp_file = tempfile.NamedTemporaryFile() # noqa SIM115 self.temp_file.write(r.content) self.file_path = self.temp_file.name elif not os.path.isfile(self.file_path): diff --git a/api/core/tools/builtin_tool/providers/time/tools/timezone_conversion.py b/api/core/tools/builtin_tool/providers/time/tools/timezone_conversion.py index 796c38b697..f9b776b3b9 100644 --- a/api/core/tools/builtin_tool/providers/time/tools/timezone_conversion.py +++ b/api/core/tools/builtin_tool/providers/time/tools/timezone_conversion.py @@ -48,6 +48,6 @@ class TimezoneConversionTool(BuiltinTool): datetime_with_tz = input_timezone.localize(local_time) # timezone convert converted_datetime = datetime_with_tz.astimezone(output_timezone) - return converted_datetime.strftime(format=time_format) + return converted_datetime.strftime(format=time_format) # type: ignore except Exception as e: raise 
ToolInvokeError(str(e)) diff --git a/api/core/tools/builtin_tool/providers/webscraper/webscraper.py b/api/core/tools/builtin_tool/providers/webscraper/webscraper.py index bf2199518e..52c8370e0d 100644 --- a/api/core/tools/builtin_tool/providers/webscraper/webscraper.py +++ b/api/core/tools/builtin_tool/providers/webscraper/webscraper.py @@ -5,4 +5,7 @@ from core.tools.builtin_tool.provider import BuiltinToolProviderController class WebscraperProvider(BuiltinToolProviderController): def _validate_credentials(self, user_id: str, credentials: dict[str, Any]) -> None: + """ + Validate credentials + """ pass diff --git a/api/pytest.ini b/api/pytest.ini index 993da4c9a7..b08cca5240 100644 --- a/api/pytest.ini +++ b/api/pytest.ini @@ -7,6 +7,12 @@ env = CODE_EXECUTION_API_KEY = dify-sandbox CODE_EXECUTION_ENDPOINT = http://127.0.0.1:8194 CODE_MAX_STRING_LENGTH = 80000 + PLUGIN_API_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi + PLUGIN_DAEMON_URL=http://127.0.0.1:5002 + PLUGIN_MAX_PACKAGE_SIZE=15728640 + INNER_API_KEY_FOR_PLUGIN=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1 + MARKETPLACE_ENABLED=true + MARKETPLACE_API_URL=https://marketplace.dify.ai FIRECRAWL_API_KEY = fc- FIREWORKS_API_KEY = fw_aaaaaaaaaaaaaaaaaaaa GOOGLE_API_KEY = abcdefghijklmnopqrstuvwxyz diff --git a/api/tests/integration_tests/model_runtime/__mock/plugin_daemon.py b/api/tests/integration_tests/model_runtime/__mock/plugin_daemon.py new file mode 100644 index 0000000000..6dfc01ab4c --- /dev/null +++ b/api/tests/integration_tests/model_runtime/__mock/plugin_daemon.py @@ -0,0 +1,44 @@ +import os +from collections.abc import Callable + +import pytest + +# import monkeypatch +from _pytest.monkeypatch import MonkeyPatch + +from core.plugin.manager.model import PluginModelManager +from tests.integration_tests.model_runtime.__mock.plugin_model import MockModelClass + + +def mock_plugin_daemon( + monkeypatch: MonkeyPatch, +) -> Callable[[], None]: + """ + mock openai module + + :param monkeypatch: pytest monkeypatch fixture + :return: unpatch function + """ + + def unpatch() -> None: + monkeypatch.undo() + + monkeypatch.setattr(PluginModelManager, "invoke_llm", MockModelClass.invoke_llm) + monkeypatch.setattr(PluginModelManager, "fetch_model_providers", MockModelClass.fetch_model_providers) + monkeypatch.setattr(PluginModelManager, "get_model_schema", MockModelClass.get_model_schema) + + return unpatch + + +MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true" + + +@pytest.fixture +def setup_model_mock(monkeypatch): + if MOCK: + unpatch = mock_plugin_daemon(monkeypatch) + + yield + + if MOCK: + unpatch() diff --git a/api/tests/integration_tests/model_runtime/__mock/plugin_model.py b/api/tests/integration_tests/model_runtime/__mock/plugin_model.py new file mode 100644 index 0000000000..50913662e2 --- /dev/null +++ b/api/tests/integration_tests/model_runtime/__mock/plugin_model.py @@ -0,0 +1,249 @@ +import datetime +import uuid +from collections.abc import Generator, Sequence +from decimal import Decimal +from json import dumps + +# import monkeypatch +from typing import Optional + +from core.model_runtime.entities.common_entities import I18nObject +from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage +from core.model_runtime.entities.message_entities import AssistantPromptMessage, PromptMessage, PromptMessageTool +from core.model_runtime.entities.model_entities import ( + AIModelEntity, + FetchFrom, + ModelFeature, + ModelPropertyKey, + 
ModelType, +) +from core.model_runtime.entities.provider_entities import ConfigurateMethod, ProviderEntity +from core.plugin.entities.plugin_daemon import PluginModelProviderEntity +from core.plugin.manager.model import PluginModelManager + + +class MockModelClass(PluginModelManager): + def fetch_model_providers(self, tenant_id: str) -> Sequence[PluginModelProviderEntity]: + """ + Fetch model providers for the given tenant. + """ + return [ + PluginModelProviderEntity( + id=uuid.uuid4().hex, + created_at=datetime.datetime.now(), + updated_at=datetime.datetime.now(), + provider="openai", + tenant_id=tenant_id, + plugin_unique_identifier="langgenius/openai/openai", + plugin_id="langgenius/openai", + declaration=ProviderEntity( + provider="openai", + label=I18nObject( + en_US="OpenAI", + zh_Hans="OpenAI", + ), + description=I18nObject( + en_US="OpenAI", + zh_Hans="OpenAI", + ), + icon_small=I18nObject( + en_US="https://example.com/icon_small.png", + zh_Hans="https://example.com/icon_small.png", + ), + icon_large=I18nObject( + en_US="https://example.com/icon_large.png", + zh_Hans="https://example.com/icon_large.png", + ), + supported_model_types=[ModelType.LLM], + configurate_methods=[ConfigurateMethod.PREDEFINED_MODEL], + models=[ + AIModelEntity( + model="gpt-3.5-turbo", + label=I18nObject( + en_US="gpt-3.5-turbo", + zh_Hans="gpt-3.5-turbo", + ), + model_type=ModelType.LLM, + fetch_from=FetchFrom.PREDEFINED_MODEL, + model_properties={}, + features=[ModelFeature.TOOL_CALL, ModelFeature.MULTI_TOOL_CALL], + ), + AIModelEntity( + model="gpt-3.5-turbo-instruct", + label=I18nObject( + en_US="gpt-3.5-turbo-instruct", + zh_Hans="gpt-3.5-turbo-instruct", + ), + model_type=ModelType.LLM, + fetch_from=FetchFrom.PREDEFINED_MODEL, + model_properties={ + ModelPropertyKey.MODE: LLMMode.COMPLETION, + }, + features=[], + ), + ], + ), + ) + ] + + def get_model_schema( + self, + tenant_id: str, + user_id: str, + plugin_id: str, + provider: str, + model_type: str, + model: str, + credentials: dict, + ) -> AIModelEntity | None: + """ + Get model schema + """ + return AIModelEntity( + model=model, + label=I18nObject( + en_US="OpenAI", + zh_Hans="OpenAI", + ), + model_type=ModelType(model_type), + fetch_from=FetchFrom.PREDEFINED_MODEL, + model_properties={}, + features=[ModelFeature.TOOL_CALL, ModelFeature.MULTI_TOOL_CALL] if model == "gpt-3.5-turbo" else [], + ) + + @staticmethod + def generate_function_call( + tools: Optional[list[PromptMessageTool]], + ) -> Optional[AssistantPromptMessage.ToolCall]: + if not tools or len(tools) == 0: + return None + function: PromptMessageTool = tools[0] + function_name = function.name + function_parameters = function.parameters + function_parameters_type = function_parameters["type"] + if function_parameters_type != "object": + return None + function_parameters_properties = function_parameters["properties"] + function_parameters_required = function_parameters["required"] + parameters = {} + for parameter_name, parameter in function_parameters_properties.items(): + if parameter_name not in function_parameters_required: + continue + parameter_type = parameter["type"] + if parameter_type == "string": + if "enum" in parameter: + if len(parameter["enum"]) == 0: + continue + parameters[parameter_name] = parameter["enum"][0] + else: + parameters[parameter_name] = "kawaii" + elif parameter_type == "integer": + parameters[parameter_name] = 114514 + elif parameter_type == "number": + parameters[parameter_name] = 1919810.0 + elif parameter_type == "boolean": + parameters[parameter_name] 
= True + + return AssistantPromptMessage.ToolCall( + id=str(uuid.uuid4()), + type="function", + function=AssistantPromptMessage.ToolCall.ToolCallFunction( + name=function_name, + arguments=dumps(parameters), + ), + ) + + @staticmethod + def mocked_chat_create_sync( + model: str, + prompt_messages: list[PromptMessage], + tools: Optional[list[PromptMessageTool]] = None, + ) -> LLMResult: + tool_call = MockModelClass.generate_function_call(tools=tools) + + return LLMResult( + id=str(uuid.uuid4()), + model=model, + prompt_messages=prompt_messages, + message=AssistantPromptMessage(content="elaina", tool_calls=[tool_call] if tool_call else []), + usage=LLMUsage( + prompt_tokens=2, + completion_tokens=1, + total_tokens=3, + prompt_unit_price=Decimal(0.0001), + completion_unit_price=Decimal(0.0002), + prompt_price_unit=Decimal(1), + prompt_price=Decimal(0.0001), + completion_price_unit=Decimal(1), + completion_price=Decimal(0.0002), + total_price=Decimal(0.0003), + currency="USD", + latency=0.001, + ), + ) + + @staticmethod + def mocked_chat_create_stream( + model: str, + prompt_messages: list[PromptMessage], + tools: Optional[list[PromptMessageTool]] = None, + ) -> Generator[LLMResultChunk, None, None]: + tool_call = MockModelClass.generate_function_call(tools=tools) + + full_text = "Hello, world!\n\n```python\nprint('Hello, world!')\n```" + for i in range(0, len(full_text) + 1): + if i == len(full_text): + yield LLMResultChunk( + model=model, + prompt_messages=prompt_messages, + delta=LLMResultChunkDelta( + index=0, + message=AssistantPromptMessage( + content="", + tool_calls=[tool_call] if tool_call else [], + ), + ), + ) + else: + yield LLMResultChunk( + model=model, + prompt_messages=prompt_messages, + delta=LLMResultChunkDelta( + index=0, + message=AssistantPromptMessage( + content=full_text[i], + tool_calls=[tool_call] if tool_call else [], + ), + usage=LLMUsage( + prompt_tokens=2, + completion_tokens=17, + total_tokens=19, + prompt_unit_price=Decimal(0.0001), + completion_unit_price=Decimal(0.0002), + prompt_price_unit=Decimal(1), + prompt_price=Decimal(0.0001), + completion_price_unit=Decimal(1), + completion_price=Decimal(0.0002), + total_price=Decimal(0.0003), + currency="USD", + latency=0.001, + ), + ), + ) + + def invoke_llm( + self: PluginModelManager, + *, + tenant_id: str, + user_id: str, + plugin_id: str, + provider: str, + model: str, + credentials: dict, + prompt_messages: list[PromptMessage], + model_parameters: Optional[dict] = None, + tools: Optional[list[PromptMessageTool]] = None, + stop: Optional[list[str]] = None, + stream: bool = True, + ): + return MockModelClass.mocked_chat_create_stream(model=model, prompt_messages=prompt_messages, tools=tools) diff --git a/api/tests/integration_tests/model_runtime/gpustack/test_speech2text.py b/api/tests/integration_tests/model_runtime/gpustack/test_speech2text.py deleted file mode 100644 index c215e9b739..0000000000 --- a/api/tests/integration_tests/model_runtime/gpustack/test_speech2text.py +++ /dev/null @@ -1,55 +0,0 @@ -import os -from pathlib import Path - -import pytest - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.gpustack.speech2text.speech2text import GPUStackSpeech2TextModel - - -def test_validate_credentials(): - model = GPUStackSpeech2TextModel() - - with pytest.raises(CredentialsValidateFailedError): - model.validate_credentials( - model="faster-whisper-medium", - credentials={ - "endpoint_url": "invalid_url", - "api_key": "invalid_api_key", - }, 
- ) - - model.validate_credentials( - model="faster-whisper-medium", - credentials={ - "endpoint_url": os.environ.get("GPUSTACK_SERVER_URL"), - "api_key": os.environ.get("GPUSTACK_API_KEY"), - }, - ) - - -def test_invoke_model(): - model = GPUStackSpeech2TextModel() - - # Get the directory of the current file - current_dir = os.path.dirname(os.path.abspath(__file__)) - - # Get assets directory - assets_dir = os.path.join(os.path.dirname(current_dir), "assets") - - # Construct the path to the audio file - audio_file_path = os.path.join(assets_dir, "audio.mp3") - - file = Path(audio_file_path).read_bytes() - - result = model.invoke( - model="faster-whisper-medium", - credentials={ - "endpoint_url": os.environ.get("GPUSTACK_SERVER_URL"), - "api_key": os.environ.get("GPUSTACK_API_KEY"), - }, - file=file, - ) - - assert isinstance(result, str) - assert result == "1, 2, 3, 4, 5, 6, 7, 8, 9, 10" diff --git a/api/tests/integration_tests/model_runtime/gpustack/test_tts.py b/api/tests/integration_tests/model_runtime/gpustack/test_tts.py deleted file mode 100644 index 8997ad074c..0000000000 --- a/api/tests/integration_tests/model_runtime/gpustack/test_tts.py +++ /dev/null @@ -1,24 +0,0 @@ -import os - -from core.model_runtime.model_providers.gpustack.tts.tts import GPUStackText2SpeechModel - - -def test_invoke_model(): - model = GPUStackText2SpeechModel() - - result = model.invoke( - model="cosyvoice-300m-sft", - tenant_id="test", - credentials={ - "endpoint_url": os.environ.get("GPUSTACK_SERVER_URL"), - "api_key": os.environ.get("GPUSTACK_API_KEY"), - }, - content_text="Hello world", - voice="Chinese Female", - ) - - content = b"" - for chunk in result: - content += chunk - - assert content != b"" diff --git a/api/tests/integration_tests/workflow/nodes/__mock/model.py b/api/tests/integration_tests/workflow/nodes/__mock/model.py new file mode 100644 index 0000000000..7c48d84d69 --- /dev/null +++ b/api/tests/integration_tests/workflow/nodes/__mock/model.py @@ -0,0 +1,50 @@ +from unittest.mock import MagicMock + +from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity +from core.entities.provider_configuration import ProviderConfiguration, ProviderModelBundle +from core.entities.provider_entities import CustomConfiguration, CustomProviderConfiguration, SystemConfiguration +from core.model_manager import ModelInstance +from core.model_runtime.entities.model_entities import ModelType +from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory +from models.provider import ProviderType + + +def get_mocked_fetch_model_config( + provider: str, + model: str, + mode: str, + credentials: dict, +): + model_provider_factory = ModelProviderFactory(tenant_id="test_tenant") + model_type_instance = model_provider_factory.get_model_type_instance(provider, ModelType.LLM) + provider_model_bundle = ProviderModelBundle( + configuration=ProviderConfiguration( + tenant_id="1", + provider=model_provider_factory.get_provider_schema(provider), + preferred_provider_type=ProviderType.CUSTOM, + using_provider_type=ProviderType.CUSTOM, + system_configuration=SystemConfiguration(enabled=False), + custom_configuration=CustomConfiguration(provider=CustomProviderConfiguration(credentials=credentials)), + model_settings=[], + ), + model_type_instance=model_type_instance, + ) + model_instance = ModelInstance(provider_model_bundle=provider_model_bundle, model=model) + model_schema = model_provider_factory.get_model_schema( + provider=provider, + 
model_type=model_type_instance.model_type, + model=model, + credentials=credentials, + ) + assert model_schema is not None + model_config = ModelConfigWithCredentialsEntity( + model=model, + provider=provider, + mode=mode, + credentials=credentials, + parameters={}, + model_schema=model_schema, + provider_model_bundle=provider_model_bundle, + ) + + return MagicMock(return_value=(model_instance, model_config)) diff --git a/api/tests/integration_tests/workflow/nodes/test_llm.py b/api/tests/integration_tests/workflow/nodes/test_llm.py index ca271d5362..22354df196 100644 --- a/api/tests/integration_tests/workflow/nodes/test_llm.py +++ b/api/tests/integration_tests/workflow/nodes/test_llm.py @@ -7,12 +7,7 @@ from unittest.mock import MagicMock import pytest -from core.app.entities.app_invoke_entities import InvokeFrom, ModelConfigWithCredentialsEntity -from core.entities.provider_configuration import ProviderConfiguration, ProviderModelBundle -from core.entities.provider_entities import CustomConfiguration, CustomProviderConfiguration, SystemConfiguration -from core.model_manager import ModelInstance -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.model_providers import ModelProviderFactory +from core.app.entities.app_invoke_entities import InvokeFrom from core.workflow.entities.variable_pool import VariablePool from core.workflow.enums import SystemVariableKey from core.workflow.graph_engine.entities.graph import Graph @@ -22,11 +17,11 @@ from core.workflow.nodes.event import RunCompletedEvent from core.workflow.nodes.llm.node import LLMNode from extensions.ext_database import db from models.enums import UserFrom -from models.provider import ProviderType from models.workflow import WorkflowNodeExecutionStatus, WorkflowType +from tests.integration_tests.workflow.nodes.__mock.model import get_mocked_fetch_model_config """FOR MOCK FIXTURES, DO NOT REMOVE""" -from tests.integration_tests.model_runtime.__mock.openai import setup_openai_mock +from tests.integration_tests.model_runtime.__mock.plugin_daemon import setup_model_mock from tests.integration_tests.workflow.nodes.__mock.code_executor import setup_code_executor_mock @@ -81,15 +76,19 @@ def init_llm_node(config: dict) -> LLMNode: return node -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_execute_llm(setup_openai_mock): +def test_execute_llm(setup_model_mock): node = init_llm_node( config={ "id": "llm", "data": { "title": "123", "type": "llm", - "model": {"provider": "openai", "name": "gpt-3.5-turbo", "mode": "chat", "completion_params": {}}, + "model": { + "provider": "langgenius/openai/openai", + "name": "gpt-3.5-turbo", + "mode": "chat", + "completion_params": {}, + }, "prompt_template": [ {"role": "system", "text": "you are a helpful assistant.\ntoday's weather is {{#abc.output#}}."}, {"role": "user", "text": "{{#sys.query#}}"}, @@ -103,37 +102,15 @@ def test_execute_llm(setup_openai_mock): credentials = {"openai_api_key": os.environ.get("OPENAI_API_KEY")} - provider_instance = ModelProviderFactory().get_provider_instance("openai") - model_type_instance = provider_instance.get_model_instance(ModelType.LLM) - provider_model_bundle = ProviderModelBundle( - configuration=ProviderConfiguration( - tenant_id="1", - provider=provider_instance.get_provider_schema(), - preferred_provider_type=ProviderType.CUSTOM, - using_provider_type=ProviderType.CUSTOM, - system_configuration=SystemConfiguration(enabled=False), - 
custom_configuration=CustomConfiguration(provider=CustomProviderConfiguration(credentials=credentials)), - model_settings=[], - ), - model_type_instance=model_type_instance, - ) - model_instance = ModelInstance(provider_model_bundle=provider_model_bundle, model="gpt-3.5-turbo") - model_schema = model_type_instance.get_model_schema("gpt-3.5-turbo") - assert model_schema is not None - model_config = ModelConfigWithCredentialsEntity( - model="gpt-3.5-turbo", - provider="openai", - mode="chat", - credentials=credentials, - parameters={}, - model_schema=model_schema, - provider_model_bundle=provider_model_bundle, - ) - # Mock db.session.close() db.session.close = MagicMock() - node._fetch_model_config = MagicMock(return_value=(model_instance, model_config)) + node._fetch_model_config = get_mocked_fetch_model_config( + provider="langgenius/openai/openai", + model="gpt-3.5-turbo", + mode="chat", + credentials=credentials, + ) # execute node result = node._run() @@ -149,8 +126,7 @@ def test_execute_llm(setup_openai_mock): @pytest.mark.parametrize("setup_code_executor_mock", [["none"]], indirect=True) -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_execute_llm_with_jinja2(setup_code_executor_mock, setup_openai_mock): +def test_execute_llm_with_jinja2(setup_code_executor_mock, setup_model_mock): """ Test execute LLM node with jinja2 """ @@ -190,38 +166,15 @@ def test_execute_llm_with_jinja2(setup_code_executor_mock, setup_openai_mock): credentials = {"openai_api_key": os.environ.get("OPENAI_API_KEY")} - provider_instance = ModelProviderFactory().get_provider_instance("openai") - model_type_instance = provider_instance.get_model_instance(ModelType.LLM) - provider_model_bundle = ProviderModelBundle( - configuration=ProviderConfiguration( - tenant_id="1", - provider=provider_instance.get_provider_schema(), - preferred_provider_type=ProviderType.CUSTOM, - using_provider_type=ProviderType.CUSTOM, - system_configuration=SystemConfiguration(enabled=False), - custom_configuration=CustomConfiguration(provider=CustomProviderConfiguration(credentials=credentials)), - model_settings=[], - ), - model_type_instance=model_type_instance, - ) - - model_instance = ModelInstance(provider_model_bundle=provider_model_bundle, model="gpt-3.5-turbo") - model_schema = model_type_instance.get_model_schema("gpt-3.5-turbo") - assert model_schema is not None - model_config = ModelConfigWithCredentialsEntity( - model="gpt-3.5-turbo", - provider="openai", - mode="chat", - credentials=credentials, - parameters={}, - model_schema=model_schema, - provider_model_bundle=provider_model_bundle, - ) - # Mock db.session.close() db.session.close = MagicMock() - node._fetch_model_config = MagicMock(return_value=(model_instance, model_config)) + node._fetch_model_config = get_mocked_fetch_model_config( + provider="langgenius/openai/openai", + model="gpt-3.5-turbo", + mode="chat", + credentials=credentials, + ) # execute node result = node._run() diff --git a/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py b/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py index 39aaafd981..ca055f5cc5 100644 --- a/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py +++ b/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py @@ -4,14 +4,7 @@ import uuid from typing import Optional from unittest.mock import MagicMock -import pytest - -from core.app.entities.app_invoke_entities import InvokeFrom, ModelConfigWithCredentialsEntity -from 
core.entities.provider_configuration import ProviderConfiguration, ProviderModelBundle -from core.entities.provider_entities import CustomConfiguration, CustomProviderConfiguration, SystemConfiguration -from core.model_manager import ModelInstance -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory +from core.app.entities.app_invoke_entities import InvokeFrom from core.workflow.entities.variable_pool import VariablePool from core.workflow.enums import SystemVariableKey from core.workflow.graph_engine.entities.graph import Graph @@ -20,53 +13,11 @@ from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntime from core.workflow.nodes.parameter_extractor.parameter_extractor_node import ParameterExtractorNode from extensions.ext_database import db from models.enums import UserFrom -from models.provider import ProviderType +from tests.integration_tests.workflow.nodes.__mock.model import get_mocked_fetch_model_config """FOR MOCK FIXTURES, DO NOT REMOVE""" from models.workflow import WorkflowNodeExecutionStatus, WorkflowType -from tests.integration_tests.model_runtime.__mock.anthropic import setup_anthropic_mock -from tests.integration_tests.model_runtime.__mock.openai import setup_openai_mock - - -def get_mocked_fetch_model_config( - provider: str, - model: str, - mode: str, - credentials: dict, -): - model_provider_factory = ModelProviderFactory(tenant_id="test_tenant") - model_type_instance = model_provider_factory.get_model_type_instance(provider, ModelType.LLM) - provider_model_bundle = ProviderModelBundle( - configuration=ProviderConfiguration( - tenant_id="1", - provider=model_provider_factory.get_provider_schema(provider), - preferred_provider_type=ProviderType.CUSTOM, - using_provider_type=ProviderType.CUSTOM, - system_configuration=SystemConfiguration(enabled=False), - custom_configuration=CustomConfiguration(provider=CustomProviderConfiguration(credentials=credentials)), - model_settings=[], - ), - model_type_instance=model_type_instance, - ) - model_instance = ModelInstance(provider_model_bundle=provider_model_bundle, model=model) - model_schema = model_provider_factory.get_model_schema( - provider=provider, - model_type=model_type_instance.model_type, - model=model, - credentials=credentials, - ) - assert model_schema is not None - model_config = ModelConfigWithCredentialsEntity( - model=model, - provider=provider, - mode=mode, - credentials=credentials, - parameters={}, - model_schema=model_schema, - provider_model_bundle=provider_model_bundle, - ) - - return MagicMock(return_value=(model_instance, model_config)) +from tests.integration_tests.model_runtime.__mock.plugin_daemon import setup_model_mock def get_mocked_fetch_memory(memory_text: str): @@ -133,8 +84,7 @@ def init_parameter_extractor_node(config: dict): ) -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_function_calling_parameter_extractor(setup_openai_mock): +def test_function_calling_parameter_extractor(setup_model_mock): """ Test function calling for parameter extractor. 
""" @@ -144,7 +94,12 @@ def test_function_calling_parameter_extractor(setup_openai_mock): "data": { "title": "123", "type": "parameter-extractor", - "model": {"provider": "openai", "name": "gpt-3.5-turbo", "mode": "chat", "completion_params": {}}, + "model": { + "provider": "langgenius/openai/openai", + "name": "gpt-3.5-turbo", + "mode": "chat", + "completion_params": {}, + }, "query": ["sys", "query"], "parameters": [{"name": "location", "type": "string", "description": "location", "required": True}], "instruction": "", @@ -155,25 +110,13 @@ def test_function_calling_parameter_extractor(setup_openai_mock): ) node._fetch_model_config = get_mocked_fetch_model_config( - provider="openai", + provider="langgenius/openai/openai", model="gpt-3.5-turbo", mode="chat", credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")}, ) db.session.close = MagicMock() - # construct variable pool - pool = VariablePool( - system_variables={ - SystemVariableKey.QUERY: "what's the weather in SF", - SystemVariableKey.FILES: [], - SystemVariableKey.CONVERSATION_ID: "abababa", - SystemVariableKey.USER_ID: "aaa", - }, - user_inputs={}, - environment_variables=[], - ) - result = node._run() assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED @@ -182,8 +125,7 @@ def test_function_calling_parameter_extractor(setup_openai_mock): assert result.outputs.get("__reason") == None -@pytest.mark.parametrize("setup_openai_mock", [["chat"]], indirect=True) -def test_instructions(setup_openai_mock): +def test_instructions(setup_model_mock): """ Test chat parameter extractor. """ @@ -193,7 +135,12 @@ def test_instructions(setup_openai_mock): "data": { "title": "123", "type": "parameter-extractor", - "model": {"provider": "openai", "name": "gpt-3.5-turbo", "mode": "chat", "completion_params": {}}, + "model": { + "provider": "langgenius/openai/openai", + "name": "gpt-3.5-turbo", + "mode": "chat", + "completion_params": {}, + }, "query": ["sys", "query"], "parameters": [{"name": "location", "type": "string", "description": "location", "required": True}], "reasoning_mode": "function_call", @@ -204,7 +151,7 @@ def test_instructions(setup_openai_mock): ) node._fetch_model_config = get_mocked_fetch_model_config( - provider="openai", + provider="langgenius/openai/openai", model="gpt-3.5-turbo", mode="chat", credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")}, @@ -228,8 +175,7 @@ def test_instructions(setup_openai_mock): assert "what's the weather in SF" in prompt.get("text") -@pytest.mark.parametrize("setup_anthropic_mock", [["none"]], indirect=True) -def test_chat_parameter_extractor(setup_anthropic_mock): +def test_chat_parameter_extractor(setup_model_mock): """ Test chat parameter extractor. 
""" @@ -239,7 +185,12 @@ def test_chat_parameter_extractor(setup_anthropic_mock): "data": { "title": "123", "type": "parameter-extractor", - "model": {"provider": "anthropic", "name": "claude-2", "mode": "chat", "completion_params": {}}, + "model": { + "provider": "langgenius/openai/openai", + "name": "gpt-3.5-turbo", + "mode": "chat", + "completion_params": {}, + }, "query": ["sys", "query"], "parameters": [{"name": "location", "type": "string", "description": "location", "required": True}], "reasoning_mode": "prompt", @@ -250,10 +201,10 @@ def test_chat_parameter_extractor(setup_anthropic_mock): ) node._fetch_model_config = get_mocked_fetch_model_config( - provider="anthropic", - model="claude-2", + provider="langgenius/openai/openai", + model="gpt-3.5-turbo", mode="chat", - credentials={"anthropic_api_key": os.environ.get("ANTHROPIC_API_KEY")}, + credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")}, ) db.session.close = MagicMock() @@ -275,8 +226,7 @@ def test_chat_parameter_extractor(setup_anthropic_mock): assert '\n{"type": "object"' in prompt.get("text") -@pytest.mark.parametrize("setup_openai_mock", [["completion"]], indirect=True) -def test_completion_parameter_extractor(setup_openai_mock): +def test_completion_parameter_extractor(setup_model_mock): """ Test completion parameter extractor. """ @@ -287,7 +237,7 @@ def test_completion_parameter_extractor(setup_openai_mock): "title": "123", "type": "parameter-extractor", "model": { - "provider": "openai", + "provider": "langgenius/openai/openai", "name": "gpt-3.5-turbo-instruct", "mode": "completion", "completion_params": {}, @@ -302,7 +252,7 @@ def test_completion_parameter_extractor(setup_openai_mock): ) node._fetch_model_config = get_mocked_fetch_model_config( - provider="openai", + provider="langgenius/openai/openai", model="gpt-3.5-turbo-instruct", mode="completion", credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")}, @@ -335,7 +285,7 @@ def test_extract_json_response(): "title": "123", "type": "parameter-extractor", "model": { - "provider": "openai", + "provider": "langgenius/openai/openai", "name": "gpt-3.5-turbo-instruct", "mode": "completion", "completion_params": {}, @@ -361,8 +311,7 @@ def test_extract_json_response(): assert result["location"] == "kawaii" -@pytest.mark.parametrize("setup_anthropic_mock", [["none"]], indirect=True) -def test_chat_parameter_extractor_with_memory(setup_anthropic_mock): +def test_chat_parameter_extractor_with_memory(setup_model_mock): """ Test chat parameter extractor with memory. 
""" @@ -372,7 +321,12 @@ def test_chat_parameter_extractor_with_memory(setup_anthropic_mock): "data": { "title": "123", "type": "parameter-extractor", - "model": {"provider": "anthropic", "name": "claude-2", "mode": "chat", "completion_params": {}}, + "model": { + "provider": "langgenius/openai/openai", + "name": "gpt-3.5-turbo", + "mode": "chat", + "completion_params": {}, + }, "query": ["sys", "query"], "parameters": [{"name": "location", "type": "string", "description": "location", "required": True}], "reasoning_mode": "prompt", @@ -383,10 +337,10 @@ def test_chat_parameter_extractor_with_memory(setup_anthropic_mock): ) node._fetch_model_config = get_mocked_fetch_model_config( - provider="anthropic", - model="claude-2", + provider="langgenius/openai/openai", + model="gpt-3.5-turbo", mode="chat", - credentials={"anthropic_api_key": os.environ.get("ANTHROPIC_API_KEY")}, + credentials={"openai_api_key": os.environ.get("OPENAI_API_KEY")}, ) node._fetch_memory = get_mocked_fetch_memory("customized memory") db.session.close = MagicMock() diff --git a/api/tests/integration_tests/workflow/nodes/test_tool.py b/api/tests/integration_tests/workflow/nodes/test_tool.py index 4068e796b7..5a569a5983 100644 --- a/api/tests/integration_tests/workflow/nodes/test_tool.py +++ b/api/tests/integration_tests/workflow/nodes/test_tool.py @@ -1,13 +1,15 @@ import time import uuid +from unittest.mock import MagicMock from core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities.node_entities import NodeRunResult +from core.tools.utils.configuration import ToolParameterConfigurationManager from core.workflow.entities.variable_pool import VariablePool from core.workflow.enums import SystemVariableKey from core.workflow.graph_engine.entities.graph import Graph from core.workflow.graph_engine.entities.graph_init_params import GraphInitParams from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState +from core.workflow.nodes.event.event import RunCompletedEvent from core.workflow.nodes.tool.tool_node import ToolNode from models.enums import UserFrom from models.workflow import WorkflowNodeExecutionStatus, WorkflowType @@ -63,31 +65,28 @@ def test_tool_variable_invoke(): "data": { "title": "a", "desc": "a", - "provider_id": "maths", + "provider_id": "time", "provider_type": "builtin", - "provider_name": "maths", - "tool_name": "eval_expression", - "tool_label": "eval_expression", + "provider_name": "time", + "tool_name": "current_time", + "tool_label": "current_time", "tool_configurations": {}, - "tool_parameters": { - "expression": { - "type": "variable", - "value": ["1", "123", "args1"], - } - }, + "tool_parameters": {}, }, } ) + ToolParameterConfigurationManager.decrypt_tool_parameters = MagicMock(return_value={"format": "%Y-%m-%d %H:%M:%S"}) + node.graph_runtime_state.variable_pool.add(["1", "123", "args1"], "1+1") # execute node result = node._run() - assert isinstance(result, NodeRunResult) - assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED - assert result.outputs is not None - assert "2" in result.outputs["text"] - assert result.outputs["files"] == [] + for item in result: + if isinstance(item, RunCompletedEvent): + assert item.run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED + assert item.run_result.outputs is not None + assert item.run_result.outputs.get("text") is not None def test_tool_mixed_invoke(): @@ -97,28 +96,25 @@ def test_tool_mixed_invoke(): "data": { "title": "a", "desc": "a", - "provider_id": "maths", + "provider_id": 
"time", "provider_type": "builtin", - "provider_name": "maths", - "tool_name": "eval_expression", - "tool_label": "eval_expression", - "tool_configurations": {}, - "tool_parameters": { - "expression": { - "type": "mixed", - "value": "{{#1.args1#}}", - } + "provider_name": "time", + "tool_name": "current_time", + "tool_label": "current_time", + "tool_configurations": { + "format": "%Y-%m-%d %H:%M:%S", }, + "tool_parameters": {}, }, } ) - node.graph_runtime_state.variable_pool.add(["1", "args1"], "1+1") + ToolParameterConfigurationManager.decrypt_tool_parameters = MagicMock(return_value={"format": "%Y-%m-%d %H:%M:%S"}) # execute node result = node._run() - assert isinstance(result, NodeRunResult) - assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED - assert result.outputs is not None - assert "2" in result.outputs["text"] - assert result.outputs["files"] == [] + for item in result: + if isinstance(item, RunCompletedEvent): + assert item.run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED + assert item.run_result.outputs is not None + assert item.run_result.outputs.get("text") is not None diff --git a/api/tests/unit_tests/core/helper/test_marketplace.py b/api/tests/unit_tests/core/helper/test_marketplace.py index 51011a574a..6ccce7ac9f 100644 --- a/api/tests/unit_tests/core/helper/test_marketplace.py +++ b/api/tests/unit_tests/core/helper/test_marketplace.py @@ -2,6 +2,6 @@ from core.helper.marketplace import download_plugin_pkg def test_download_plugin_pkg(): - pkg = download_plugin_pkg("yeuoly/google:0.0.1@4ff79ee644987e5b744d9c5b7a735d459fe66f26b28724326a7834d7e459e708") + pkg = download_plugin_pkg("langgenius/bing:0.0.1@e58735424d2104f208c2bd683c5142e0332045b425927067acf432b26f3d970b") assert pkg is not None assert len(pkg) > 0 diff --git a/api/tests/unit_tests/core/prompt/test_prompt_transform.py b/api/tests/unit_tests/core/prompt/test_prompt_transform.py index 89c14463bb..16896a0c6c 100644 --- a/api/tests/unit_tests/core/prompt/test_prompt_transform.py +++ b/api/tests/unit_tests/core/prompt/test_prompt_transform.py @@ -1,52 +1,52 @@ -from unittest.mock import MagicMock +# from unittest.mock import MagicMock -from core.app.app_config.entities import ModelConfigEntity -from core.entities.provider_configuration import ProviderConfiguration, ProviderModelBundle -from core.model_runtime.entities.message_entities import UserPromptMessage -from core.model_runtime.entities.model_entities import AIModelEntity, ModelPropertyKey, ParameterRule -from core.model_runtime.entities.provider_entities import ProviderEntity -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel -from core.prompt.prompt_transform import PromptTransform +# from core.app.app_config.entities import ModelConfigEntity +# from core.entities.provider_configuration import ProviderConfiguration, ProviderModelBundle +# from core.model_runtime.entities.message_entities import UserPromptMessage +# from core.model_runtime.entities.model_entities import AIModelEntity, ModelPropertyKey, ParameterRule +# from core.model_runtime.entities.provider_entities import ProviderEntity +# from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel +# from core.prompt.prompt_transform import PromptTransform -def test__calculate_rest_token(): - model_schema_mock = MagicMock(spec=AIModelEntity) - parameter_rule_mock = MagicMock(spec=ParameterRule) - parameter_rule_mock.name = "max_tokens" - model_schema_mock.parameter_rules = [parameter_rule_mock] - 
model_schema_mock.model_properties = {ModelPropertyKey.CONTEXT_SIZE: 62} +# def test__calculate_rest_token(): +# model_schema_mock = MagicMock(spec=AIModelEntity) +# parameter_rule_mock = MagicMock(spec=ParameterRule) +# parameter_rule_mock.name = "max_tokens" +# model_schema_mock.parameter_rules = [parameter_rule_mock] +# model_schema_mock.model_properties = {ModelPropertyKey.CONTEXT_SIZE: 62} - large_language_model_mock = MagicMock(spec=LargeLanguageModel) - large_language_model_mock.get_num_tokens.return_value = 6 +# large_language_model_mock = MagicMock(spec=LargeLanguageModel) +# large_language_model_mock.get_num_tokens.return_value = 6 - provider_mock = MagicMock(spec=ProviderEntity) - provider_mock.provider = "openai" +# provider_mock = MagicMock(spec=ProviderEntity) +# provider_mock.provider = "openai" - provider_configuration_mock = MagicMock(spec=ProviderConfiguration) - provider_configuration_mock.provider = provider_mock - provider_configuration_mock.model_settings = None +# provider_configuration_mock = MagicMock(spec=ProviderConfiguration) +# provider_configuration_mock.provider = provider_mock +# provider_configuration_mock.model_settings = None - provider_model_bundle_mock = MagicMock(spec=ProviderModelBundle) - provider_model_bundle_mock.model_type_instance = large_language_model_mock - provider_model_bundle_mock.configuration = provider_configuration_mock +# provider_model_bundle_mock = MagicMock(spec=ProviderModelBundle) +# provider_model_bundle_mock.model_type_instance = large_language_model_mock +# provider_model_bundle_mock.configuration = provider_configuration_mock - model_config_mock = MagicMock(spec=ModelConfigEntity) - model_config_mock.model = "gpt-4" - model_config_mock.credentials = {} - model_config_mock.parameters = {"max_tokens": 50} - model_config_mock.model_schema = model_schema_mock - model_config_mock.provider_model_bundle = provider_model_bundle_mock +# model_config_mock = MagicMock(spec=ModelConfigEntity) +# model_config_mock.model = "gpt-4" +# model_config_mock.credentials = {} +# model_config_mock.parameters = {"max_tokens": 50} +# model_config_mock.model_schema = model_schema_mock +# model_config_mock.provider_model_bundle = provider_model_bundle_mock - prompt_transform = PromptTransform() +# prompt_transform = PromptTransform() - prompt_messages = [UserPromptMessage(content="Hello, how are you?")] - rest_tokens = prompt_transform._calculate_rest_token(prompt_messages, model_config_mock) +# prompt_messages = [UserPromptMessage(content="Hello, how are you?")] +# rest_tokens = prompt_transform._calculate_rest_token(prompt_messages, model_config_mock) - # Validate based on the mock configuration and expected logic - expected_rest_tokens = ( - model_schema_mock.model_properties[ModelPropertyKey.CONTEXT_SIZE] - - model_config_mock.parameters["max_tokens"] - - large_language_model_mock.get_num_tokens.return_value - ) - assert rest_tokens == expected_rest_tokens - assert rest_tokens == 6 +# # Validate based on the mock configuration and expected logic +# expected_rest_tokens = ( +# model_schema_mock.model_properties[ModelPropertyKey.CONTEXT_SIZE] +# - model_config_mock.parameters["max_tokens"] +# - large_language_model_mock.get_num_tokens.return_value +# ) +# assert rest_tokens == expected_rest_tokens +# assert rest_tokens == 6 diff --git a/api/tests/unit_tests/core/test_provider_manager.py b/api/tests/unit_tests/core/test_provider_manager.py index 44284e03d0..90d5a6f15b 100644 --- a/api/tests/unit_tests/core/test_provider_manager.py +++ 
b/api/tests/unit_tests/core/test_provider_manager.py @@ -1,186 +1,190 @@ -from core.entities.provider_entities import ModelSettings -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory -from core.provider_manager import ProviderManager -from models.provider import LoadBalancingModelConfig, ProviderModelSetting +# from core.entities.provider_entities import ModelSettings +# from core.model_runtime.entities.model_entities import ModelType +# from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory +# from core.provider_manager import ProviderManager +# from models.provider import LoadBalancingModelConfig, ProviderModelSetting -def test__to_model_settings(mocker): - # Get all provider entities - model_provider_factory = ModelProviderFactory("test_tenant") - provider_entities = model_provider_factory.get_providers() +# def test__to_model_settings(mocker): +# # Get all provider entities +# model_provider_factory = ModelProviderFactory("test_tenant") +# provider_entities = model_provider_factory.get_providers() - provider_entity = None - for provider in provider_entities: - if provider.provider == "openai": - provider_entity = provider +# provider_entity = None +# for provider in provider_entities: +# if provider.provider == "openai": +# provider_entity = provider - # Mocking the inputs - provider_model_settings = [ - ProviderModelSetting( - id="id", - tenant_id="tenant_id", - provider_name="openai", - model_name="gpt-4", - model_type="text-generation", - enabled=True, - load_balancing_enabled=True, - ) - ] - load_balancing_model_configs = [ - LoadBalancingModelConfig( - id="id1", - tenant_id="tenant_id", - provider_name="openai", - model_name="gpt-4", - model_type="text-generation", - name="__inherit__", - encrypted_config=None, - enabled=True, - ), - LoadBalancingModelConfig( - id="id2", - tenant_id="tenant_id", - provider_name="openai", - model_name="gpt-4", - model_type="text-generation", - name="first", - encrypted_config='{"openai_api_key": "fake_key"}', - enabled=True, - ), - ] +# # Mocking the inputs +# provider_model_settings = [ +# ProviderModelSetting( +# id="id", +# tenant_id="tenant_id", +# provider_name="openai", +# model_name="gpt-4", +# model_type="text-generation", +# enabled=True, +# load_balancing_enabled=True, +# ) +# ] +# load_balancing_model_configs = [ +# LoadBalancingModelConfig( +# id="id1", +# tenant_id="tenant_id", +# provider_name="openai", +# model_name="gpt-4", +# model_type="text-generation", +# name="__inherit__", +# encrypted_config=None, +# enabled=True, +# ), +# LoadBalancingModelConfig( +# id="id2", +# tenant_id="tenant_id", +# provider_name="openai", +# model_name="gpt-4", +# model_type="text-generation", +# name="first", +# encrypted_config='{"openai_api_key": "fake_key"}', +# enabled=True, +# ), +# ] - mocker.patch( - "core.helper.model_provider_cache.ProviderCredentialsCache.get", return_value={"openai_api_key": "fake_key"} - ) +# mocker.patch( +# "core.helper.model_provider_cache.ProviderCredentialsCache.get", return_value={"openai_api_key": "fake_key"} +# ) - provider_manager = ProviderManager() +# provider_manager = ProviderManager() - # Running the method - result = provider_manager._to_model_settings(provider_entity, provider_model_settings, load_balancing_model_configs) +# # Running the method +# result = provider_manager._to_model_settings(provider_entity, +# provider_model_settings, load_balancing_model_configs) 
- # Asserting that the result is as expected - assert len(result) == 1 - assert isinstance(result[0], ModelSettings) - assert result[0].model == "gpt-4" - assert result[0].model_type == ModelType.LLM - assert result[0].enabled is True - assert len(result[0].load_balancing_configs) == 2 - assert result[0].load_balancing_configs[0].name == "__inherit__" - assert result[0].load_balancing_configs[1].name == "first" +# # Asserting that the result is as expected +# assert len(result) == 1 +# assert isinstance(result[0], ModelSettings) +# assert result[0].model == "gpt-4" +# assert result[0].model_type == ModelType.LLM +# assert result[0].enabled is True +# assert len(result[0].load_balancing_configs) == 2 +# assert result[0].load_balancing_configs[0].name == "__inherit__" +# assert result[0].load_balancing_configs[1].name == "first" -def test__to_model_settings_only_one_lb(mocker): - # Get all provider entities - model_provider_factory = ModelProviderFactory("test_tenant") - provider_entities = model_provider_factory.get_providers() +# def test__to_model_settings_only_one_lb(mocker): +# # Get all provider entities +# model_provider_factory = ModelProviderFactory("test_tenant") +# provider_entities = model_provider_factory.get_providers() - provider_entity = None - for provider in provider_entities: - if provider.provider == "openai": - provider_entity = provider +# provider_entity = None +# for provider in provider_entities: +# if provider.provider == "openai": +# provider_entity = provider - # Mocking the inputs - provider_model_settings = [ - ProviderModelSetting( - id="id", - tenant_id="tenant_id", - provider_name="openai", - model_name="gpt-4", - model_type="text-generation", - enabled=True, - load_balancing_enabled=True, - ) - ] - load_balancing_model_configs = [ - LoadBalancingModelConfig( - id="id1", - tenant_id="tenant_id", - provider_name="openai", - model_name="gpt-4", - model_type="text-generation", - name="__inherit__", - encrypted_config=None, - enabled=True, - ) - ] +# # Mocking the inputs +# provider_model_settings = [ +# ProviderModelSetting( +# id="id", +# tenant_id="tenant_id", +# provider_name="openai", +# model_name="gpt-4", +# model_type="text-generation", +# enabled=True, +# load_balancing_enabled=True, +# ) +# ] +# load_balancing_model_configs = [ +# LoadBalancingModelConfig( +# id="id1", +# tenant_id="tenant_id", +# provider_name="openai", +# model_name="gpt-4", +# model_type="text-generation", +# name="__inherit__", +# encrypted_config=None, +# enabled=True, +# ) +# ] - mocker.patch( - "core.helper.model_provider_cache.ProviderCredentialsCache.get", return_value={"openai_api_key": "fake_key"} - ) +# mocker.patch( +# "core.helper.model_provider_cache.ProviderCredentialsCache.get", return_value={"openai_api_key": "fake_key"} +# ) - provider_manager = ProviderManager() +# provider_manager = ProviderManager() - # Running the method - result = provider_manager._to_model_settings(provider_entity, provider_model_settings, load_balancing_model_configs) +# # Running the method +# result = provider_manager._to_model_settings( +# provider_entity, provider_model_settings, load_balancing_model_configs) - # Asserting that the result is as expected - assert len(result) == 1 - assert isinstance(result[0], ModelSettings) - assert result[0].model == "gpt-4" - assert result[0].model_type == ModelType.LLM - assert result[0].enabled is True - assert len(result[0].load_balancing_configs) == 0 +# # Asserting that the result is as expected +# assert len(result) == 1 +# assert 
isinstance(result[0], ModelSettings) +# assert result[0].model == "gpt-4" +# assert result[0].model_type == ModelType.LLM +# assert result[0].enabled is True +# assert len(result[0].load_balancing_configs) == 0 -def test__to_model_settings_lb_disabled(mocker): - # Get all provider entities - model_provider_factory = ModelProviderFactory("test_tenant") - provider_entities = model_provider_factory.get_providers() +# def test__to_model_settings_lb_disabled(mocker): +# # Get all provider entities +# model_provider_factory = ModelProviderFactory("test_tenant") +# provider_entities = model_provider_factory.get_providers() - provider_entity = None - for provider in provider_entities: - if provider.provider == "openai": - provider_entity = provider +# provider_entity = None +# for provider in provider_entities: +# if provider.provider == "openai": +# provider_entity = provider - # Mocking the inputs - provider_model_settings = [ - ProviderModelSetting( - id="id", - tenant_id="tenant_id", - provider_name="openai", - model_name="gpt-4", - model_type="text-generation", - enabled=True, - load_balancing_enabled=False, - ) - ] - load_balancing_model_configs = [ - LoadBalancingModelConfig( - id="id1", - tenant_id="tenant_id", - provider_name="openai", - model_name="gpt-4", - model_type="text-generation", - name="__inherit__", - encrypted_config=None, - enabled=True, - ), - LoadBalancingModelConfig( - id="id2", - tenant_id="tenant_id", - provider_name="openai", - model_name="gpt-4", - model_type="text-generation", - name="first", - encrypted_config='{"openai_api_key": "fake_key"}', - enabled=True, - ), - ] +# # Mocking the inputs +# provider_model_settings = [ +# ProviderModelSetting( +# id="id", +# tenant_id="tenant_id", +# provider_name="openai", +# model_name="gpt-4", +# model_type="text-generation", +# enabled=True, +# load_balancing_enabled=False, +# ) +# ] +# load_balancing_model_configs = [ +# LoadBalancingModelConfig( +# id="id1", +# tenant_id="tenant_id", +# provider_name="openai", +# model_name="gpt-4", +# model_type="text-generation", +# name="__inherit__", +# encrypted_config=None, +# enabled=True, +# ), +# LoadBalancingModelConfig( +# id="id2", +# tenant_id="tenant_id", +# provider_name="openai", +# model_name="gpt-4", +# model_type="text-generation", +# name="first", +# encrypted_config='{"openai_api_key": "fake_key"}', +# enabled=True, +# ), +# ] - mocker.patch( - "core.helper.model_provider_cache.ProviderCredentialsCache.get", return_value={"openai_api_key": "fake_key"} - ) +# mocker.patch( +# "core.helper.model_provider_cache.ProviderCredentialsCache.get", +# return_value={"openai_api_key": "fake_key"} +# ) - provider_manager = ProviderManager() +# provider_manager = ProviderManager() - # Running the method - result = provider_manager._to_model_settings(provider_entity, provider_model_settings, load_balancing_model_configs) +# # Running the method +# result = provider_manager._to_model_settings(provider_entity, +# provider_model_settings, load_balancing_model_configs) - # Asserting that the result is as expected - assert len(result) == 1 - assert isinstance(result[0], ModelSettings) - assert result[0].model == "gpt-4" - assert result[0].model_type == ModelType.LLM - assert result[0].enabled is True - assert len(result[0].load_balancing_configs) == 0 +# # Asserting that the result is as expected +# assert len(result) == 1 +# assert isinstance(result[0], ModelSettings) +# assert result[0].model == "gpt-4" +# assert result[0].model_type == ModelType.LLM +# assert result[0].enabled is True +# 
assert len(result[0].load_balancing_configs) == 0 diff --git a/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py b/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py index 7e979bcaa8..184a2625bc 100644 --- a/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py +++ b/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py @@ -3,24 +3,20 @@ from typing import Optional import pytest -from configs import dify_config from core.app.entities.app_invoke_entities import InvokeFrom, ModelConfigWithCredentialsEntity from core.entities.provider_configuration import ProviderConfiguration, ProviderModelBundle from core.entities.provider_entities import CustomConfiguration, SystemConfiguration from core.file import File, FileTransferMethod, FileType from core.model_runtime.entities.common_entities import I18nObject from core.model_runtime.entities.message_entities import ( - AssistantPromptMessage, ImagePromptMessageContent, PromptMessage, PromptMessageRole, - SystemPromptMessage, TextPromptMessageContent, UserPromptMessage, ) -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelFeature, ModelType +from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory -from core.prompt.entities.advanced_prompt_entities import MemoryConfig from core.variables import ArrayAnySegment, ArrayFileSegment, NoneSegment from core.workflow.entities.variable_pool import VariablePool from core.workflow.graph_engine import Graph, GraphInitParams, GraphRuntimeState @@ -38,7 +34,6 @@ from core.workflow.nodes.llm.node import LLMNode from models.enums import UserFrom from models.provider import ProviderType from models.workflow import WorkflowType -from tests.unit_tests.core.workflow.nodes.llm.test_scenarios import LLMNodeTestScenario class MockTokenBufferMemory: @@ -112,22 +107,21 @@ def llm_node(): @pytest.fixture def model_config(): # Create actual provider and model type instances - model_provider_factory = ModelProviderFactory() - provider_instance = model_provider_factory.get_provider_instance("openai") - model_type_instance = provider_instance.get_model_instance(ModelType.LLM) + model_provider_factory = ModelProviderFactory(tenant_id="test") + provider_instance = model_provider_factory.get_plugin_model_provider("openai") + model_type_instance = model_provider_factory.get_model_type_instance("openai", ModelType.LLM) # Create a ProviderModelBundle provider_model_bundle = ProviderModelBundle( configuration=ProviderConfiguration( tenant_id="1", - provider=provider_instance.get_provider_schema(), + provider=provider_instance, preferred_provider_type=ProviderType.CUSTOM, using_provider_type=ProviderType.CUSTOM, system_configuration=SystemConfiguration(enabled=False), custom_configuration=CustomConfiguration(provider=None), model_settings=[], ), - provider_instance=provider_instance, model_type_instance=model_type_instance, ) @@ -211,236 +205,240 @@ def test_fetch_files_with_non_existent_variable(llm_node): assert result == [] -def test_fetch_prompt_messages__vison_disabled(faker, llm_node, model_config): - prompt_template = [] - llm_node.node_data.prompt_template = prompt_template +# def test_fetch_prompt_messages__vison_disabled(faker, llm_node, model_config): +# TODO: Add test +# pass +# prompt_template = [] +# llm_node.node_data.prompt_template = prompt_template - fake_vision_detail = faker.random_element( - [ImagePromptMessageContent.DETAIL.HIGH, 
ImagePromptMessageContent.DETAIL.LOW] - ) - fake_remote_url = faker.url() - files = [ - File( - id="1", - tenant_id="test", - type=FileType.IMAGE, - filename="test1.jpg", - transfer_method=FileTransferMethod.REMOTE_URL, - remote_url=fake_remote_url, - storage_key="", - ) - ] +# fake_vision_detail = faker.random_element( +# [ImagePromptMessageContent.DETAIL.HIGH, ImagePromptMessageContent.DETAIL.LOW] +# ) +# fake_remote_url = faker.url() +# files = [ +# File( +# id="1", +# tenant_id="test", +# type=FileType.IMAGE, +# filename="test1.jpg", +# transfer_method=FileTransferMethod.REMOTE_URL, +# remote_url=fake_remote_url, +# storage_key="", +# ) +# ] - fake_query = faker.sentence() +# fake_query = faker.sentence() - prompt_messages, _ = llm_node._fetch_prompt_messages( - sys_query=fake_query, - sys_files=files, - context=None, - memory=None, - model_config=model_config, - prompt_template=prompt_template, - memory_config=None, - vision_enabled=False, - vision_detail=fake_vision_detail, - variable_pool=llm_node.graph_runtime_state.variable_pool, - jinja2_variables=[], - ) +# prompt_messages, _ = llm_node._fetch_prompt_messages( +# sys_query=fake_query, +# sys_files=files, +# context=None, +# memory=None, +# model_config=model_config, +# prompt_template=prompt_template, +# memory_config=None, +# vision_enabled=False, +# vision_detail=fake_vision_detail, +# variable_pool=llm_node.graph_runtime_state.variable_pool, +# jinja2_variables=[], +# ) - assert prompt_messages == [UserPromptMessage(content=fake_query)] +# assert prompt_messages == [UserPromptMessage(content=fake_query)] -def test_fetch_prompt_messages__basic(faker, llm_node, model_config): - # Setup dify config - dify_config.MULTIMODAL_SEND_FORMAT = "url" +# def test_fetch_prompt_messages__basic(faker, llm_node, model_config): +# TODO: Add test +# pass +# Setup dify config +# dify_config.MULTIMODAL_SEND_FORMAT = "url" - # Generate fake values for prompt template - fake_assistant_prompt = faker.sentence() - fake_query = faker.sentence() - fake_context = faker.sentence() - fake_window_size = faker.random_int(min=1, max=3) - fake_vision_detail = faker.random_element( - [ImagePromptMessageContent.DETAIL.HIGH, ImagePromptMessageContent.DETAIL.LOW] - ) - fake_remote_url = faker.url() +# # Generate fake values for prompt template +# fake_assistant_prompt = faker.sentence() +# fake_query = faker.sentence() +# fake_context = faker.sentence() +# fake_window_size = faker.random_int(min=1, max=3) +# fake_vision_detail = faker.random_element( +# [ImagePromptMessageContent.DETAIL.HIGH, ImagePromptMessageContent.DETAIL.LOW] +# ) +# fake_remote_url = faker.url() - # Setup mock memory with history messages - mock_history = [ - UserPromptMessage(content=faker.sentence()), - AssistantPromptMessage(content=faker.sentence()), - UserPromptMessage(content=faker.sentence()), - AssistantPromptMessage(content=faker.sentence()), - UserPromptMessage(content=faker.sentence()), - AssistantPromptMessage(content=faker.sentence()), - ] +# # Setup mock memory with history messages +# mock_history = [ +# UserPromptMessage(content=faker.sentence()), +# AssistantPromptMessage(content=faker.sentence()), +# UserPromptMessage(content=faker.sentence()), +# AssistantPromptMessage(content=faker.sentence()), +# UserPromptMessage(content=faker.sentence()), +# AssistantPromptMessage(content=faker.sentence()), +# ] - # Setup memory configuration - memory_config = MemoryConfig( - role_prefix=MemoryConfig.RolePrefix(user="Human", assistant="Assistant"), - 
window=MemoryConfig.WindowConfig(enabled=True, size=fake_window_size), - query_prompt_template=None, - ) +# # Setup memory configuration +# memory_config = MemoryConfig( +# role_prefix=MemoryConfig.RolePrefix(user="Human", assistant="Assistant"), +# window=MemoryConfig.WindowConfig(enabled=True, size=fake_window_size), +# query_prompt_template=None, +# ) - memory = MockTokenBufferMemory(history_messages=mock_history) +# memory = MockTokenBufferMemory(history_messages=mock_history) - # Test scenarios covering different file input combinations - test_scenarios = [ - LLMNodeTestScenario( - description="No files", - sys_query=fake_query, - sys_files=[], - features=[], - vision_enabled=False, - vision_detail=None, - window_size=fake_window_size, - prompt_template=[ - LLMNodeChatModelMessage( - text=fake_context, - role=PromptMessageRole.SYSTEM, - edition_type="basic", - ), - LLMNodeChatModelMessage( - text="{#context#}", - role=PromptMessageRole.USER, - edition_type="basic", - ), - LLMNodeChatModelMessage( - text=fake_assistant_prompt, - role=PromptMessageRole.ASSISTANT, - edition_type="basic", - ), - ], - expected_messages=[ - SystemPromptMessage(content=fake_context), - UserPromptMessage(content=fake_context), - AssistantPromptMessage(content=fake_assistant_prompt), - ] - + mock_history[fake_window_size * -2 :] - + [ - UserPromptMessage(content=fake_query), - ], - ), - LLMNodeTestScenario( - description="User files", - sys_query=fake_query, - sys_files=[ - File( - tenant_id="test", - type=FileType.IMAGE, - filename="test1.jpg", - transfer_method=FileTransferMethod.REMOTE_URL, - remote_url=fake_remote_url, - extension=".jpg", - mime_type="image/jpg", - storage_key="", - ) - ], - vision_enabled=True, - vision_detail=fake_vision_detail, - features=[ModelFeature.VISION], - window_size=fake_window_size, - prompt_template=[ - LLMNodeChatModelMessage( - text=fake_context, - role=PromptMessageRole.SYSTEM, - edition_type="basic", - ), - LLMNodeChatModelMessage( - text="{#context#}", - role=PromptMessageRole.USER, - edition_type="basic", - ), - LLMNodeChatModelMessage( - text=fake_assistant_prompt, - role=PromptMessageRole.ASSISTANT, - edition_type="basic", - ), - ], - expected_messages=[ - SystemPromptMessage(content=fake_context), - UserPromptMessage(content=fake_context), - AssistantPromptMessage(content=fake_assistant_prompt), - ] - + mock_history[fake_window_size * -2 :] - + [ - UserPromptMessage( - content=[ - TextPromptMessageContent(data=fake_query), - ImagePromptMessageContent( - url=fake_remote_url, mime_type="image/jpg", format="jpg", detail=fake_vision_detail - ), - ] - ), - ], - ), - LLMNodeTestScenario( - description="Prompt template with variable selector of File", - sys_query=fake_query, - sys_files=[], - vision_enabled=False, - vision_detail=fake_vision_detail, - features=[ModelFeature.VISION], - window_size=fake_window_size, - prompt_template=[ - LLMNodeChatModelMessage( - text="{{#input.image#}}", - role=PromptMessageRole.USER, - edition_type="basic", - ), - ], - expected_messages=[ - UserPromptMessage( - content=[ - ImagePromptMessageContent( - url=fake_remote_url, mime_type="image/jpg", format="jpg", detail=fake_vision_detail - ), - ] - ), - ] - + mock_history[fake_window_size * -2 :] - + [UserPromptMessage(content=fake_query)], - file_variables={ - "input.image": File( - tenant_id="test", - type=FileType.IMAGE, - filename="test1.jpg", - transfer_method=FileTransferMethod.REMOTE_URL, - remote_url=fake_remote_url, - extension=".jpg", - mime_type="image/jpg", - storage_key="", - ) - 
}, - ), - ] +# # Test scenarios covering different file input combinations +# test_scenarios = [ +# LLMNodeTestScenario( +# description="No files", +# sys_query=fake_query, +# sys_files=[], +# features=[], +# vision_enabled=False, +# vision_detail=None, +# window_size=fake_window_size, +# prompt_template=[ +# LLMNodeChatModelMessage( +# text=fake_context, +# role=PromptMessageRole.SYSTEM, +# edition_type="basic", +# ), +# LLMNodeChatModelMessage( +# text="{#context#}", +# role=PromptMessageRole.USER, +# edition_type="basic", +# ), +# LLMNodeChatModelMessage( +# text=fake_assistant_prompt, +# role=PromptMessageRole.ASSISTANT, +# edition_type="basic", +# ), +# ], +# expected_messages=[ +# SystemPromptMessage(content=fake_context), +# UserPromptMessage(content=fake_context), +# AssistantPromptMessage(content=fake_assistant_prompt), +# ] +# + mock_history[fake_window_size * -2 :] +# + [ +# UserPromptMessage(content=fake_query), +# ], +# ), +# LLMNodeTestScenario( +# description="User files", +# sys_query=fake_query, +# sys_files=[ +# File( +# tenant_id="test", +# type=FileType.IMAGE, +# filename="test1.jpg", +# transfer_method=FileTransferMethod.REMOTE_URL, +# remote_url=fake_remote_url, +# extension=".jpg", +# mime_type="image/jpg", +# storage_key="", +# ) +# ], +# vision_enabled=True, +# vision_detail=fake_vision_detail, +# features=[ModelFeature.VISION], +# window_size=fake_window_size, +# prompt_template=[ +# LLMNodeChatModelMessage( +# text=fake_context, +# role=PromptMessageRole.SYSTEM, +# edition_type="basic", +# ), +# LLMNodeChatModelMessage( +# text="{#context#}", +# role=PromptMessageRole.USER, +# edition_type="basic", +# ), +# LLMNodeChatModelMessage( +# text=fake_assistant_prompt, +# role=PromptMessageRole.ASSISTANT, +# edition_type="basic", +# ), +# ], +# expected_messages=[ +# SystemPromptMessage(content=fake_context), +# UserPromptMessage(content=fake_context), +# AssistantPromptMessage(content=fake_assistant_prompt), +# ] +# + mock_history[fake_window_size * -2 :] +# + [ +# UserPromptMessage( +# content=[ +# TextPromptMessageContent(data=fake_query), +# ImagePromptMessageContent( +# url=fake_remote_url, mime_type="image/jpg", format="jpg", detail=fake_vision_detail +# ), +# ] +# ), +# ], +# ), +# LLMNodeTestScenario( +# description="Prompt template with variable selector of File", +# sys_query=fake_query, +# sys_files=[], +# vision_enabled=False, +# vision_detail=fake_vision_detail, +# features=[ModelFeature.VISION], +# window_size=fake_window_size, +# prompt_template=[ +# LLMNodeChatModelMessage( +# text="{{#input.image#}}", +# role=PromptMessageRole.USER, +# edition_type="basic", +# ), +# ], +# expected_messages=[ +# UserPromptMessage( +# content=[ +# ImagePromptMessageContent( +# url=fake_remote_url, mime_type="image/jpg", format="jpg", detail=fake_vision_detail +# ), +# ] +# ), +# ] +# + mock_history[fake_window_size * -2 :] +# + [UserPromptMessage(content=fake_query)], +# file_variables={ +# "input.image": File( +# tenant_id="test", +# type=FileType.IMAGE, +# filename="test1.jpg", +# transfer_method=FileTransferMethod.REMOTE_URL, +# remote_url=fake_remote_url, +# extension=".jpg", +# mime_type="image/jpg", +# storage_key="", +# ) +# }, +# ), +# ] - for scenario in test_scenarios: - model_config.model_schema.features = scenario.features +# for scenario in test_scenarios: +# model_config.model_schema.features = scenario.features - for k, v in scenario.file_variables.items(): - selector = k.split(".") - llm_node.graph_runtime_state.variable_pool.add(selector, v) +# for k, v 
in scenario.file_variables.items(): +# selector = k.split(".") +# llm_node.graph_runtime_state.variable_pool.add(selector, v) - # Call the method under test - prompt_messages, _ = llm_node._fetch_prompt_messages( - sys_query=scenario.sys_query, - sys_files=scenario.sys_files, - context=fake_context, - memory=memory, - model_config=model_config, - prompt_template=scenario.prompt_template, - memory_config=memory_config, - vision_enabled=scenario.vision_enabled, - vision_detail=scenario.vision_detail, - variable_pool=llm_node.graph_runtime_state.variable_pool, - jinja2_variables=[], - ) +# # Call the method under test +# prompt_messages, _ = llm_node._fetch_prompt_messages( +# sys_query=scenario.sys_query, +# sys_files=scenario.sys_files, +# context=fake_context, +# memory=memory, +# model_config=model_config, +# prompt_template=scenario.prompt_template, +# memory_config=memory_config, +# vision_enabled=scenario.vision_enabled, +# vision_detail=scenario.vision_detail, +# variable_pool=llm_node.graph_runtime_state.variable_pool, +# jinja2_variables=[], +# ) - # Verify the result - assert len(prompt_messages) == len(scenario.expected_messages), f"Scenario failed: {scenario.description}" - assert ( - prompt_messages == scenario.expected_messages - ), f"Message content mismatch in scenario: {scenario.description}" +# # Verify the result +# assert len(prompt_messages) == len(scenario.expected_messages), f"Scenario failed: {scenario.description}" +# assert ( +# prompt_messages == scenario.expected_messages +# ), f"Message content mismatch in scenario: {scenario.description}" def test_handle_list_messages_basic(llm_node): diff --git a/api/tests/unit_tests/core/workflow/nodes/test_continue_on_error.py b/api/tests/unit_tests/core/workflow/nodes/test_continue_on_error.py index 2d74be9da9..ed35d8a32a 100644 --- a/api/tests/unit_tests/core/workflow/nodes/test_continue_on_error.py +++ b/api/tests/unit_tests/core/workflow/nodes/test_continue_on_error.py @@ -126,7 +126,7 @@ class ContinueOnErrorTestHelper: }, } if default_value: - node["data"]["default_value"] = default_value + node.node_data.default_value = default_value return node @staticmethod @@ -331,55 +331,55 @@ def test_http_node_fail_branch_continue_on_error(): assert sum(1 for e in events if isinstance(e, NodeRunStreamChunkEvent)) == 1 -def test_tool_node_default_value_continue_on_error(): - """Test tool node with default value error strategy""" - graph_config = { - "edges": DEFAULT_VALUE_EDGE, - "nodes": [ - {"data": {"title": "start", "type": "start", "variables": []}, "id": "start"}, - {"data": {"title": "answer", "type": "answer", "answer": "{{#node.result#}}"}, "id": "answer"}, - ContinueOnErrorTestHelper.get_tool_node( - "default-value", [{"key": "result", "type": "string", "value": "default tool result"}] - ), - ], - } +# def test_tool_node_default_value_continue_on_error(): +# """Test tool node with default value error strategy""" +# graph_config = { +# "edges": DEFAULT_VALUE_EDGE, +# "nodes": [ +# {"data": {"title": "start", "type": "start", "variables": []}, "id": "start"}, +# {"data": {"title": "answer", "type": "answer", "answer": "{{#node.result#}}"}, "id": "answer"}, +# ContinueOnErrorTestHelper.get_tool_node( +# "default-value", [{"key": "result", "type": "string", "value": "default tool result"}] +# ), +# ], +# } - graph_engine = ContinueOnErrorTestHelper.create_test_graph_engine(graph_config) - events = list(graph_engine.run()) +# graph_engine = ContinueOnErrorTestHelper.create_test_graph_engine(graph_config) +# events = 
list(graph_engine.run()) - assert any(isinstance(e, NodeRunExceptionEvent) for e in events) - assert any( - isinstance(e, GraphRunPartialSucceededEvent) and e.outputs == {"answer": "default tool result"} for e in events - ) - assert sum(1 for e in events if isinstance(e, NodeRunStreamChunkEvent)) == 1 +# assert any(isinstance(e, NodeRunExceptionEvent) for e in events) +# assert any( +# isinstance(e, GraphRunPartialSucceededEvent) and e.outputs == {"answer": "default tool result"} for e in events # noqa: E501 +# ) +# assert sum(1 for e in events if isinstance(e, NodeRunStreamChunkEvent)) == 1 -def test_tool_node_fail_branch_continue_on_error(): - """Test HTTP node with fail-branch error strategy""" - graph_config = { - "edges": FAIL_BRANCH_EDGES, - "nodes": [ - {"data": {"title": "Start", "type": "start", "variables": []}, "id": "start"}, - { - "data": {"title": "success", "type": "answer", "answer": "tool execute successful"}, - "id": "success", - }, - { - "data": {"title": "error", "type": "answer", "answer": "tool execute failed"}, - "id": "error", - }, - ContinueOnErrorTestHelper.get_tool_node(), - ], - } +# def test_tool_node_fail_branch_continue_on_error(): +# """Test HTTP node with fail-branch error strategy""" +# graph_config = { +# "edges": FAIL_BRANCH_EDGES, +# "nodes": [ +# {"data": {"title": "Start", "type": "start", "variables": []}, "id": "start"}, +# { +# "data": {"title": "success", "type": "answer", "answer": "tool execute successful"}, +# "id": "success", +# }, +# { +# "data": {"title": "error", "type": "answer", "answer": "tool execute failed"}, +# "id": "error", +# }, +# ContinueOnErrorTestHelper.get_tool_node(), +# ], +# } - graph_engine = ContinueOnErrorTestHelper.create_test_graph_engine(graph_config) - events = list(graph_engine.run()) +# graph_engine = ContinueOnErrorTestHelper.create_test_graph_engine(graph_config) +# events = list(graph_engine.run()) - assert any(isinstance(e, NodeRunExceptionEvent) for e in events) - assert any( - isinstance(e, GraphRunPartialSucceededEvent) and e.outputs == {"answer": "tool execute failed"} for e in events - ) - assert sum(1 for e in events if isinstance(e, NodeRunStreamChunkEvent)) == 1 +# assert any(isinstance(e, NodeRunExceptionEvent) for e in events) +# assert any( +# isinstance(e, GraphRunPartialSucceededEvent) and e.outputs == {"answer": "tool execute failed"} for e in events # noqa: E501 +# ) +# assert sum(1 for e in events if isinstance(e, NodeRunStreamChunkEvent)) == 1 def test_llm_node_default_value_continue_on_error(): diff --git a/dev/pytest/pytest_config_tests.py b/dev/pytest/pytest_config_tests.py index 08adc9ebe9..11e98bb58b 100644 --- a/dev/pytest/pytest_config_tests.py +++ b/dev/pytest/pytest_config_tests.py @@ -10,6 +10,8 @@ BASE_API_AND_DOCKER_CONFIG_SET_DIFF = { "HTTP_REQUEST_MAX_CONNECT_TIMEOUT", "HTTP_REQUEST_MAX_READ_TIMEOUT", "HTTP_REQUEST_MAX_WRITE_TIMEOUT", + "INNER_API_KEY", + "INNER_API_KEY_FOR_PLUGIN", "KEYWORD_DATA_SOURCE_TYPE", "LOGIN_LOCKOUT_DURATION", "LOG_FORMAT", @@ -18,6 +20,10 @@ BASE_API_AND_DOCKER_CONFIG_SET_DIFF = { "OCI_ENDPOINT", "OCI_REGION", "OCI_SECRET_KEY", + "PLUGIN_API_KEY", + "PLUGIN_API_URL", + "PLUGIN_REMOTE_INSTALL_HOST", + "PLUGIN_REMOTE_INSTALL_PORT", "REDIS_DB", "RESEND_API_URL", "RESPECT_XFORWARD_HEADERS_ENABLED", @@ -40,6 +46,8 @@ BASE_API_AND_DOCKER_COMPOSE_CONFIG_SET_DIFF = { "HTTP_REQUEST_MAX_CONNECT_TIMEOUT", "HTTP_REQUEST_MAX_READ_TIMEOUT", "HTTP_REQUEST_MAX_WRITE_TIMEOUT", + "INNER_API_KEY", + "INNER_API_KEY_FOR_PLUGIN", "KEYWORD_DATA_SOURCE_TYPE", 
"LOGIN_LOCKOUT_DURATION", "LOG_FORMAT", @@ -58,6 +66,10 @@ BASE_API_AND_DOCKER_COMPOSE_CONFIG_SET_DIFF = { "PGVECTO_RS_PASSWORD", "PGVECTO_RS_PORT", "PGVECTO_RS_USER", + "PLUGIN_API_KEY", + "PLUGIN_API_URL", + "PLUGIN_REMOTE_INSTALL_HOST", + "PLUGIN_REMOTE_INSTALL_PORT", "RESPECT_XFORWARD_HEADERS_ENABLED", "SCARF_NO_ANALYTICS", "SSRF_DEFAULT_CONNECT_TIME_OUT", diff --git a/docker-legacy/docker-compose.yaml b/docker-legacy/docker-compose.yaml index c8bf382bcd..495a08110f 100644 --- a/docker-legacy/docker-compose.yaml +++ b/docker-legacy/docker-compose.yaml @@ -2,7 +2,7 @@ version: '3' services: # API service api: - image: langgenius/dify-api:0.15.0 + image: langgenius/dify-api:1.0.0-beta.1 restart: always environment: # Startup mode, 'api' starts the API server. @@ -227,7 +227,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:0.15.0 + image: langgenius/dify-api:1.0.0-beta.1 restart: always environment: CONSOLE_WEB_URL: '' @@ -397,7 +397,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:0.15.0 + image: langgenius/dify-web:1.0.0-beta.1 restart: always environment: # The base URL of console application api server, refers to the Console base URL of WEB service if console domain is diff --git a/docker/.env.example b/docker/.env.example index f85a3d94ee..02e80db9ac 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -937,7 +937,7 @@ TOP_K_MAX_VALUE=10 # Plugin Daemon Configuration # ------------------------------ -DB_PLUGIN_DATABASE=dify-plugin +DB_PLUGIN_DATABASE=dify_plugin EXPOSE_PLUGIN_DAEMON_PORT=5002 PLUGIN_DAEMON_PORT=5002 PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi @@ -958,3 +958,4 @@ ENDPOINT_URL_TEMPLATE=http://localhost/e/{hook_id} MARKETPLACE_ENABLED=true MARKETPLACE_API_URL=https://marketplace-plugin.dify.dev +FORCE_VERIFYING_SIGNATURE=true diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index 0e85452f21..cc1f55a6af 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -2,24 +2,22 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:dev-plugin-deploy + image: langgenius/dify-api:1.0.0-beta.1 restart: always environment: # Use the shared environment variables. <<: *shared-api-worker-env # Startup mode, 'api' starts the API server. 
MODE: api + CONSOLE_API_URL: ${CONSOLE_API_URL:-http://localhost:5001} + CONSOLE_WEB_URL: ${CONSOLE_WEB_URL:-http://localhost:3000} SENTRY_DSN: ${API_SENTRY_DSN:-} SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0} SENTRY_PROFILES_SAMPLE_RATE: ${API_SENTRY_PROFILES_SAMPLE_RATE:-1.0} - PLUGIN_API_KEY: ${PLUGIN_DAEMON_KEY:-lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi} - PLUGIN_API_URL: ${PLUGIN_DAEMON_URL:-http://plugin_daemon:5002} PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800} INNER_API_KEY_FOR_PLUGIN: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1} - MARKETPLACE_ENABLED: ${MARKETPLACE_ENABLED:-false} - MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace-plugin.dify.dev} - PLUGIN_REMOTE_INSTALL_PORT: ${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003} - PLUGIN_REMOTE_INSTALL_HOST: ${EXPOSE_PLUGIN_DEBUGGING_HOST:-localhost} + MARKETPLACE_ENABLED: ${MARKETPLACE_ENABLED:-true} + MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace.dify.ai} ENDPOINT_URL_TEMPLATE: ${ENDPOINT_URL_TEMPLATE:-http://localhost/e/{hook_id}} depends_on: - db @@ -34,7 +32,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:dev-plugin-deploy + image: langgenius/dify-api:1.0.0-beta.1 restart: always environment: # Use the shared environment variables. @@ -49,7 +47,7 @@ services: PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800} INNER_API_KEY_FOR_PLUGIN: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1} MARKETPLACE_ENABLED: ${MARKETPLACE_ENABLED:-false} - MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace-plugin.dify.dev} + MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace.dify.ai} depends_on: - db - redis @@ -62,7 +60,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:dev-plugin-deploy + image: langgenius/dify-web:1.0.0-beta.1 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -71,8 +69,8 @@ services: NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0} TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000} CSP_WHITELIST: ${CSP_WHITELIST:-} - MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace-plugin.dify.dev} - MARKETPLACE_URL: ${MARKETPLACE_URL:-https://marketplace-plugin.dify.dev} + MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace.dify.ai} + MARKETPLACE_URL: ${MARKETPLACE_URL:-https://marketplace.dify.ai} TOP_K_MAX_VALUE: ${TOP_K_MAX_VALUE:-} # The postgres database. @@ -138,7 +136,7 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:47c8bed17c22f67bd035d0979e696cb00ca45b16-local + image: langgenius/dify-plugin-daemon:0.0.1-local restart: always environment: # Use the shared environment variables. 
@@ -149,10 +147,11 @@ services: MAX_PLUGIN_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800} PPROF_ENABLED: ${PLUGIN_PPROF_ENABLED:-false} DIFY_INNER_API_URL: ${PLUGIN_DIFY_INNER_API_URL:-http://api:5001} - DIFY_INNER_API_KEY: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1} - PLUGIN_REMOTE_INSTALLING_HOST: ${PLUGIN_DEBUGGING_HOST:-0.0.0.0} - PLUGIN_REMOTE_INSTALLING_PORT: ${PLUGIN_DEBUGGING_PORT:-5003} + DIFY_INNER_API_KEY: ${INNER_API_KEY_FOR_PLUGIN:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1} + PLUGIN_REMOTE_INSTALLING_HOST: ${PLUGIN_REMOTE_INSTALL_HOST:-0.0.0.0} + PLUGIN_REMOTE_INSTALLING_PORT: ${PLUGIN_REMOTE_INSTALL_PORT:-5003} PLUGIN_WORKING_PATH: ${PLUGIN_WORKING_PATH:-/app/storage/cwd} + FORCE_VERIFYING_SIGNATURE: ${FORCE_VERIFYING_SIGNATURE:-true} ports: - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}" volumes: diff --git a/docker/docker-compose.middleware.yaml b/docker/docker-compose.middleware.yaml index ebcf6c5c92..01879c4238 100644 --- a/docker/docker-compose.middleware.yaml +++ b/docker/docker-compose.middleware.yaml @@ -66,7 +66,7 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:47c8bed17c22f67bd035d0979e696cb00ca45b16-local + image: langgenius/dify-plugin-daemon:0.0.1-local restart: always environment: # Use the shared environment variables. @@ -87,6 +87,7 @@ services: PLUGIN_REMOTE_INSTALLING_HOST: ${PLUGIN_DEBUGGING_HOST:-0.0.0.0} PLUGIN_REMOTE_INSTALLING_PORT: ${PLUGIN_DEBUGGING_PORT:-5003} PLUGIN_WORKING_PATH: ${PLUGIN_WORKING_PATH:-/app/storage/cwd} + FORCE_VERIFYING_SIGNATURE: ${FORCE_VERIFYING_SIGNATURE:-true} ports: - "${EXPOSE_PLUGIN_DAEMON_PORT:-5002}:${PLUGIN_DAEMON_PORT:-5002}" - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}" diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 68d5097e49..71c568d746 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -5,9 +5,9 @@ # ================================================================== x-shared-env: &shared-api-worker-env - CONSOLE_API_URL: ${CONSOLE_API_URL:-} - CONSOLE_WEB_URL: ${CONSOLE_WEB_URL:-} - SERVICE_API_URL: ${SERVICE_API_URL:-} + CONSOLE_API_URL: ${CONSOLE_API_URL:-http://localhost} + CONSOLE_WEB_URL: ${CONSOLE_WEB_URL:-http://localhost} + SERVICE_API_URL: ${SERVICE_API_URL:-http://localhost} APP_API_URL: ${APP_API_URL:-} APP_WEB_URL: ${APP_WEB_URL:-} FILES_URL: ${FILES_URL:-} @@ -388,7 +388,8 @@ x-shared-env: &shared-api-worker-env CSP_WHITELIST: ${CSP_WHITELIST:-} CREATE_TIDB_SERVICE_JOB_ENABLED: ${CREATE_TIDB_SERVICE_JOB_ENABLED:-false} MAX_SUBMIT_COUNT: ${MAX_SUBMIT_COUNT:-100} - DB_PLUGIN_DATABASE: ${DB_PLUGIN_DATABASE:-dify-plugin} + TOP_K_MAX_VALUE: ${TOP_K_MAX_VALUE:-10} + DB_PLUGIN_DATABASE: ${DB_PLUGIN_DATABASE:-dify_plugin} EXPOSE_PLUGIN_DAEMON_PORT: ${EXPOSE_PLUGIN_DAEMON_PORT:-5002} PLUGIN_DAEMON_PORT: ${PLUGIN_DAEMON_PORT:-5002} PLUGIN_DAEMON_KEY: ${PLUGIN_DAEMON_KEY:-lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi} @@ -404,12 +405,11 @@ x-shared-env: &shared-api-worker-env ENDPOINT_URL_TEMPLATE: ${ENDPOINT_URL_TEMPLATE:-http://localhost/e/{hook_id}} MARKETPLACE_ENABLED: ${MARKETPLACE_ENABLED:-true} MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace-plugin.dify.dev} - TOP_K_MAX_VALUE: ${TOP_K_MAX_VALUE:-10} services: # API service api: - image: langgenius/dify-api:dev-plugin-deploy + image: langgenius/dify-api:1.0.0-beta.1 restart: always environment: # Use the shared environment variables. 
@@ -419,14 +419,10 @@ services: SENTRY_DSN: ${API_SENTRY_DSN:-} SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0} SENTRY_PROFILES_SAMPLE_RATE: ${API_SENTRY_PROFILES_SAMPLE_RATE:-1.0} - PLUGIN_API_KEY: ${PLUGIN_DAEMON_KEY:-lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi} - PLUGIN_API_URL: ${PLUGIN_DAEMON_URL:-http://plugin_daemon:5002} PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800} INNER_API_KEY_FOR_PLUGIN: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1} - MARKETPLACE_ENABLED: ${MARKETPLACE_ENABLED:-false} - MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace-plugin.dify.dev} - PLUGIN_REMOTE_INSTALL_PORT: ${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003} - PLUGIN_REMOTE_INSTALL_HOST: ${EXPOSE_PLUGIN_DEBUGGING_HOST:-localhost} + MARKETPLACE_ENABLED: ${MARKETPLACE_ENABLED:-true} + MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace.dify.ai} ENDPOINT_URL_TEMPLATE: ${ENDPOINT_URL_TEMPLATE:-http://localhost/e/{hook_id}} depends_on: - db @@ -441,7 +437,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:dev-plugin-deploy + image: langgenius/dify-api:1.0.0-beta.1 restart: always environment: # Use the shared environment variables. @@ -456,7 +452,7 @@ services: PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800} INNER_API_KEY_FOR_PLUGIN: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1} MARKETPLACE_ENABLED: ${MARKETPLACE_ENABLED:-false} - MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace-plugin.dify.dev} + MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace.dify.ai} depends_on: - db - redis @@ -469,7 +465,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:dev-plugin-deploy + image: langgenius/dify-web:1.0.0-beta.1 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -480,8 +476,8 @@ services: NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0} TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000} CSP_WHITELIST: ${CSP_WHITELIST:-} - MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace-plugin.dify.dev} - MARKETPLACE_URL: ${MARKETPLACE_URL:-https://marketplace-plugin.dify.dev} + MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace.dify.ai} + MARKETPLACE_URL: ${MARKETPLACE_URL:-https://marketplace.dify.ai} TOP_K_MAX_VALUE: ${TOP_K_MAX_VALUE:-} # The postgres database. @@ -547,7 +543,7 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:47c8bed17c22f67bd035d0979e696cb00ca45b16-local + image: langgenius/dify-plugin-daemon:0.0.1-local restart: always environment: # Use the shared environment variables. 
@@ -557,12 +553,12 @@ services: SERVER_KEY: ${PLUGIN_DAEMON_KEY:-lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi} MAX_PLUGIN_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800} PPROF_ENABLED: ${PLUGIN_PPROF_ENABLED:-false} - DEBUGGING_PORT: ${PLUGIN_DEBUGGING_PORT:-5003} DIFY_INNER_API_URL: ${PLUGIN_DIFY_INNER_API_URL:-http://api:5001} - DIFY_INNER_API_KEY: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1} - PLUGIN_REMOTE_INSTALLING_HOST: ${PLUGIN_DEBUGGING_HOST:-0.0.0.0} - PLUGIN_REMOTE_INSTALLING_PORT: ${PLUGIN_DEBUGGING_PORT:-5003} + DIFY_INNER_API_KEY: ${INNER_API_KEY_FOR_PLUGIN:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1} + PLUGIN_REMOTE_INSTALLING_HOST: ${PLUGIN_REMOTE_INSTALL_HOST:-0.0.0.0} + PLUGIN_REMOTE_INSTALLING_PORT: ${PLUGIN_REMOTE_INSTALL_PORT:-5003} PLUGIN_WORKING_PATH: ${PLUGIN_WORKING_PATH:-/app/storage/cwd} + FORCE_VERIFYING_SIGNATURE: ${FORCE_VERIFYING_SIGNATURE:-true} ports: - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}" volumes: diff --git a/docker/middleware.env.example b/docker/middleware.env.example index 31ccc8ed68..558ad46428 100644 --- a/docker/middleware.env.example +++ b/docker/middleware.env.example @@ -92,7 +92,7 @@ EXPOSE_WEAVIATE_PORT=8080 # Plugin Daemon Configuration # ------------------------------ -DB_PLUGIN_DATABASE=dify-plugin +DB_PLUGIN_DATABASE=dify_plugin EXPOSE_PLUGIN_DAEMON_PORT=5002 PLUGIN_DAEMON_PORT=5002 PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi @@ -113,3 +113,5 @@ PLUGIN_DIFY_INNER_API_URL=http://api:5001 MARKETPLACE_ENABLED=true MARKETPLACE_API_URL=https://marketplace-plugin.dify.dev + +FORCE_VERIFYING_SIGNATURE=true \ No newline at end of file diff --git a/docker/nginx/conf.d/default.conf.template b/docker/nginx/conf.d/default.conf.template index bf86c70735..7cef848127 100644 --- a/docker/nginx/conf.d/default.conf.template +++ b/docker/nginx/conf.d/default.conf.template @@ -24,6 +24,11 @@ server { include proxy.conf; } + location /explore { + proxy_pass http://web:3000; + include proxy.conf; + } + location /e { proxy_pass http://plugin_daemon:5002; include proxy.conf; diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/annotations/page.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/annotations/page.tsx index 0af2e945f3..7beb1d76bb 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/annotations/page.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/annotations/page.tsx @@ -2,7 +2,7 @@ import React from 'react' import Main from '@/app/components/app/log-annotation' import { PageType } from '@/app/components/base/features/new-feature-panel/annotation-reply/type' -export type IProps = { +export interface IProps { params: { appId: string } } diff --git a/web/app/account/avatar.tsx b/web/app/account/avatar.tsx index 8fdecc07bf..298fa65d52 100644 --- a/web/app/account/avatar.tsx +++ b/web/app/account/avatar.tsx @@ -8,7 +8,7 @@ import { logout } from '@/service/common' import { useAppContext } from '@/context/app-context' import { LogOut01 } from '@/app/components/base/icons/src/vender/line/general' -export type IAppSelector = { +export interface IAppSelector { isMobile: boolean } diff --git a/web/app/components/app/annotation/filter.tsx b/web/app/components/app/annotation/filter.tsx index d741f6de12..2f1cde22e4 100644 --- a/web/app/components/app/annotation/filter.tsx +++ b/web/app/components/app/annotation/filter.tsx @@ -6,11 +6,11 @@ import useSWR from 'swr' import Input from 
'@/app/components/base/input' import { fetchAnnotationsCount } from '@/service/log' -export type QueryParam = { +export interface QueryParam { keyword?: string } -type IFilterProps = { +interface IFilterProps { appId: string queryParams: QueryParam setQueryParams: (v: QueryParam) => void diff --git a/web/app/components/app/annotation/list.tsx b/web/app/components/app/annotation/list.tsx index 39a495085e..8944c04851 100644 --- a/web/app/components/app/annotation/list.tsx +++ b/web/app/components/app/annotation/list.tsx @@ -9,7 +9,7 @@ import ActionButton from '@/app/components/base/action-button' import useTimestamp from '@/hooks/use-timestamp' import cn from '@/utils/classnames' -type Props = { +interface Props { list: AnnotationItem[] onRemove: (id: string) => void onView: (item: AnnotationItem) => void diff --git a/web/app/components/app/configuration/config-prompt/conversation-history/history-panel.tsx b/web/app/components/app/configuration/config-prompt/conversation-history/history-panel.tsx index 199f9598a4..4604e43514 100644 --- a/web/app/components/app/configuration/config-prompt/conversation-history/history-panel.tsx +++ b/web/app/components/app/configuration/config-prompt/conversation-history/history-panel.tsx @@ -9,7 +9,7 @@ import { MessageClockCircle } from '@/app/components/base/icons/src/vender/solid import I18n from '@/context/i18n' import { LanguagesSupported } from '@/i18n/language' -type Props = { +interface Props { showWarning: boolean onShowEditModal: () => void } diff --git a/web/app/components/app/configuration/config-var/config-modal/field.tsx b/web/app/components/app/configuration/config-var/config-modal/field.tsx index 5052f988d7..2c8c788e5e 100644 --- a/web/app/components/app/configuration/config-var/config-modal/field.tsx +++ b/web/app/components/app/configuration/config-var/config-modal/field.tsx @@ -3,7 +3,7 @@ import type { FC } from 'react' import React from 'react' import cn from '@/utils/classnames' -type Props = { +interface Props { className?: string title: string children: JSX.Element diff --git a/web/app/components/app/configuration/config-var/config-modal/index.tsx b/web/app/components/app/configuration/config-var/config-modal/index.tsx index 85e241a203..0b18cd323e 100644 --- a/web/app/components/app/configuration/config-var/config-modal/index.tsx +++ b/web/app/components/app/configuration/config-var/config-modal/index.tsx @@ -23,7 +23,7 @@ import { DEFAULT_VALUE_MAX_LEN } from '@/config' const TEXT_MAX_LENGTH = 256 -export type IConfigModalProps = { +export interface IConfigModalProps { isCreate?: boolean payload?: InputVar isShow: boolean diff --git a/web/app/components/app/configuration/config-var/config-string/index.tsx b/web/app/components/app/configuration/config-var/config-string/index.tsx index 78f185bd85..ef3b3d8d4a 100644 --- a/web/app/components/app/configuration/config-var/config-string/index.tsx +++ b/web/app/components/app/configuration/config-var/config-string/index.tsx @@ -3,7 +3,7 @@ import type { FC } from 'react' import React, { useEffect } from 'react' import Input from '@/app/components/base/input' -export type IConfigStringProps = { +export interface IConfigStringProps { value: number | undefined maxLength: number modelId: string diff --git a/web/app/components/app/configuration/config-var/index.tsx b/web/app/components/app/configuration/config-var/index.tsx index 608451d5f2..99150899ea 100644 --- a/web/app/components/app/configuration/config-var/index.tsx +++ b/web/app/components/app/configuration/config-var/index.tsx @@ -33,7 
+33,7 @@ import { InputVarType } from '@/app/components/workflow/types' export const ADD_EXTERNAL_DATA_TOOL = 'ADD_EXTERNAL_DATA_TOOL' -type ExternalDataToolParams = { +interface ExternalDataToolParams { key: string type: string index: number @@ -43,7 +43,7 @@ type ExternalDataToolParams = { icon_background?: string } -export type IConfigVarProps = { +export interface IConfigVarProps { promptVariables: PromptVariable[] readonly?: boolean onPromptVariablesChange?: (promptVariables: PromptVariable[]) => void diff --git a/web/app/components/app/configuration/config-var/select-type-item/index.tsx b/web/app/components/app/configuration/config-var/select-type-item/index.tsx index b71486b4eb..cf91b89cb1 100644 --- a/web/app/components/app/configuration/config-var/select-type-item/index.tsx +++ b/web/app/components/app/configuration/config-var/select-type-item/index.tsx @@ -5,7 +5,7 @@ import { useTranslation } from 'react-i18next' import cn from '@/utils/classnames' import type { InputVarType } from '@/app/components/workflow/types' import InputVarTypeIcon from '@/app/components/workflow/nodes/_base/components/input-var-type-icon' -export type ISelectTypeItemProps = { +export interface ISelectTypeItemProps { type: InputVarType selected: boolean onClick: () => void diff --git a/web/app/components/app/configuration/config/automatic/get-automatic-res.tsx b/web/app/components/app/configuration/config/automatic/get-automatic-res.tsx index 0a20f4b376..549421401c 100644 --- a/web/app/components/app/configuration/config/automatic/get-automatic-res.tsx +++ b/web/app/components/app/configuration/config/automatic/get-automatic-res.tsx @@ -38,7 +38,7 @@ import ModelName from '@/app/components/header/account-setting/model-provider-pa import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations' import { useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks' -export type IGetAutomaticResProps = { +export interface IGetAutomaticResProps { mode: AppType model: Model isShow: boolean diff --git a/web/app/components/app/configuration/dataset-config/settings-modal/index.tsx b/web/app/components/app/configuration/dataset-config/settings-modal/index.tsx index feaa08c02a..657f0bcb16 100644 --- a/web/app/components/app/configuration/dataset-config/settings-modal/index.tsx +++ b/web/app/components/app/configuration/dataset-config/settings-modal/index.tsx @@ -33,7 +33,7 @@ import { ModelTypeEnum } from '@/app/components/header/account-setting/model-pro import { fetchMembers } from '@/service/common' import type { Member } from '@/models/common' -type SettingsModalProps = { +interface SettingsModalProps { currentDataset: DataSet onCancel: () => void onSave: (newDataset: DataSet) => void diff --git a/web/app/components/app/configuration/debug/debug-with-multiple-model/chat-item.tsx b/web/app/components/app/configuration/debug/debug-with-multiple-model/chat-item.tsx index 1144c323d1..e8ec11c19a 100644 --- a/web/app/components/app/configuration/debug/debug-with-multiple-model/chat-item.tsx +++ b/web/app/components/app/configuration/debug/debug-with-multiple-model/chat-item.tsx @@ -31,7 +31,7 @@ import { useFeatures } from '@/app/components/base/features/hooks' import type { InputForm } from '@/app/components/base/chat/chat/type' import { getLastAnswer } from '@/app/components/base/chat/utils' -type ChatItemProps = { +interface ChatItemProps { modelAndParameter: ModelAndParameter } const ChatItem: FC = ({ diff 
--git a/web/app/components/app/configuration/debug/debug-with-multiple-model/text-generation-item.tsx b/web/app/components/app/configuration/debug/debug-with-multiple-model/text-generation-item.tsx index 57c8f83f3f..cab6bf4313 100644 --- a/web/app/components/app/configuration/debug/debug-with-multiple-model/text-generation-item.tsx +++ b/web/app/components/app/configuration/debug/debug-with-multiple-model/text-generation-item.tsx @@ -15,7 +15,7 @@ import { useEventEmitterContextContext } from '@/context/event-emitter' import { useProviderContext } from '@/context/provider-context' import { useFeatures } from '@/app/components/base/features/hooks' -type TextGenerationItemProps = { +interface TextGenerationItemProps { modelAndParameter: ModelAndParameter } const TextGenerationItem: FC = ({ diff --git a/web/app/components/app/configuration/debug/debug-with-single-model/index.tsx b/web/app/components/app/configuration/debug/debug-with-single-model/index.tsx index 2cbfe91f16..da754b81e5 100644 --- a/web/app/components/app/configuration/debug/debug-with-single-model/index.tsx +++ b/web/app/components/app/configuration/debug/debug-with-single-model/index.tsx @@ -27,10 +27,10 @@ import { useFeatures } from '@/app/components/base/features/hooks' import { getLastAnswer } from '@/app/components/base/chat/utils' import type { InputForm } from '@/app/components/base/chat/chat/type' -type DebugWithSingleModelProps = { +interface DebugWithSingleModelProps { checkCanSend?: () => boolean } -export type DebugWithSingleModelRefType = { +export interface DebugWithSingleModelRefType { handleRestart: () => void } const DebugWithSingleModel = forwardRef(({ diff --git a/web/app/components/app/configuration/debug/index.tsx b/web/app/components/app/configuration/debug/index.tsx index 480bd782ae..99632eb0d3 100644 --- a/web/app/components/app/configuration/debug/index.tsx +++ b/web/app/components/app/configuration/debug/index.tsx @@ -48,7 +48,7 @@ import PromptLogModal from '@/app/components/base/prompt-log-modal' import { useStore as useAppStore } from '@/app/components/app/store' import { useFeatures, useFeaturesStore } from '@/app/components/base/features/hooks' -type IDebug = { +interface IDebug { isAPIKeySet: boolean onSetting: () => void inputs: Inputs diff --git a/web/app/components/app/configuration/prompt-value-panel/index.tsx b/web/app/components/app/configuration/prompt-value-panel/index.tsx index a4aadc9576..39ffeaf0d0 100644 --- a/web/app/components/app/configuration/prompt-value-panel/index.tsx +++ b/web/app/components/app/configuration/prompt-value-panel/index.tsx @@ -23,7 +23,7 @@ import { DEFAULT_VALUE_MAX_LEN } from '@/config' import { useStore as useAppStore } from '@/app/components/app/store' import cn from '@/utils/classnames' -export type IPromptValuePanelProps = { +export interface IPromptValuePanelProps { appType: AppType onSend?: () => void inputs: Inputs diff --git a/web/app/components/app/configuration/tools/external-data-tool-modal.tsx b/web/app/components/app/configuration/tools/external-data-tool-modal.tsx index eefdd4514c..e1fe73ee32 100644 --- a/web/app/components/app/configuration/tools/external-data-tool-modal.tsx +++ b/web/app/components/app/configuration/tools/external-data-tool-modal.tsx @@ -21,13 +21,13 @@ import { useToastContext } from '@/app/components/base/toast' import AppIcon from '@/app/components/base/app-icon' const systemTypes = ['api'] -type ExternalDataToolModalProps = { +interface ExternalDataToolModalProps { data: ExternalDataTool onCancel: () => void onSave: 
(externalDataTool: ExternalDataTool) => void onValidateBeforeSave?: (externalDataTool: ExternalDataTool) => boolean } -type Provider = { +interface Provider { key: string name: string form_schema?: CodeBasedExtensionItem['form_schema'] diff --git a/web/app/components/app/duplicate-modal/index.tsx b/web/app/components/app/duplicate-modal/index.tsx index bcad1c24f2..25a5cbf6c1 100644 --- a/web/app/components/app/duplicate-modal/index.tsx +++ b/web/app/components/app/duplicate-modal/index.tsx @@ -13,7 +13,7 @@ import { useProviderContext } from '@/context/provider-context' import AppsFull from '@/app/components/billing/apps-full-in-dialog' import type { AppIconType } from '@/types/app' -export type DuplicateAppModalProps = { +export interface DuplicateAppModalProps { appName: string icon_type: AppIconType | null icon: string diff --git a/web/app/components/app/store.ts b/web/app/components/app/store.ts index 5f02f92f0d..3764895ac9 100644 --- a/web/app/components/app/store.ts +++ b/web/app/components/app/store.ts @@ -2,7 +2,7 @@ import { create } from 'zustand' import type { App, AppSSO } from '@/types/app' import type { IChatItem } from '@/app/components/base/chat/chat/type' -type State = { +interface State { appDetail?: App & Partial appSidebarExpand: string currentLogItem?: IChatItem @@ -13,7 +13,7 @@ type State = { showAppConfigureFeaturesModal: boolean } -type Action = { +interface Action { setAppDetail: (appDetail?: App & Partial) => void setAppSiderbarExpand: (state: string) => void setCurrentLogItem: (item?: IChatItem) => void diff --git a/web/app/components/app/switch-app-modal/index.tsx b/web/app/components/app/switch-app-modal/index.tsx index 5b45095251..e1fe809e10 100644 --- a/web/app/components/app/switch-app-modal/index.tsx +++ b/web/app/components/app/switch-app-modal/index.tsx @@ -25,7 +25,7 @@ import { AlertTriangle } from '@/app/components/base/icons/src/vender/solid/aler import AppIcon from '@/app/components/base/app-icon' import { useStore as useAppStore } from '@/app/components/app/store' -type SwitchAppModalProps = { +interface SwitchAppModalProps { show: boolean appDetail: App onSuccess?: () => void diff --git a/web/app/components/app/text-generate/item/index.tsx b/web/app/components/app/text-generate/item/index.tsx index ac868e6ee3..a8084af128 100644 --- a/web/app/components/app/text-generate/item/index.tsx +++ b/web/app/components/app/text-generate/item/index.tsx @@ -33,7 +33,7 @@ import { useChatContext } from '@/app/components/base/chat/chat/context' const MAX_DEPTH = 3 -export type IGenerationItemProps = { +export interface IGenerationItemProps { isWorkflow?: boolean workflowProcessData?: WorkflowProcess className?: string diff --git a/web/app/components/app/workflow-log/detail.tsx b/web/app/components/app/workflow-log/detail.tsx index 2ee9f83c54..f5cf7c3a3b 100644 --- a/web/app/components/app/workflow-log/detail.tsx +++ b/web/app/components/app/workflow-log/detail.tsx @@ -4,7 +4,7 @@ import { useTranslation } from 'react-i18next' import { RiCloseLine } from '@remixicon/react' import Run from '@/app/components/workflow/run' -type ILogDetail = { +interface ILogDetail { runID: string onClose: () => void } diff --git a/web/app/components/app/workflow-log/filter.tsx b/web/app/components/app/workflow-log/filter.tsx index 466e9b8fda..d25f938719 100644 --- a/web/app/components/app/workflow-log/filter.tsx +++ b/web/app/components/app/workflow-log/filter.tsx @@ -6,7 +6,7 @@ import type { QueryParam } from './index' import Chip from '@/app/components/base/chip' import Input 
from '@/app/components/base/input' -type IFilterProps = { +interface IFilterProps { queryParams: QueryParam setQueryParams: (v: QueryParam) => void } diff --git a/web/app/components/base/audio-btn/audio.player.manager.ts b/web/app/components/base/audio-btn/audio.player.manager.ts index 848aef6cba..9b3349754f 100644 --- a/web/app/components/base/audio-btn/audio.player.manager.ts +++ b/web/app/components/base/audio-btn/audio.player.manager.ts @@ -12,6 +12,7 @@ export class AudioPlayerManager { private audioPlayers: AudioPlayer | null = null private msgId: string | undefined + // eslint-disable-next-line private constructor() { } diff --git a/web/app/components/base/audio-btn/index.tsx b/web/app/components/base/audio-btn/index.tsx index 593411ed4d..40a7b96666 100644 --- a/web/app/components/base/audio-btn/index.tsx +++ b/web/app/components/base/audio-btn/index.tsx @@ -7,7 +7,7 @@ import Tooltip from '@/app/components/base/tooltip' import Loading from '@/app/components/base/loading' import { AudioPlayerManager } from '@/app/components/base/audio-btn/audio.player.manager' -type AudioBtnProps = { +interface AudioBtnProps { id?: string voice?: string value?: string diff --git a/web/app/components/base/button/add-button.tsx b/web/app/components/base/button/add-button.tsx index ab0e247d5f..0b06a493ad 100644 --- a/web/app/components/base/button/add-button.tsx +++ b/web/app/components/base/button/add-button.tsx @@ -4,7 +4,7 @@ import React from 'react' import { RiAddLine } from '@remixicon/react' import cn from '@/utils/classnames' -type Props = { +interface Props { className?: string onClick: () => void } diff --git a/web/app/components/base/chat/chat-with-history/config-panel/form-input.tsx b/web/app/components/base/chat/chat-with-history/config-panel/form-input.tsx index 9be0ff319b..2e6df72bfb 100644 --- a/web/app/components/base/chat/chat-with-history/config-panel/form-input.tsx +++ b/web/app/components/base/chat/chat-with-history/config-panel/form-input.tsx @@ -3,7 +3,7 @@ import { useTranslation } from 'react-i18next' import { memo } from 'react' import Textarea from '@/app/components/base/textarea' -type InputProps = { +interface InputProps { form: any value: string onChange: (variable: string, value: string) => void diff --git a/web/app/components/base/chat/chat-with-history/context.tsx b/web/app/components/base/chat/chat-with-history/context.tsx index 060c178993..1000c4899a 100644 --- a/web/app/components/base/chat/chat-with-history/context.tsx +++ b/web/app/components/base/chat/chat-with-history/context.tsx @@ -16,7 +16,7 @@ import type { ConversationItem, } from '@/models/share' -export type ChatWithHistoryContextValue = { +export interface ChatWithHistoryContextValue { appInfoError?: any appInfoLoading?: boolean appMeta?: AppMeta diff --git a/web/app/components/base/chat/chat-with-history/index.tsx b/web/app/components/base/chat/chat-with-history/index.tsx index 16524406d4..886bd0e7ef 100644 --- a/web/app/components/base/chat/chat-with-history/index.tsx +++ b/web/app/components/base/chat/chat-with-history/index.tsx @@ -20,7 +20,7 @@ import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints' import { checkOrSetAccessToken } from '@/app/components/share/utils' import AppUnavailable from '@/app/components/base/app-unavailable' -type ChatWithHistoryProps = { +interface ChatWithHistoryProps { className?: string } const ChatWithHistory: FC = ({ @@ -99,7 +99,7 @@ const ChatWithHistory: FC = ({ ) } -export type ChatWithHistoryWrapProps = { +export interface ChatWithHistoryWrapProps { 
installedAppInfo?: InstalledApp className?: string } diff --git a/web/app/components/base/chat/chat/answer/agent-content.tsx b/web/app/components/base/chat/chat/answer/agent-content.tsx index 6f03c938f1..5e71cf6526 100644 --- a/web/app/components/base/chat/chat/answer/agent-content.tsx +++ b/web/app/components/base/chat/chat/answer/agent-content.tsx @@ -8,7 +8,7 @@ import Thought from '@/app/components/base/chat/chat/thought' import { FileList } from '@/app/components/base/file-uploader' import { getProcessedFilesFromResponse } from '@/app/components/base/file-uploader/utils' -type AgentContentProps = { +interface AgentContentProps { item: ChatItem responding?: boolean } diff --git a/web/app/components/base/chat/chat/answer/basic-content.tsx b/web/app/components/base/chat/chat/answer/basic-content.tsx index 6c8a44cf52..943262cf0c 100644 --- a/web/app/components/base/chat/chat/answer/basic-content.tsx +++ b/web/app/components/base/chat/chat/answer/basic-content.tsx @@ -4,7 +4,7 @@ import type { ChatItem } from '../../types' import { Markdown } from '@/app/components/base/markdown' import cn from '@/utils/classnames' -type BasicContentProps = { +interface BasicContentProps { item: ChatItem } const BasicContent: FC = ({ diff --git a/web/app/components/base/chat/chat/answer/index.tsx b/web/app/components/base/chat/chat/answer/index.tsx index c6d14ddead..c90a346979 100644 --- a/web/app/components/base/chat/chat/answer/index.tsx +++ b/web/app/components/base/chat/chat/answer/index.tsx @@ -23,7 +23,7 @@ import { ChevronRight } from '@/app/components/base/icons/src/vender/line/arrows import cn from '@/utils/classnames' import { FileList } from '@/app/components/base/file-uploader' -type AnswerProps = { +interface AnswerProps { item: ChatItem question: string index: number diff --git a/web/app/components/base/chat/chat/answer/operation.tsx b/web/app/components/base/chat/chat/answer/operation.tsx index dcd3df6482..9886b755b5 100644 --- a/web/app/components/base/chat/chat/answer/operation.tsx +++ b/web/app/components/base/chat/chat/answer/operation.tsx @@ -21,7 +21,7 @@ import { import Tooltip from '@/app/components/base/tooltip' import Log from '@/app/components/base/chat/chat/log' -type OperationProps = { +interface OperationProps { item: ChatItem question: string index: number diff --git a/web/app/components/base/chat/chat/hooks.ts b/web/app/components/base/chat/chat/hooks.ts index fa923ca009..869a51396c 100644 --- a/web/app/components/base/chat/chat/hooks.ts +++ b/web/app/components/base/chat/chat/hooks.ts @@ -33,7 +33,7 @@ import { } from '@/app/components/base/file-uploader/utils' type GetAbortController = (abortController: AbortController) => void -type SendCallback = { +interface SendCallback { onGetConversationMessages?: (conversationId: string, getAbortController: GetAbortController) => Promise onGetSuggestedQuestions?: (responseItemId: string, getAbortController: GetAbortController) => Promise onConversationComplete?: (conversationId: string) => void diff --git a/web/app/components/base/chat/chat/index.tsx b/web/app/components/base/chat/chat/index.tsx index e6de01252d..ad7a708d23 100644 --- a/web/app/components/base/chat/chat/index.tsx +++ b/web/app/components/base/chat/chat/index.tsx @@ -35,7 +35,7 @@ import PromptLogModal from '@/app/components/base/prompt-log-modal' import { useStore as useAppStore } from '@/app/components/app/store' import type { AppData } from '@/models/share' -export type ChatProps = { +export interface ChatProps { appData?: AppData chatList: ChatItem[] config?: 
ChatConfig diff --git a/web/app/components/base/chat/chat/question.tsx b/web/app/components/base/chat/chat/question.tsx index 7052c1fb5e..3df5aa215c 100644 --- a/web/app/components/base/chat/chat/question.tsx +++ b/web/app/components/base/chat/chat/question.tsx @@ -12,7 +12,7 @@ import { User } from '@/app/components/base/icons/src/public/avatar' import { Markdown } from '@/app/components/base/markdown' import { FileList } from '@/app/components/base/file-uploader' -type QuestionProps = { +interface QuestionProps { item: ChatItem questionIcon?: ReactNode theme: Theme | null | undefined diff --git a/web/app/components/base/chat/chat/thought/index.tsx b/web/app/components/base/chat/chat/thought/index.tsx index 409f83dfaa..dbadd3465e 100644 --- a/web/app/components/base/chat/chat/thought/index.tsx +++ b/web/app/components/base/chat/chat/thought/index.tsx @@ -4,7 +4,7 @@ import React from 'react' import type { ThoughtItem, ToolInfoInThought } from '../type' import ToolDetail from '@/app/components/base/chat/chat/answer/tool-detail' -export type IThoughtProps = { +export interface IThoughtProps { thought: ThoughtItem isFinished: boolean } diff --git a/web/app/components/base/chat/chat/type.ts b/web/app/components/base/chat/chat/type.ts index 7f22ba05b7..bd61ae6e97 100644 --- a/web/app/components/base/chat/chat/type.ts +++ b/web/app/components/base/chat/chat/type.ts @@ -4,13 +4,13 @@ import type { FileEntity } from '@/app/components/base/file-uploader/types' import type { InputVarType } from '@/app/components/workflow/types' import type { FileResponse } from '@/types/workflow' -export type MessageMore = { +export interface MessageMore { time: string tokens: number latency: number | string } -export type FeedbackType = { +export interface FeedbackType { rating: MessageRating content?: string | null } @@ -26,7 +26,7 @@ export type SubmitAnnotationFunc = ( export type DisplayScene = 'web' | 'console' -export type ToolInfoInThought = { +export interface ToolInfoInThought { name: string label: string input: string @@ -34,7 +34,7 @@ export type ToolInfoInThought = { isFinished: boolean } -export type ThoughtItem = { +export interface ThoughtItem { id: string tool: string // plugin or dataset. May has multi. 
thought: string @@ -47,7 +47,7 @@ export type ThoughtItem = { message_files?: FileEntity[] } -export type CitationItem = { +export interface CitationItem { content: string data_source_type: string dataset_name: string @@ -62,7 +62,7 @@ export type CitationItem = { word_count: number } -export type IChatItem = { +export interface IChatItem { id: string content: string citation?: CitationItem[] @@ -104,7 +104,7 @@ export type IChatItem = { nextSibling?: string } -export type Metadata = { +export interface Metadata { retriever_resources?: CitationItem[] annotation_reply: { id: string @@ -115,20 +115,20 @@ export type Metadata = { } } -export type MessageEnd = { +export interface MessageEnd { id: string metadata: Metadata files?: FileResponse[] } -export type MessageReplace = { +export interface MessageReplace { id: string task_id: string answer: string conversation_id: string } -export type AnnotationReply = { +export interface AnnotationReply { id: string task_id: string answer: string @@ -137,7 +137,7 @@ export type AnnotationReply = { annotation_author_name: string } -export type InputForm = { +export interface InputForm { type: InputVarType label: string variable: any diff --git a/web/app/components/base/chat/embedded-chatbot/config-panel/form-input.tsx b/web/app/components/base/chat/embedded-chatbot/config-panel/form-input.tsx index 9be0ff319b..2e6df72bfb 100644 --- a/web/app/components/base/chat/embedded-chatbot/config-panel/form-input.tsx +++ b/web/app/components/base/chat/embedded-chatbot/config-panel/form-input.tsx @@ -3,7 +3,7 @@ import { useTranslation } from 'react-i18next' import { memo } from 'react' import Textarea from '@/app/components/base/textarea' -type InputProps = { +interface InputProps { form: any value: string onChange: (variable: string, value: string) => void diff --git a/web/app/components/base/chat/embedded-chatbot/context.tsx b/web/app/components/base/chat/embedded-chatbot/context.tsx index f48247a691..546f9c7c70 100644 --- a/web/app/components/base/chat/embedded-chatbot/context.tsx +++ b/web/app/components/base/chat/embedded-chatbot/context.tsx @@ -15,7 +15,7 @@ import type { ConversationItem, } from '@/models/share' -export type EmbeddedChatbotContextValue = { +export interface EmbeddedChatbotContextValue { appInfoError?: any appInfoLoading?: boolean appMeta?: AppMeta diff --git a/web/app/components/base/chat/types.ts b/web/app/components/base/chat/types.ts index 05b2185d80..442cba85e2 100644 --- a/web/app/components/base/chat/types.ts +++ b/web/app/components/base/chat/types.ts @@ -14,32 +14,32 @@ export type { PromptVariable, } from '@/models/debug' -export type UserInputForm = { +export interface UserInputForm { default: string label: string required: boolean variable: string } -export type UserInputFormTextInput = { +export interface UserInputFormTextInput { 'text-input': UserInputForm & { max_length: number } } -export type UserInputFormSelect = { +export interface UserInputFormSelect { select: UserInputForm & { options: string[] } } -export type UserInputFormParagraph = { +export interface UserInputFormParagraph { paragraph: UserInputForm } export type VisionConfig = VisionSettings -export type EnableType = { +export interface EnableType { enabled: boolean } @@ -50,7 +50,7 @@ export type ChatConfig = Omit & { supportCitationHitInfo?: boolean } -export type WorkflowProcess = { +export interface WorkflowProcess { status: WorkflowRunningStatus tracing: NodeTracing[] expand?: boolean // for UI @@ -73,10 +73,10 @@ export type OnSend = (message: string, files?: 
FileEntity[], last_answer?: ChatI export type OnRegenerate = (chatItem: ChatItem) => void -export type Callback = { +export interface Callback { onSuccess: () => void } -export type Feedback = { +export interface Feedback { rating: 'like' | 'dislike' | null } diff --git a/web/app/components/base/features/store.ts b/web/app/components/base/features/store.ts index 2b8c3f7073..49ce0f4338 100644 --- a/web/app/components/base/features/store.ts +++ b/web/app/components/base/features/store.ts @@ -2,16 +2,16 @@ import { createStore } from 'zustand' import type { Features } from './types' import { Resolution, TransferMethod } from '@/types/app' -export type FeaturesModal = { +export interface FeaturesModal { showFeaturesModal: boolean setShowFeaturesModal: (showFeaturesModal: boolean) => void } -export type FeaturesState = { +export interface FeaturesState { features: Features } -export type FeaturesAction = { +export interface FeaturesAction { setFeatures: (features: Features) => void } diff --git a/web/app/components/base/features/types.ts b/web/app/components/base/features/types.ts index 83f876383d..c948e538d7 100644 --- a/web/app/components/base/features/types.ts +++ b/web/app/components/base/features/types.ts @@ -1,7 +1,7 @@ import type { Resolution, TransferMethod, TtsAutoPlay } from '@/types/app' import type { FileUploadConfigResponse } from '@/models/common' -export type EnabledOrDisabled = { +export interface EnabledOrDisabled { enabled?: boolean } @@ -42,7 +42,7 @@ export type FileUpload = { fileUploadConfig?: FileUploadConfigResponse } & EnabledOrDisabled -export type AnnotationReplyConfig = { +export interface AnnotationReplyConfig { enabled: boolean id?: string score_threshold?: number @@ -64,7 +64,7 @@ export enum FeatureEnum { annotationReply = 'annotationReply', } -export type Features = { +export interface Features { [FeatureEnum.moreLikeThis]?: MoreLikeThis [FeatureEnum.opening]?: OpeningStatement [FeatureEnum.suggested]?: SuggestedQuestionsAfterAnswer diff --git a/web/app/components/base/icons/assets/public/llm/Anthropic-dark.svg b/web/app/components/base/icons/assets/public/llm/Anthropic-dark.svg new file mode 100644 index 0000000000..57abb737e7 --- /dev/null +++ b/web/app/components/base/icons/assets/public/llm/Anthropic-dark.svg @@ -0,0 +1,186 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/web/app/components/base/icons/assets/public/llm/Anthropic-light.svg b/web/app/components/base/icons/assets/public/llm/Anthropic-light.svg new file mode 100644 index 0000000000..3e587ccc9e --- /dev/null +++ b/web/app/components/base/icons/assets/public/llm/Anthropic-light.svg @@ -0,0 +1,186 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/web/app/components/base/icons/src/public/llm/AnthropicDark.json b/web/app/components/base/icons/src/public/llm/AnthropicDark.json new file mode 100644 index 0000000000..4f3af3ce79 --- 
/dev/null +++ b/web/app/components/base/icons/src/public/llm/AnthropicDark.json @@ -0,0 +1,1046 @@ +{ + "icon": { + "type": "element", + "isRootNode": true, + "name": "svg", + "attributes": { + "width": "90", + "height": "10", + "viewBox": "0 0 90 10", + "fill": "none", + "xmlns": "http://www.w3.org/2000/svg" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Anthropic", + "clip-path": "url(#clip0_5981_49007)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask0_5981_49007", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_2" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector", + "d": "M89.375 -0.00195312H0V9.99805H89.375V-0.00195312Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask0_5981_49007)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_2" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask1_5981_49007", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_4" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_2", + "d": "M0 -0.00390625H89.375V9.99609H0V-0.00390625Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask1_5981_49007)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_2" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_3" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask2_5981_49007", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_12" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_3", + "d": "M0 -0.00585938H89.375V9.99414H0V-0.00585938Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask2_5981_49007)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_3" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_4" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask3_5981_49007", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_89" + }, + "children": [ + { + "type": "element", + "name": "path", + 
"attributes": { + "id": "Vector_4", + "d": "M0 -0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask3_5981_49007)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_4" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_5" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_6" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_5", + "d": "M18.1273 6.92438L13.7773 0.15625H11.4297V9.82501H13.4321V3.05688L17.7821 9.82501H20.1297V0.15625H18.1273V6.92438Z", + "fill": "black", + "fill-opacity": "0.95" + }, + "children": [] + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_5" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask4_5981_49007", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_80" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_6", + "d": "M0 -0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask4_5981_49007)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_7" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_8" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_9" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_7", + "d": "M21.7969 2.02094H25.0423V9.82501H27.1139V2.02094H30.3594V0.15625H21.7969V2.02094Z", + "fill": "black", + "fill-opacity": "0.95" + }, + "children": [] + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_6" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask5_5981_49007", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_71" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_8", + "d": "M0 -0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask5_5981_49007)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_10" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_12" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_9", + "d": "M38.6442 4.00994H34.0871V0.15625H32.0156V9.82501H34.0871V5.87463H38.6442V9.82501H40.7156V0.15625H38.6442V4.00994Z", + "fill": "black", + "fill-opacity": "0.95" + }, + "children": [] + } + ] + } + ] + } + 
] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_7" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask6_5981_49007", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_62" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_10", + "d": "M0 -0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask6_5981_49007)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_13" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_14" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_15" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_11", + "d": "M45.3376 2.02094H47.893C48.9152 2.02094 49.4539 2.39387 49.4539 3.09831C49.4539 3.80275 48.9152 4.17569 47.893 4.17569H45.3376V2.02094ZM51.5259 3.09831C51.5259 1.27506 50.186 0.15625 47.9897 0.15625H43.2656V9.82501H45.3376V6.04037H47.6443L49.7164 9.82501H52.0094L49.715 5.75211C50.8666 5.30941 51.5259 4.37721 51.5259 3.09831Z", + "fill": "black", + "fill-opacity": "0.95" + }, + "children": [] + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_8" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask7_5981_49007", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_53" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_12", + "d": "M0 -0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask7_5981_49007)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_16" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_17" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_18" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_13", + "d": "M57.8732 8.05653C56.2438 8.05653 55.2496 6.89631 55.2496 5.00404C55.2496 3.08416 56.2438 1.92394 57.8732 1.92394C59.4887 1.92394 60.4691 3.08416 60.4691 5.00404C60.4691 6.89631 59.4887 8.05653 57.8732 8.05653ZM57.8732 -0.00976562C55.0839 -0.00976562 53.1094 2.06206 53.1094 5.00404C53.1094 7.91841 55.0839 9.99023 57.8732 9.99023C60.6486 9.99023 62.6094 7.91841 62.6094 5.00404C62.6094 2.06206 60.6486 -0.00976562 57.8732 -0.00976562Z", + "fill": "black", + "fill-opacity": "0.95" + }, + "children": [] + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_9" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask8_5981_49007", + "style": "mask-type:luminance", + "maskUnits": 
"userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_44" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_14", + "d": "M0 -0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask8_5981_49007)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_19" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_20" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_21" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_15", + "d": "M69.1794 4.45194H66.6233V2.02094H69.1794C70.2019 2.02094 70.7407 2.43532 70.7407 3.23644C70.7407 4.03756 70.2019 4.45194 69.1794 4.45194ZM69.2762 0.15625H64.5508V9.82501H66.6233V6.31662H69.2762C71.473 6.31662 72.8133 5.15637 72.8133 3.23644C72.8133 1.3165 71.473 0.15625 69.2762 0.15625Z", + "fill": "black", + "fill-opacity": "0.95" + }, + "children": [] + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_10" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask9_5981_49007", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_35" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_16", + "d": "M0 -0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask9_5981_49007)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_22" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_23" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_24" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_17", + "d": "M86.8413 6.57863C86.4823 7.51786 85.7642 8.05653 84.7837 8.05653C83.1542 8.05653 82.16 6.89631 82.16 5.00404C82.16 3.08416 83.1542 1.92394 84.7837 1.92394C85.7642 1.92394 86.4823 2.46261 86.8413 3.40183H89.0369C88.4984 1.33002 86.8827 -0.00976562 84.7837 -0.00976562C81.9942 -0.00976562 80.0195 2.06206 80.0195 5.00404C80.0195 7.91841 81.9942 9.99023 84.7837 9.99023C86.8965 9.99023 88.5122 8.63664 89.0508 6.57863H86.8413Z", + "fill": "black", + "fill-opacity": "0.95" + }, + "children": [] + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_11" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask10_5981_49007", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_26" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_18", + "d": "M0 
-0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask10_5981_49007)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_25" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_26" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_27" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_19", + "d": "M73.6484 0.15625L77.5033 9.82501H79.6172L75.7624 0.15625H73.6484Z", + "fill": "black", + "fill-opacity": "0.95" + }, + "children": [] + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_12" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask11_5981_49007", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_17" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_20", + "d": "M0 -0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask11_5981_49007)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_28" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_29" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_30" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_21", + "d": "M3.64038 5.99893L4.95938 2.60106L6.27838 5.99893H3.64038ZM3.85422 0.15625L0 9.82501H2.15505L2.9433 7.79456H6.97558L7.76371 9.82501H9.91875L6.06453 0.15625H3.85422Z", + "fill": "black", + "fill-opacity": "0.95" + }, + "children": [] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "element", + "name": "defs", + "attributes": {}, + "children": [ + { + "type": "element", + "name": "clipPath", + "attributes": { + "id": "clip0_5981_49007" + }, + "children": [ + { + "type": "element", + "name": "rect", + "attributes": { + "width": "89.375", + "height": "10", + "fill": "white" + }, + "children": [] + } + ] + } + ] + } + ] + }, + "name": "AnthropicDark" +} \ No newline at end of file diff --git a/web/app/components/base/icons/src/public/llm/AnthropicDark.tsx b/web/app/components/base/icons/src/public/llm/AnthropicDark.tsx new file mode 100644 index 0000000000..d358b0c111 --- /dev/null +++ b/web/app/components/base/icons/src/public/llm/AnthropicDark.tsx @@ -0,0 +1,16 @@ +// GENERATE BY script +// DON NOT EDIT IT MANUALLY + +import * as React from 'react' +import data from './AnthropicDark.json' +import IconBase from '@/app/components/base/icons/IconBase' +import type { IconBaseProps, IconData } from '@/app/components/base/icons/IconBase' + +const Icon = React.forwardRef, Omit>(( + props, + ref, +) => ) + +Icon.displayName = 'AnthropicDark' + +export default Icon diff --git a/web/app/components/base/icons/src/public/llm/AnthropicLight.json b/web/app/components/base/icons/src/public/llm/AnthropicLight.json new file mode 
100644 index 0000000000..3e84eb4dd6 --- /dev/null +++ b/web/app/components/base/icons/src/public/llm/AnthropicLight.json @@ -0,0 +1,1046 @@ +{ + "icon": { + "type": "element", + "isRootNode": true, + "name": "svg", + "attributes": { + "width": "90", + "height": "10", + "viewBox": "0 0 90 10", + "fill": "none", + "xmlns": "http://www.w3.org/2000/svg" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Anthropic", + "clip-path": "url(#clip0_5981_52010)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask0_5981_52010", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_2" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector", + "d": "M89.375 -0.00195312H0V9.99805H89.375V-0.00195312Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask0_5981_52010)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_2" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask1_5981_52010", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_4" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_2", + "d": "M0 -0.00390625H89.375V9.99609H0V-0.00390625Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask1_5981_52010)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_2" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_3" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask2_5981_52010", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_12" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_3", + "d": "M0 -0.00585938H89.375V9.99414H0V-0.00585938Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask2_5981_52010)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_3" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_4" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask3_5981_52010", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_89" + }, + "children": [ + { + 
"type": "element", + "name": "path", + "attributes": { + "id": "Vector_4", + "d": "M0 -0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask3_5981_52010)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_4" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_5" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_6" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_5", + "d": "M18.1273 6.92438L13.7773 0.15625H11.4297V9.82501H13.4321V3.05688L17.7821 9.82501H20.1297V0.15625H18.1273V6.92438Z", + "fill": "white", + "fill-opacity": "0.8" + }, + "children": [] + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_5" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask4_5981_52010", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_80" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_6", + "d": "M0 -0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask4_5981_52010)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_7" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_8" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_9" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_7", + "d": "M21.7969 2.02094H25.0423V9.82501H27.1139V2.02094H30.3594V0.15625H21.7969V2.02094Z", + "fill": "white", + "fill-opacity": "0.8" + }, + "children": [] + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_6" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask5_5981_52010", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_71" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_8", + "d": "M0 -0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask5_5981_52010)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_10" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_12" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_9", + "d": "M38.6442 4.00994H34.0871V0.15625H32.0156V9.82501H34.0871V5.87463H38.6442V9.82501H40.7156V0.15625H38.6442V4.00994Z", + "fill": "white", + "fill-opacity": "0.8" + }, + 
"children": [] + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_7" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask6_5981_52010", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_62" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_10", + "d": "M0 -0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask6_5981_52010)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_13" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_14" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_15" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_11", + "d": "M45.3376 2.02094H47.893C48.9152 2.02094 49.4539 2.39387 49.4539 3.09831C49.4539 3.80275 48.9152 4.17569 47.893 4.17569H45.3376V2.02094ZM51.5259 3.09831C51.5259 1.27506 50.186 0.15625 47.9897 0.15625H43.2656V9.82501H45.3376V6.04037H47.6443L49.7164 9.82501H52.0094L49.715 5.75211C50.8666 5.30941 51.5259 4.37721 51.5259 3.09831Z", + "fill": "white", + "fill-opacity": "0.8" + }, + "children": [] + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_8" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask7_5981_52010", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_53" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_12", + "d": "M0 -0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask7_5981_52010)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_16" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_17" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_18" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_13", + "d": "M57.8732 8.05653C56.2438 8.05653 55.2496 6.89631 55.2496 5.00404C55.2496 3.08416 56.2438 1.92394 57.8732 1.92394C59.4887 1.92394 60.4691 3.08416 60.4691 5.00404C60.4691 6.89631 59.4887 8.05653 57.8732 8.05653ZM57.8732 -0.00976562C55.0839 -0.00976562 53.1094 2.06206 53.1094 5.00404C53.1094 7.91841 55.0839 9.99023 57.8732 9.99023C60.6486 9.99023 62.6094 7.91841 62.6094 5.00404C62.6094 2.06206 60.6486 -0.00976562 57.8732 -0.00976562Z", + "fill": "white", + "fill-opacity": "0.8" + }, + "children": [] + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_9" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask8_5981_52010", + "style": 
"mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_44" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_14", + "d": "M0 -0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask8_5981_52010)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_19" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_20" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_21" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_15", + "d": "M69.1794 4.45194H66.6233V2.02094H69.1794C70.2019 2.02094 70.7407 2.43532 70.7407 3.23644C70.7407 4.03756 70.2019 4.45194 69.1794 4.45194ZM69.2762 0.15625H64.5508V9.82501H66.6233V6.31662H69.2762C71.473 6.31662 72.8133 5.15637 72.8133 3.23644C72.8133 1.3165 71.473 0.15625 69.2762 0.15625Z", + "fill": "white", + "fill-opacity": "0.8" + }, + "children": [] + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_10" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask9_5981_52010", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_35" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_16", + "d": "M0 -0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask9_5981_52010)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_22" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_23" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_24" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_17", + "d": "M86.8413 6.57863C86.4823 7.51786 85.7642 8.05653 84.7837 8.05653C83.1542 8.05653 82.16 6.89631 82.16 5.00404C82.16 3.08416 83.1542 1.92394 84.7837 1.92394C85.7642 1.92394 86.4823 2.46261 86.8413 3.40183H89.0369C88.4984 1.33002 86.8827 -0.00976562 84.7837 -0.00976562C81.9942 -0.00976562 80.0195 2.06206 80.0195 5.00404C80.0195 7.91841 81.9942 9.99023 84.7837 9.99023C86.8965 9.99023 88.5122 8.63664 89.0508 6.57863H86.8413Z", + "fill": "white", + "fill-opacity": "0.8" + }, + "children": [] + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_11" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask10_5981_52010", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_26" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": 
"Vector_18", + "d": "M0 -0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask10_5981_52010)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_25" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_26" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_27" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_19", + "d": "M73.6484 0.15625L77.5033 9.82501H79.6172L75.7624 0.15625H73.6484Z", + "fill": "white", + "fill-opacity": "0.8" + }, + "children": [] + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "id": "Clip path group_12" + }, + "children": [ + { + "type": "element", + "name": "mask", + "attributes": { + "id": "mask11_5981_52010", + "style": "mask-type:luminance", + "maskUnits": "userSpaceOnUse", + "x": "0", + "y": "-1", + "width": "90", + "height": "11" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "__lottie_element_17" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_20", + "d": "M0 -0.0078125H89.375V9.99219H0V-0.0078125Z", + "fill": "white" + }, + "children": [] + } + ] + } + ] + }, + { + "type": "element", + "name": "g", + "attributes": { + "mask": "url(#mask11_5981_52010)" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_28" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_29" + }, + "children": [ + { + "type": "element", + "name": "g", + "attributes": { + "id": "Group_30" + }, + "children": [ + { + "type": "element", + "name": "path", + "attributes": { + "id": "Vector_21", + "d": "M3.64038 5.99893L4.95938 2.60106L6.27838 5.99893H3.64038ZM3.85422 0.15625L0 9.82501H2.15505L2.9433 7.79456H6.97558L7.76371 9.82501H9.91875L6.06453 0.15625H3.85422Z", + "fill": "white", + "fill-opacity": "0.8" + }, + "children": [] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "element", + "name": "defs", + "attributes": {}, + "children": [ + { + "type": "element", + "name": "clipPath", + "attributes": { + "id": "clip0_5981_52010" + }, + "children": [ + { + "type": "element", + "name": "rect", + "attributes": { + "width": "89.375", + "height": "10", + "fill": "white" + }, + "children": [] + } + ] + } + ] + } + ] + }, + "name": "AnthropicLight" +} \ No newline at end of file diff --git a/web/app/components/base/icons/src/public/llm/AnthropicLight.tsx b/web/app/components/base/icons/src/public/llm/AnthropicLight.tsx new file mode 100644 index 0000000000..34df60f28c --- /dev/null +++ b/web/app/components/base/icons/src/public/llm/AnthropicLight.tsx @@ -0,0 +1,16 @@ +// GENERATE BY script +// DON NOT EDIT IT MANUALLY + +import * as React from 'react' +import data from './AnthropicLight.json' +import IconBase from '@/app/components/base/icons/IconBase' +import type { IconBaseProps, IconData } from '@/app/components/base/icons/IconBase' + +const Icon = React.forwardRef, Omit>(( + props, + ref, +) => ) + +Icon.displayName = 'AnthropicLight' + +export default Icon diff --git a/web/app/components/base/icons/src/public/llm/index.ts b/web/app/components/base/icons/src/public/llm/index.ts index 
3545049795..cc9b531ebf 100644 --- a/web/app/components/base/icons/src/public/llm/index.ts +++ b/web/app/components/base/icons/src/public/llm/index.ts @@ -1,3 +1,5 @@ +export { default as AnthropicDark } from './AnthropicDark' +export { default as AnthropicLight } from './AnthropicLight' export { default as AnthropicText } from './AnthropicText' export { default as Anthropic } from './Anthropic' export { default as AzureOpenaiServiceText } from './AzureOpenaiServiceText' diff --git a/web/app/components/base/image-uploader/image-preview.tsx b/web/app/components/base/image-uploader/image-preview.tsx index 084d20c62b..1a11c91275 100644 --- a/web/app/components/base/image-uploader/image-preview.tsx +++ b/web/app/components/base/image-uploader/image-preview.tsx @@ -7,7 +7,7 @@ import { useHotkeys } from 'react-hotkeys-hook' import Tooltip from '@/app/components/base/tooltip' import Toast from '@/app/components/base/toast' -type ImagePreviewProps = { +interface ImagePreviewProps { url: string title: string onCancel: () => void diff --git a/web/app/components/base/markdown.tsx b/web/app/components/base/markdown.tsx index 2bd3ec954c..abb9a546ca 100644 --- a/web/app/components/base/markdown.tsx +++ b/web/app/components/base/markdown.tsx @@ -9,7 +9,7 @@ import RemarkGfm from 'remark-gfm' import RehypeRaw from 'rehype-raw' import SyntaxHighlighter from 'react-syntax-highlighter' import { atelierHeathLight } from 'react-syntax-highlighter/dist/esm/styles/hljs' -import { Component, createContext, memo, useContext, useMemo, useRef, useState } from 'react' +import { Component, createContext, memo, useContext, useEffect, useMemo, useRef, useState } from 'react' import cn from '@/utils/classnames' import CopyBtn from '@/app/components/base/copy-btn' import SVGBtn from '@/app/components/base/svg' diff --git a/web/app/components/base/message-log-modal/index.tsx b/web/app/components/base/message-log-modal/index.tsx index 7bbd3f311d..13667f1dc7 100644 --- a/web/app/components/base/message-log-modal/index.tsx +++ b/web/app/components/base/message-log-modal/index.tsx @@ -7,7 +7,7 @@ import cn from '@/utils/classnames' import type { IChatItem } from '@/app/components/base/chat/chat/type' import Run from '@/app/components/workflow/run' -type MessageLogModalProps = { +interface MessageLogModalProps { currentLogItem?: IChatItem defaultTab?: string width: number diff --git a/web/app/components/base/prompt-editor/index.tsx b/web/app/components/base/prompt-editor/index.tsx index 4a718527b8..8e13a7850a 100644 --- a/web/app/components/base/prompt-editor/index.tsx +++ b/web/app/components/base/prompt-editor/index.tsx @@ -61,7 +61,7 @@ import { import { useEventEmitterContextContext } from '@/context/event-emitter' import cn from '@/utils/classnames' -export type PromptEditorProps = { +export interface PromptEditorProps { instanceId?: string compact?: boolean className?: string diff --git a/web/app/components/base/prompt-editor/plugins/component-picker-block/index.tsx b/web/app/components/base/prompt-editor/plugins/component-picker-block/index.tsx index a5cb39f383..39e8092b1e 100644 --- a/web/app/components/base/prompt-editor/plugins/component-picker-block/index.tsx +++ b/web/app/components/base/prompt-editor/plugins/component-picker-block/index.tsx @@ -32,7 +32,7 @@ import type { PickerBlockMenuOption } from './menu' import VarReferenceVars from '@/app/components/workflow/nodes/_base/components/variable/var-reference-vars' import { useEventEmitterContextContext } from '@/context/event-emitter' -type ComponentPickerProps = 
{ +interface ComponentPickerProps { triggerString: string contextBlock?: ContextBlockType queryBlock?: QueryBlockType diff --git a/web/app/components/base/sort/index.tsx b/web/app/components/base/sort/index.tsx index 36f1fdfdf7..5b30a0edb8 100644 --- a/web/app/components/base/sort/index.tsx +++ b/web/app/components/base/sort/index.tsx @@ -14,7 +14,7 @@ export type Item = { name: string } & Record -type Props = { +interface Props { order?: string value: number | string items: Item[] diff --git a/web/app/components/base/tab-slider-plain/index.tsx b/web/app/components/base/tab-slider-plain/index.tsx index a472aba502..194b6ad650 100644 --- a/web/app/components/base/tab-slider-plain/index.tsx +++ b/web/app/components/base/tab-slider-plain/index.tsx @@ -3,12 +3,12 @@ import type { FC } from 'react' import React from 'react' import cn from '@/utils/classnames' -type Option = { +interface Option { value: string text: string | JSX.Element } -type ItemProps = { +interface ItemProps { className?: string isActive: boolean onClick: (v: string) => void @@ -38,7 +38,7 @@ const Item: FC = ({ ) } -type Props = { +interface Props { className?: string value: string onChange: (v: string) => void diff --git a/web/app/components/base/tag-input/index.tsx b/web/app/components/base/tag-input/index.tsx index ec6c1cee34..e3e5cef732 100644 --- a/web/app/components/base/tag-input/index.tsx +++ b/web/app/components/base/tag-input/index.tsx @@ -7,7 +7,7 @@ import { RiAddLine, RiCloseLine } from '@remixicon/react' import cn from '@/utils/classnames' import { useToastContext } from '@/app/components/base/toast' -type TagInputProps = { +interface TagInputProps { items: string[] onChange: (items: string[]) => void disableRemove?: boolean diff --git a/web/app/components/base/tag-management/filter.tsx b/web/app/components/base/tag-management/filter.tsx index 49a3e7cf4f..00408d3b81 100644 --- a/web/app/components/base/tag-management/filter.tsx +++ b/web/app/components/base/tag-management/filter.tsx @@ -18,7 +18,7 @@ import type { Tag } from '@/app/components/base/tag-management/constant' import { fetchTagList } from '@/service/tag' -type TagFilterProps = { +interface TagFilterProps { type: 'knowledge' | 'app' value: string[] onChange: (v: string[]) => void diff --git a/web/app/components/base/tag-management/selector.tsx b/web/app/components/base/tag-management/selector.tsx index d3d912cfd0..01e9eb162f 100644 --- a/web/app/components/base/tag-management/selector.tsx +++ b/web/app/components/base/tag-management/selector.tsx @@ -16,7 +16,7 @@ import Checkbox from '@/app/components/base/checkbox' import { bindTag, createTag, fetchTagList, unBindTag } from '@/service/tag' import { ToastContext } from '@/app/components/base/toast' -type TagSelectorProps = { +interface TagSelectorProps { targetID: string isPopover?: boolean position?: 'bl' | 'br' diff --git a/web/app/components/base/text-generation/types.ts b/web/app/components/base/text-generation/types.ts index 677a8e7d9b..87a79f4637 100644 --- a/web/app/components/base/text-generation/types.ts +++ b/web/app/components/base/text-generation/types.ts @@ -7,32 +7,32 @@ import type { ExternalDataTool } from '@/models/common' export type { VisionFile } from '@/types/app' export { TransferMethod } from '@/types/app' -export type UserInputForm = { +export interface UserInputForm { default: string label: string required: boolean variable: string } -export type UserInputFormTextInput = { +export interface UserInputFormTextInput { 'text-input': UserInputForm & { max_length: number } } 
-export type UserInputFormSelect = { +export interface UserInputFormSelect { select: UserInputForm & { options: string[] } } -export type UserInputFormParagraph = { +export interface UserInputFormParagraph { paragraph: UserInputForm } export type VisionConfig = VisionSettings -export type EnableType = { +export interface EnableType { enabled: boolean } diff --git a/web/app/components/base/toast/index.tsx b/web/app/components/base/toast/index.tsx index daad66074b..fb18a00d89 100644 --- a/web/app/components/base/toast/index.tsx +++ b/web/app/components/base/toast/index.tsx @@ -51,11 +51,11 @@ const Toast = ({ 'top-0', 'right-0', )}> -
@@ -80,7 +80,7 @@ const Toast = ({ ) }
} export const ToastProvider = ({ diff --git a/web/app/components/datasets/create/empty-dataset-creation-modal/index.tsx b/web/app/components/datasets/create/empty-dataset-creation-modal/index.tsx index 7702a70d3f..bce8f072d1 100644 --- a/web/app/components/datasets/create/empty-dataset-creation-modal/index.tsx +++ b/web/app/components/datasets/create/empty-dataset-creation-modal/index.tsx @@ -12,7 +12,7 @@ import Button from '@/app/components/base/button' import { ToastContext } from '@/app/components/base/toast' import { createEmptyDataset } from '@/service/datasets' -type IProps = { +interface IProps { show: boolean onHide: () => void } diff --git a/web/app/components/datasets/create/step-two/language-select/index.tsx b/web/app/components/datasets/create/step-two/language-select/index.tsx index 9cbf1a40d1..7730e0539a 100644 --- a/web/app/components/datasets/create/step-two/language-select/index.tsx +++ b/web/app/components/datasets/create/step-two/language-select/index.tsx @@ -6,7 +6,7 @@ import cn from '@/utils/classnames' import Popover from '@/app/components/base/popover' import { languages } from '@/i18n/language' -export type ILanguageSelectProps = { +export interface ILanguageSelectProps { currentLanguage: string onSelect: (language: string) => void disabled?: boolean diff --git a/web/app/components/datasets/create/website/jina-reader/base/checkbox-with-label.tsx b/web/app/components/datasets/create/website/jina-reader/base/checkbox-with-label.tsx index 25d40fe076..609bce3125 100644 --- a/web/app/components/datasets/create/website/jina-reader/base/checkbox-with-label.tsx +++ b/web/app/components/datasets/create/website/jina-reader/base/checkbox-with-label.tsx @@ -5,7 +5,7 @@ import cn from '@/utils/classnames' import Checkbox from '@/app/components/base/checkbox' import Tooltip from '@/app/components/base/tooltip' -type Props = { +interface Props { className?: string isChecked: boolean onChange: (isChecked: boolean) => void diff --git a/web/app/components/datasets/create/website/jina-reader/base/error-message.tsx b/web/app/components/datasets/create/website/jina-reader/base/error-message.tsx index aa337ec4bf..4bd23fa293 100644 --- a/web/app/components/datasets/create/website/jina-reader/base/error-message.tsx +++ b/web/app/components/datasets/create/website/jina-reader/base/error-message.tsx @@ -4,7 +4,7 @@ import React from 'react' import cn from '@/utils/classnames' import { AlertTriangle } from '@/app/components/base/icons/src/vender/solid/alertsAndFeedback' -type Props = { +interface Props { className?: string title: string errorMsg?: string diff --git a/web/app/components/datasets/create/website/jina-reader/base/field.tsx b/web/app/components/datasets/create/website/jina-reader/base/field.tsx index 5b5ca90c5d..363a57bdfc 100644 --- a/web/app/components/datasets/create/website/jina-reader/base/field.tsx +++ b/web/app/components/datasets/create/website/jina-reader/base/field.tsx @@ -5,7 +5,7 @@ import Input from './input' import cn from '@/utils/classnames' import Tooltip from '@/app/components/base/tooltip' -type Props = { +interface Props { className?: string label: string labelClassName?: string diff --git a/web/app/components/datasets/create/website/jina-reader/base/input.tsx b/web/app/components/datasets/create/website/jina-reader/base/input.tsx index 7826f080c5..450ef4f721 100644 --- a/web/app/components/datasets/create/website/jina-reader/base/input.tsx +++ b/web/app/components/datasets/create/website/jina-reader/base/input.tsx @@ -2,7 +2,7 @@ import type { FC } from 
'react' import React, { useCallback } from 'react' -type Props = { +interface Props { value: string | number onChange: (value: string | number) => void placeholder?: string diff --git a/web/app/components/datasets/create/website/jina-reader/base/options-wrap.tsx b/web/app/components/datasets/create/website/jina-reader/base/options-wrap.tsx index 652401a20f..6446948961 100644 --- a/web/app/components/datasets/create/website/jina-reader/base/options-wrap.tsx +++ b/web/app/components/datasets/create/website/jina-reader/base/options-wrap.tsx @@ -8,7 +8,7 @@ import { Settings04 } from '@/app/components/base/icons/src/vender/line/general' import { ChevronRight } from '@/app/components/base/icons/src/vender/line/arrows' const I18N_PREFIX = 'datasetCreation.stepOne.website' -type Props = { +interface Props { className?: string children: React.ReactNode controlFoldOptions?: number diff --git a/web/app/components/datasets/create/website/jina-reader/base/url-input.tsx b/web/app/components/datasets/create/website/jina-reader/base/url-input.tsx index e6b0475874..6ed2b9d799 100644 --- a/web/app/components/datasets/create/website/jina-reader/base/url-input.tsx +++ b/web/app/components/datasets/create/website/jina-reader/base/url-input.tsx @@ -7,7 +7,7 @@ import Button from '@/app/components/base/button' const I18N_PREFIX = 'datasetCreation.stepOne.website' -type Props = { +interface Props { isRunning: boolean onRun: (url: string) => void } diff --git a/web/app/components/datasets/create/website/jina-reader/crawled-result-item.tsx b/web/app/components/datasets/create/website/jina-reader/crawled-result-item.tsx index 5531d3e140..4999aeaea6 100644 --- a/web/app/components/datasets/create/website/jina-reader/crawled-result-item.tsx +++ b/web/app/components/datasets/create/website/jina-reader/crawled-result-item.tsx @@ -6,7 +6,7 @@ import cn from '@/utils/classnames' import type { CrawlResultItem as CrawlResultItemType } from '@/models/datasets' import Checkbox from '@/app/components/base/checkbox' -type Props = { +interface Props { payload: CrawlResultItemType isChecked: boolean isPreview: boolean diff --git a/web/app/components/datasets/create/website/jina-reader/crawled-result.tsx b/web/app/components/datasets/create/website/jina-reader/crawled-result.tsx index 2bd51e4d73..1746fe63e3 100644 --- a/web/app/components/datasets/create/website/jina-reader/crawled-result.tsx +++ b/web/app/components/datasets/create/website/jina-reader/crawled-result.tsx @@ -9,7 +9,7 @@ import type { CrawlResultItem } from '@/models/datasets' const I18N_PREFIX = 'datasetCreation.stepOne.website' -type Props = { +interface Props { className?: string list: CrawlResultItem[] checkedList: CrawlResultItem[] diff --git a/web/app/components/datasets/create/website/jina-reader/crawling.tsx b/web/app/components/datasets/create/website/jina-reader/crawling.tsx index ee26e7671a..b84a938d22 100644 --- a/web/app/components/datasets/create/website/jina-reader/crawling.tsx +++ b/web/app/components/datasets/create/website/jina-reader/crawling.tsx @@ -5,7 +5,7 @@ import { useTranslation } from 'react-i18next' import cn from '@/utils/classnames' import { RowStruct } from '@/app/components/base/icons/src/public/other' -type Props = { +interface Props { className?: string crawledNum: number totalNum: number diff --git a/web/app/components/datasets/documents/detail/completed/index.tsx b/web/app/components/datasets/documents/detail/completed/index.tsx index 69cba1d8cd..c833688fbe 100644 --- 
a/web/app/components/datasets/documents/detail/completed/index.tsx +++ b/web/app/components/datasets/documents/detail/completed/index.tsx @@ -79,7 +79,7 @@ export const useSegmentListContext = (selector: (value: SegmentListContextValue) return useContextSelector(SegmentListContext, selector) } -type ICompletedProps = { +interface ICompletedProps { embeddingAvailable: boolean showNewSegmentModal: boolean onNewSegmentModalChange: (state: boolean) => void diff --git a/web/app/components/datasets/documents/detail/metadata/index.tsx b/web/app/components/datasets/documents/detail/metadata/index.tsx index 4a5560203e..da96b859be 100644 --- a/web/app/components/datasets/documents/detail/metadata/index.tsx +++ b/web/app/components/datasets/documents/detail/metadata/index.tsx @@ -29,7 +29,7 @@ const map2Options = (map: { [key: string]: string }) => { return Object.keys(map).map(key => ({ value: key, name: map[key] })) } -type IFieldInfoProps = { +interface IFieldInfoProps { label: string value?: string valueIcon?: ReactNode @@ -117,7 +117,7 @@ const IconButton: FC<{ ) } -type IMetadataProps = { +interface IMetadataProps { docDetail?: FullDocumentDetail loading: boolean onUpdate: () => void diff --git a/web/app/components/datasets/documents/detail/settings/index.tsx b/web/app/components/datasets/documents/detail/settings/index.tsx index 05c52d4de8..6bd6aaa265 100644 --- a/web/app/components/datasets/documents/detail/settings/index.tsx +++ b/web/app/components/datasets/documents/detail/settings/index.tsx @@ -16,7 +16,7 @@ import { ModelTypeEnum } from '@/app/components/header/account-setting/model-pro import type { NotionPage } from '@/models/common' import { useDocumentDetail, useInvalidDocumentDetailKey } from '@/service/knowledge/use-document' -type DocumentSettingsProps = { +interface DocumentSettingsProps { datasetId: string documentId: string } diff --git a/web/app/components/datasets/documents/index.tsx b/web/app/components/datasets/documents/index.tsx index c9df2f28e2..bbd1c03214 100644 --- a/web/app/components/datasets/documents/index.tsx +++ b/web/app/components/datasets/documents/index.tsx @@ -73,7 +73,7 @@ const EmptyElement: FC<{ canAdd: boolean; onClick: () => void; type?: 'upload' | } -type IDocumentsProps = { +interface IDocumentsProps { datasetId: string } diff --git a/web/app/components/datasets/documents/rename-modal.tsx b/web/app/components/datasets/documents/rename-modal.tsx index 883897b510..0b73c37207 100644 --- a/web/app/components/datasets/documents/rename-modal.tsx +++ b/web/app/components/datasets/documents/rename-modal.tsx @@ -9,7 +9,7 @@ import Button from '@/app/components/base/button' import Input from '@/app/components/base/input' import { renameDocumentName } from '@/service/datasets' -type Props = { +interface Props { datasetId: string documentId: string name: string diff --git a/web/app/components/datasets/hit-testing/textarea.tsx b/web/app/components/datasets/hit-testing/textarea.tsx index fcd72a2f1b..1ae715b528 100644 --- a/web/app/components/datasets/hit-testing/textarea.tsx +++ b/web/app/components/datasets/hit-testing/textarea.tsx @@ -15,7 +15,7 @@ import { asyncRunSafe } from '@/utils' import { RETRIEVE_METHOD, type RetrievalConfig } from '@/types/app' import promptS from '@/app/components/app/configuration/config-prompt/style.module.css' -type TextAreaWithButtonIProps = { +interface TextAreaWithButtonIProps { datasetId: string onUpdateList: () => void setHitResult: (res: HitTestingResponse) => void diff --git a/web/app/components/datasets/rename-modal/index.tsx 
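// A minimal before/after sketch of the `type` → `interface` rewrite applied across
// these web component hunks. The prop names below are placeholders, and the rule
// motivating the change is assumed to be a consistent-type-definitions style lint
// preference; the diff itself only shows the mechanical rewrite.

// Before: an object type alias.
type ModalPropsAlias = {
  show: boolean
  onHide: () => void
}

// After: the equivalent interface declaration. For plain object shapes the two are
// interchangeable at use sites; interfaces additionally support `extends` and
// declaration merging.
interface ModalProps {
  show: boolean
  onHide: () => void
}

// Either form types the same props object.
const example: ModalProps = { show: true, onHide: () => {} }
const legacy: ModalPropsAlias = example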
b/web/app/components/datasets/rename-modal/index.tsx index e93862f63d..22c7f8e988 100644 --- a/web/app/components/datasets/rename-modal/index.tsx +++ b/web/app/components/datasets/rename-modal/index.tsx @@ -14,7 +14,7 @@ import { ToastContext } from '@/app/components/base/toast' import type { DataSet } from '@/models/datasets' import { updateDatasetSetting } from '@/service/datasets' -type RenameDatasetModalProps = { +interface RenameDatasetModalProps { show: boolean dataset: DataSet onSuccess?: () => void diff --git a/web/app/components/datasets/settings/permission-selector/index.tsx b/web/app/components/datasets/settings/permission-selector/index.tsx index f70e41d46f..ace8c4512b 100644 --- a/web/app/components/datasets/settings/permission-selector/index.tsx +++ b/web/app/components/datasets/settings/permission-selector/index.tsx @@ -15,7 +15,7 @@ import { Users01, UsersPlus } from '@/app/components/base/icons/src/vender/solid import type { DatasetPermission } from '@/models/datasets' import { useAppContext } from '@/context/app-context' import type { Member } from '@/models/common' -export type RoleSelectorProps = { +export interface RoleSelectorProps { disabled?: boolean permission?: DatasetPermission value: string[] diff --git a/web/app/components/explore/create-app-modal/index.tsx b/web/app/components/explore/create-app-modal/index.tsx index 45baf773f8..152f3b9282 100644 --- a/web/app/components/explore/create-app-modal/index.tsx +++ b/web/app/components/explore/create-app-modal/index.tsx @@ -14,7 +14,7 @@ import { useProviderContext } from '@/context/provider-context' import AppsFull from '@/app/components/billing/apps-full-in-dialog' import type { AppIconType } from '@/types/app' -export type CreateAppModalProps = { +export interface CreateAppModalProps { show: boolean isEditModal?: boolean appName: string diff --git a/web/app/components/header/account-setting/index.tsx b/web/app/components/header/account-setting/index.tsx index 257c060f5c..b3409c226a 100644 --- a/web/app/components/header/account-setting/index.tsx +++ b/web/app/components/header/account-setting/index.tsx @@ -17,6 +17,7 @@ import { RiPuzzle2Line, RiTranslate2, } from '@remixicon/react' +import Button from '../../base/button' import MembersPage from './members-page' import LanguagePage from './language-page' import ApiBasedExtensionPage from './api-based-extension-page' @@ -30,7 +31,6 @@ import { useProviderContext } from '@/context/provider-context' import { useAppContext } from '@/context/app-context' import MenuDialog from '@/app/components/header/account-setting/menu-dialog' import Input from '@/app/components/base/input' -import Button from '@/app/components/base/button' const iconClassName = ` w-5 h-5 mr-2 diff --git a/web/app/components/header/account-setting/model-provider-page/model-icon/index.tsx b/web/app/components/header/account-setting/model-provider-page/model-icon/index.tsx index 0576672a4f..9fa12825fc 100644 --- a/web/app/components/header/account-setting/model-provider-page/model-icon/index.tsx +++ b/web/app/components/header/account-setting/model-provider-page/model-icon/index.tsx @@ -7,6 +7,7 @@ import { useLanguage } from '../hooks' import { Group } from '@/app/components/base/icons/src/vender/other' import { OpenaiBlue, OpenaiViolet } from '@/app/components/base/icons/src/public/llm' import cn from '@/utils/classnames' +import { renderI18nObject } from '@/hooks/use-i18n' type ModelIconProps = { provider?: Model | ModelProvider @@ -22,16 +23,16 @@ const ModelIcon: FC = ({ }) => { const language = 
useLanguage() if (provider?.provider.includes('openai') && modelName?.includes('gpt-4o')) - return
+ return
if (provider?.provider.includes('openai') && modelName?.startsWith('gpt-4')) - return
+ return
if (provider?.icon_small) { return (
model-icon
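// The model-icon hunk above starts importing renderI18nObject for label lookups, and a
// later hunk in this section (web/hooks/use-i18n.ts) adds a guard for missing label
// objects. A sketch of the resulting helper, assuming the record is keyed by locale
// codes such as 'en_US' (the generic parameters were lost when this diff was flattened):
export const renderI18nObject = (obj: Record<string, string> | undefined, language: string): string => {
  if (!obj)
    return '' // new guard: provider/plugin labels may be absent
  if (obj[language])
    return obj[language] // exact locale match first
  if (obj.en_US)
    return obj.en_US // then fall back to English
  return Object.values(obj)[0] ?? '' // otherwise take whichever locale exists
}

// Usage mirroring the provider label below:
// renderI18nObject(provider.label, language) replaces provider.label[language] || provider.label.en_US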
diff --git a/web/app/components/header/account-setting/model-provider-page/provider-icon/index.tsx b/web/app/components/header/account-setting/model-provider-page/provider-icon/index.tsx index 25105b1193..6012b921be 100644 --- a/web/app/components/header/account-setting/model-provider-page/provider-icon/index.tsx +++ b/web/app/components/header/account-setting/model-provider-page/provider-icon/index.tsx @@ -1,7 +1,11 @@ import type { FC } from 'react' import type { ModelProvider } from '../declarations' import { useLanguage } from '../hooks' -import { AnthropicText, Openai } from '@/app/components/base/icons/src/vender/other' +import { Openai } from '@/app/components/base/icons/src/vender/other' +import { useAppContext } from '@/context/app-context' +import { AnthropicDark, AnthropicLight } from '@/app/components/base/icons/src/public/llm' +import { renderI18nObject } from '@/hooks/use-i18n' +import { Theme } from '@/types/app' import cn from '@/utils/classnames' type ProviderIconProps = { @@ -12,12 +16,14 @@ const ProviderIcon: FC = ({ provider, className, }) => { + const { theme } = useAppContext() const language = useLanguage() if (provider.provider === 'langgenius/anthropic/anthropic') { return ( -
- +
+ {theme === Theme.dark && } + {theme === Theme.light && }
) } @@ -34,11 +40,11 @@ const ProviderIcon: FC = ({
provider-icon
- {provider.label[language] || provider.label.en_US} + {renderI18nObject(provider.label, language)}
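// Sketch of the theme-dependent Anthropic mark introduced in the provider-icon hunk
// above. Component names follow the imports shown there; the JSX props were stripped
// when this diff was flattened, so the className handling here is an assumption.
import type { FC } from 'react'

enum Theme { light = 'light', dark = 'dark' } // assumed shape of Theme from '@/types/app'

// Hypothetical stand-ins for the real icon components.
declare const AnthropicDark: FC<{ className?: string }>
declare const AnthropicLight: FC<{ className?: string }>

const AnthropicMark: FC<{ theme: Theme, className?: string }> = ({ theme, className }) => (
  <>
    {theme === Theme.dark && <AnthropicDark className={className} />}
    {theme === Theme.light && <AnthropicLight className={className} />}
  </>
)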
) diff --git a/web/app/components/header/index.tsx b/web/app/components/header/index.tsx index be2ba22086..d0807ff8ad 100644 --- a/web/app/components/header/index.tsx +++ b/web/app/components/header/index.tsx @@ -86,7 +86,7 @@ const Header = () => {
} - + {isMobile && (
diff --git a/web/app/components/i18n.tsx b/web/app/components/i18n.tsx index 7fe1df23e0..f04f8d6cbe 100644 --- a/web/app/components/i18n.tsx +++ b/web/app/components/i18n.tsx @@ -2,7 +2,6 @@ import type { FC } from 'react' import React, { useEffect } from 'react' -import { changeLanguage } from '@/i18n/i18next-config' import I18NContext from '@/context/i18n' import type { Locale } from '@/i18n' import { setLocaleOnClient } from '@/i18n' @@ -16,7 +15,7 @@ const I18n: FC = ({ children, }) => { useEffect(() => { - changeLanguage(locale) + setLocaleOnClient(locale, false) }, [locale]) return ( diff --git a/web/app/components/plugins/card/index.tsx b/web/app/components/plugins/card/index.tsx index 04ef0dd1ee..871f5cb62f 100644 --- a/web/app/components/plugins/card/index.tsx +++ b/web/app/components/plugins/card/index.tsx @@ -13,6 +13,7 @@ import { useGetLanguage } from '@/context/i18n' import { getLanguage } from '@/i18n/language' import { useSingleCategories } from '../hooks' import { renderI18nObject } from '@/hooks/use-i18n' +import { useMixedTranslation } from '@/app/components/plugins/marketplace/hooks' export type Props = { className?: string @@ -43,9 +44,10 @@ const Card = ({ }: Props) => { const defaultLocale = useGetLanguage() const locale = localeFromProps ? getLanguage(localeFromProps) : defaultLocale - const { categoriesMap } = useSingleCategories() + const { t } = useMixedTranslation(localeFromProps) + const { categoriesMap } = useSingleCategories(t) const { category, type, name, org, label, brief, icon, verified } = payload - const isBundle = !['plugin', 'model', 'tool', 'extension', 'agent_strategy'].includes(type) + const isBundle = !['plugin', 'model', 'tool', 'extension', 'agent-strategy'].includes(type) const cornerMark = isBundle ? categoriesMap.bundle?.label : categoriesMap[category]?.label const getLocalizedText = (obj: Record | undefined) => obj ? renderI18nObject(obj, locale) : '' diff --git a/web/app/components/plugins/marketplace/description/index.tsx b/web/app/components/plugins/marketplace/description/index.tsx index 337f456e6d..bf232798c4 100644 --- a/web/app/components/plugins/marketplace/description/index.tsx +++ b/web/app/components/plugins/marketplace/description/index.tsx @@ -12,6 +12,7 @@ const Description = async ({ const localeDefault = getLocaleOnServer() const { t } = await translate(localeFromProps || localeDefault, 'plugin') const { t: tCommon } = await translate(localeFromProps || localeDefault, 'common') + const isZhHans = localeFromProps === 'zh-Hans' return ( <> @@ -19,7 +20,22 @@ const Description = async ({ {t('marketplace.empower')}
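// The web/app/components/i18n.tsx hunk above swaps a direct i18next changeLanguage()
// call for the shared setLocaleOnClient(locale, false) helper inside the effect.
// Consumer-side sketch only; the hook name is hypothetical and the second argument is
// assumed to suppress the reload/notification the helper performs by default (its real
// behaviour lives in web/i18n and is not shown in this diff).
import { useEffect } from 'react'
import { setLocaleOnClient } from '@/i18n'
import type { Locale } from '@/i18n'

export const useSyncLocale = (locale: Locale) => {
  useEffect(() => {
    // keep client-side i18n state in sync with the locale prop
    setLocaleOnClient(locale, false)
  }, [locale])
}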
- {t('marketplace.discover')} + { + isZhHans && ( + <> + {tCommon('operation.in')} + {t('marketplace.difyMarketplace')} + {t('marketplace.discover')} + + ) + } + { + !isZhHans && ( + <> + {t('marketplace.discover')} + + ) + } {t('category.models')} @@ -39,9 +55,15 @@ const Description = async ({ {t('category.bundles')} - {tCommon('operation.in')} - {t('marketplace.difyMarketplace')} -
+ { + !isZhHans && ( + <> + {tCommon('operation.in')} + {t('marketplace.difyMarketplace')} + + ) + } + ) } diff --git a/web/app/components/plugins/marketplace/hooks.ts b/web/app/components/plugins/marketplace/hooks.ts index 8f811d1b3e..83a6709a47 100644 --- a/web/app/components/plugins/marketplace/hooks.ts +++ b/web/app/components/plugins/marketplace/hooks.ts @@ -72,7 +72,7 @@ export const useMarketplacePlugins = () => { const handleUpdatePlugins = useCallback((pluginsSearchParams: PluginsSearchParams) => { mutateAsync(pluginsSearchParams).then((res) => { const currentPage = pluginsSearchParams.page || 1 - const resPlugins = res.data.plugins + const resPlugins = res.data.bundles || res.data.plugins if (currentPage > 1) { setPrevPlugins(prevPlugins => [...(prevPlugins || []), ...resPlugins.map((plugin) => { return getFormattedPlugin(plugin) diff --git a/web/app/components/plugins/marketplace/intersection-line/index.tsx b/web/app/components/plugins/marketplace/intersection-line/index.tsx index 6f8e4b02ab..94f592410a 100644 --- a/web/app/components/plugins/marketplace/intersection-line/index.tsx +++ b/web/app/components/plugins/marketplace/intersection-line/index.tsx @@ -14,7 +14,7 @@ const IntersectionLine = ({ useScrollIntersection(ref, intersectionContainerId) return ( -
+
) } diff --git a/web/app/components/plugins/marketplace/list/list-wrapper.tsx b/web/app/components/plugins/marketplace/list/list-wrapper.tsx index 2dc83ee831..761adee5f8 100644 --- a/web/app/components/plugins/marketplace/list/list-wrapper.tsx +++ b/web/app/components/plugins/marketplace/list/list-wrapper.tsx @@ -37,7 +37,7 @@ const ListWrapper = ({ }, [handleQueryPlugins, marketplaceCollections, marketplaceCollectionsFromClient, isSuccessCollections]) return ( -
+
{ plugins && (
diff --git a/web/app/components/plugins/marketplace/utils.ts b/web/app/components/plugins/marketplace/utils.ts index 78d4437681..0c00a98d6d 100644 --- a/web/app/components/plugins/marketplace/utils.ts +++ b/web/app/components/plugins/marketplace/utils.ts @@ -110,6 +110,9 @@ export const getMarketplaceListCondition = (pluginType: string) => { if (pluginType === PluginType.extension) return 'category=endpoint' + if (pluginType === 'bundle') + return 'type=bundle' + return '' } diff --git a/web/app/components/plugins/plugin-detail-panel/tool-selector/index.tsx b/web/app/components/plugins/plugin-detail-panel/tool-selector/index.tsx index 1be8498788..8e9966e8b8 100644 --- a/web/app/components/plugins/plugin-detail-panel/tool-selector/index.tsx +++ b/web/app/components/plugins/plugin-detail-panel/tool-selector/index.tsx @@ -111,6 +111,7 @@ const ToolSelector: FC = ({ const paramValues = addDefaultValue(tool.params, toolParametersToFormSchemas(tool.paramSchemas.filter(param => param.form !== 'llm') as any)) const toolValue = { provider_name: tool.provider_id, + type: tool.provider_type, tool_name: tool.tool_name, parameters: paramValues, enabled: tool.is_team_authorization, diff --git a/web/app/components/plugins/plugin-item/action.tsx b/web/app/components/plugins/plugin-item/action.tsx index 1bc34c9928..19e611fa09 100644 --- a/web/app/components/plugins/plugin-item/action.tsx +++ b/web/app/components/plugins/plugin-item/action.tsx @@ -54,7 +54,9 @@ const Action: FC = ({ const invalidateInstalledPluginList = useInvalidateInstalledPluginList() const handleFetchNewVersion = async () => { - const fetchedReleases = await fetchReleases(author, pluginName) + const owner = meta!.repo.split('/')[0] || author + const repo = meta!.repo.split('/')[1] || pluginName + const fetchedReleases = await fetchReleases(owner, repo) if (fetchedReleases.length === 0) return const { needUpdate, toastProps } = checkForUpdates(fetchedReleases, meta!.version) Toast.notify(toastProps) @@ -92,7 +94,7 @@ const Action: FC = ({ hideDeleteConfirm() onDelete() } - // eslint-disable-next-line react-hooks/exhaustive-deps + // eslint-disable-next-line react-hooks/exhaustive-deps }, [installationId, onDelete]) return (
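// The marketplace hunks here treat 'bundle' as an extra listing type: utils.ts maps it
// to a type=bundle condition, the search mutation (see the web/service/use-plugins.ts
// hunk at the end of this section) posts to the bundles endpoint, and hooks.ts reads
// `bundles` from the response before falling back to `plugins`. A condensed sketch with
// the response type simplified:
interface MarketplaceSearchResponse {
  plugins: unknown[]
  bundles?: unknown[] // new optional field, mirroring PluginsFromMarketplaceResponse
  total: number
}

const searchPathFor = (type?: string) =>
  type === 'bundle' ? '/bundles/search/basic' : '/plugins/search/basic'

const listConditionFor = (pluginType: string) =>
  pluginType === 'bundle' ? 'type=bundle' : '' // other categories keep their existing mapping

const itemsFrom = (res: { data: MarketplaceSearchResponse }) =>
  res.data.bundles || res.data.plugins // bundle searches return `bundles` instead of `plugins`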
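// The plugin-item/action.tsx hunk above now resolves release lookups from meta.repo
// ('owner/name') rather than the plugin's author and name, keeping those as fallbacks
// when a segment is missing. A small sketch of that parsing, with fetchReleases stubbed:
declare function fetchReleases(owner: string, repo: string): Promise<{ version: string }[]>

const resolveRepo = (repoField: string, author: string, pluginName: string) => {
  const [ownerPart, repoPart] = repoField.split('/')
  return {
    owner: ownerPart || author, // fall back to the marketplace author
    repo: repoPart || pluginName, // fall back to the plugin name
  }
}

// Usage mirroring handleFetchNewVersion:
// const { owner, repo } = resolveRepo(meta!.repo, author, pluginName)
// const fetchedReleases = await fetchReleases(owner, repo)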
diff --git a/web/app/components/plugins/plugin-page/debug-info.tsx b/web/app/components/plugins/plugin-page/debug-info.tsx index 303a12c2aa..e4d249f3e2 100644 --- a/web/app/components/plugins/plugin-page/debug-info.tsx +++ b/web/app/components/plugins/plugin-page/debug-info.tsx @@ -29,7 +29,7 @@ const DebugInfo: FC = () => { popupContent={ <>
- {t(`${i18nPrefix}.title`)} + {t(`${i18nPrefix}.title`)} {t(`${i18nPrefix}.viewDocs`)} diff --git a/web/app/components/plugins/types.ts b/web/app/components/plugins/types.ts index 15da9991a6..e9f7884257 100644 --- a/web/app/components/plugins/types.ts +++ b/web/app/components/plugins/types.ts @@ -320,6 +320,7 @@ export type UninstallPluginResponse = { export type PluginsFromMarketplaceResponse = { plugins: Plugin[] + bundles?: Plugin[] total: number } export type PluginsFromMarketplaceByInfoResponse = { diff --git a/web/app/components/share/text-generation/result/index.tsx b/web/app/components/share/text-generation/result/index.tsx index cd4ed5d287..6d5c63273a 100644 --- a/web/app/components/share/text-generation/result/index.tsx +++ b/web/app/components/share/text-generation/result/index.tsx @@ -24,7 +24,7 @@ import { getFilesInLogs, } from '@/app/components/base/file-uploader/utils' -export type IResultProps = { +export interface IResultProps { isWorkflow: boolean isCallBatchAPI: boolean isPC: boolean diff --git a/web/app/components/swr-initor.tsx b/web/app/components/swr-initor.tsx index 2a119df996..a2ae003139 100644 --- a/web/app/components/swr-initor.tsx +++ b/web/app/components/swr-initor.tsx @@ -6,7 +6,7 @@ import type { ReactNode } from 'react' import { usePathname, useRouter, useSearchParams } from 'next/navigation' import { fetchSetupStatus } from '@/service/common' -type SwrInitorProps = { +interface SwrInitorProps { children: ReactNode } const SwrInitor = ({ diff --git a/web/app/components/workflow/nodes/_base/components/add-button.tsx b/web/app/components/workflow/nodes/_base/components/add-button.tsx index 1e7323c8d7..c7fdcaa009 100644 --- a/web/app/components/workflow/nodes/_base/components/add-button.tsx +++ b/web/app/components/workflow/nodes/_base/components/add-button.tsx @@ -7,7 +7,7 @@ import { import cn from '@/utils/classnames' import Button from '@/app/components/base/button' -type Props = { +interface Props { className?: string text: string onClick: () => void diff --git a/web/app/components/workflow/nodes/_base/components/before-run-form/form-item.tsx b/web/app/components/workflow/nodes/_base/components/before-run-form/form-item.tsx index e2ca592a62..a4885759c4 100644 --- a/web/app/components/workflow/nodes/_base/components/before-run-form/form-item.tsx +++ b/web/app/components/workflow/nodes/_base/components/before-run-form/form-item.tsx @@ -25,7 +25,7 @@ import { BubbleX } from '@/app/components/base/icons/src/vender/line/others' import { FILE_EXTS } from '@/app/components/base/prompt-editor/constants' import cn from '@/utils/classnames' -type Props = { +interface Props { payload: InputVar value: any onChange: (value: any) => void diff --git a/web/app/components/workflow/nodes/_base/components/before-run-form/form.tsx b/web/app/components/workflow/nodes/_base/components/before-run-form/form.tsx index 967e796373..e9ac442680 100644 --- a/web/app/components/workflow/nodes/_base/components/before-run-form/form.tsx +++ b/web/app/components/workflow/nodes/_base/components/before-run-form/form.tsx @@ -9,7 +9,7 @@ import { InputVarType } from '@/app/components/workflow/types' import AddButton from '@/app/components/base/button/add-button' import { RETRIEVAL_OUTPUT_STRUCT } from '@/app/components/workflow/constants' -export type Props = { +export interface Props { className?: string label?: string inputs: InputVar[] diff --git a/web/app/components/workflow/nodes/_base/components/editor/base.tsx b/web/app/components/workflow/nodes/_base/components/editor/base.tsx index 
ead88b86dd..9e20a3857f 100644 --- a/web/app/components/workflow/nodes/_base/components/editor/base.tsx +++ b/web/app/components/workflow/nodes/_base/components/editor/base.tsx @@ -16,7 +16,7 @@ import useToggleExpend from '@/app/components/workflow/nodes/_base/hooks/use-tog import type { FileEntity } from '@/app/components/base/file-uploader/types' import FileListInLog from '@/app/components/base/file-uploader/file-list-in-log' -type Props = { +interface Props { className?: string title: JSX.Element | string headerRight?: JSX.Element diff --git a/web/app/components/workflow/nodes/_base/components/input-var-type-icon.tsx b/web/app/components/workflow/nodes/_base/components/input-var-type-icon.tsx index d3cc1dbc78..178adc5ea3 100644 --- a/web/app/components/workflow/nodes/_base/components/input-var-type-icon.tsx +++ b/web/app/components/workflow/nodes/_base/components/input-var-type-icon.tsx @@ -4,7 +4,7 @@ import React from 'react' import { RiAlignLeft, RiCheckboxMultipleLine, RiFileCopy2Line, RiFileList2Line, RiHashtag, RiTextSnippet } from '@remixicon/react' import { InputVarType } from '../../../types' -type Props = { +interface Props { className?: string type: InputVarType } diff --git a/web/app/components/workflow/nodes/_base/components/split.tsx b/web/app/components/workflow/nodes/_base/components/split.tsx index 28cd05f6da..9f773ba960 100644 --- a/web/app/components/workflow/nodes/_base/components/split.tsx +++ b/web/app/components/workflow/nodes/_base/components/split.tsx @@ -3,7 +3,7 @@ import type { FC } from 'react' import React from 'react' import cn from '@/utils/classnames' -type Props = { +interface Props { className?: string } diff --git a/web/app/components/workflow/nodes/_base/components/variable-tag.tsx b/web/app/components/workflow/nodes/_base/components/variable-tag.tsx index 0c5c3bde4b..67c8bb57df 100644 --- a/web/app/components/workflow/nodes/_base/components/variable-tag.tsx +++ b/web/app/components/workflow/nodes/_base/components/variable-tag.tsx @@ -19,7 +19,7 @@ import Tooltip from '@/app/components/base/tooltip' import cn from '@/utils/classnames' import { isExceptionVariable } from '@/app/components/workflow/utils' -type VariableTagProps = { +interface VariableTagProps { valueSelector: ValueSelector varType: VarType isShort?: boolean diff --git a/web/app/components/workflow/nodes/_base/components/variable/output-var-list.tsx b/web/app/components/workflow/nodes/_base/components/variable/output-var-list.tsx index 1c07461b42..17c0061f0e 100644 --- a/web/app/components/workflow/nodes/_base/components/variable/output-var-list.tsx +++ b/web/app/components/workflow/nodes/_base/components/variable/output-var-list.tsx @@ -11,7 +11,7 @@ import type { VarType } from '@/app/components/workflow/types' import { checkKeys } from '@/utils/var' import Toast from '@/app/components/base/toast' -type Props = { +interface Props { readonly: boolean outputs: OutputVar outputKeyOrders: string[] diff --git a/web/app/components/workflow/nodes/_base/components/variable/var-reference-vars.tsx b/web/app/components/workflow/nodes/_base/components/variable/var-reference-vars.tsx index caec6706ab..3b7845003a 100644 --- a/web/app/components/workflow/nodes/_base/components/variable/var-reference-vars.tsx +++ b/web/app/components/workflow/nodes/_base/components/variable/var-reference-vars.tsx @@ -17,7 +17,7 @@ import { BubbleX, Env } from '@/app/components/base/icons/src/vender/line/others import { checkKeys } from '@/utils/var' import { FILE_STRUCT } from '@/app/components/workflow/constants' 
-type ObjectChildrenProps = { +interface ObjectChildrenProps { nodeId: string title: string data: Var[] @@ -28,7 +28,7 @@ type ObjectChildrenProps = { isSupportFileVar?: boolean } -type ItemProps = { +interface ItemProps { nodeId: string title: string objPath: string[] @@ -226,7 +226,7 @@ const ObjectChildren: FC = ({ ) } -type Props = { +interface Props { hideSearch?: boolean searchBoxClassName?: string vars: NodeOutPutVar[] diff --git a/web/app/components/workflow/nodes/agent/panel.tsx b/web/app/components/workflow/nodes/agent/panel.tsx index a0210d0363..50eadf9b3e 100644 --- a/web/app/components/workflow/nodes/agent/panel.tsx +++ b/web/app/components/workflow/nodes/agent/panel.tsx @@ -15,6 +15,7 @@ import formatTracing from '@/app/components/workflow/run/utils/format-log' import { useLogs } from '@/app/components/workflow/run/hooks' import type { Props as FormProps } from '@/app/components/workflow/nodes/_base/components/before-run-form/form' import { toType } from '@/app/components/tools/utils/to-form-schema' +import { useStore } from '../../store' const i18nPrefix = 'workflow.nodes.agent' @@ -72,6 +73,8 @@ const AgentPanel: FC> = (props) => { return forms })() + const resetEditor = useStore(s => s.setControlPromptEditorRerenderKey) + return
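// Sketch of the "bump a key to force a remount" pattern the agent panel hunk uses:
// it reads setControlPromptEditorRerenderKey from the workflow store and calls it with
// Date.now() after resetting agent_parameters, so anything keyed on that value is
// rebuilt. The zustand-style store shape below is an assumption; the real store lives
// in web/app/components/workflow/store.
import { create } from 'zustand'

interface WorkflowSlice {
  controlPromptEditorRerenderKey: number
  setControlPromptEditorRerenderKey: (key: number) => void
}

const useWorkflowStore = create<WorkflowSlice>(set => ({
  controlPromptEditorRerenderKey: 0,
  setControlPromptEditorRerenderKey: key => set({ controlPromptEditorRerenderKey: key }),
}))

// In the panel:
//   const resetEditor = useWorkflowStore(s => s.setControlPromptEditorRerenderKey)
//   ...on strategy change: resetEditor(Date.now())
// A prompt editor rendered with key={controlPromptEditorRerenderKey} then remounts.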
> = (props) => { agent_strategy_label: strategy?.agent_strategy_label, output_schema: strategy!.agent_output_schema, plugin_unique_identifier: strategy!.plugin_unique_identifier, + agent_parameters: {}, }) + resetEditor(Date.now()) }} formSchema={currentStrategy?.parameters?.map(strategyParamToCredientialForm) || []} formValue={formData} diff --git a/web/app/components/workflow/nodes/http/components/authorization/index.tsx b/web/app/components/workflow/nodes/http/components/authorization/index.tsx index 7110188dbe..5f0cf8d888 100644 --- a/web/app/components/workflow/nodes/http/components/authorization/index.tsx +++ b/web/app/components/workflow/nodes/http/components/authorization/index.tsx @@ -17,7 +17,7 @@ import cn from '@/utils/classnames' const i18nPrefix = 'workflow.nodes.http.authorization' -type Props = { +interface Props { nodeId: string payload: AuthorizationPayloadType onChange: (payload: AuthorizationPayloadType) => void diff --git a/web/app/components/workflow/nodes/http/components/edit-body/index.tsx b/web/app/components/workflow/nodes/http/components/edit-body/index.tsx index b58cc68064..0297d3102e 100644 --- a/web/app/components/workflow/nodes/http/components/edit-body/index.tsx +++ b/web/app/components/workflow/nodes/http/components/edit-body/index.tsx @@ -15,7 +15,7 @@ import { VarType } from '@/app/components/workflow/types' const UNIQUE_ID_PREFIX = 'key-value-' -type Props = { +interface Props { readonly: boolean nodeId: string payload: Body diff --git a/web/app/components/workflow/nodes/http/components/key-value/index.tsx b/web/app/components/workflow/nodes/http/components/key-value/index.tsx index e930114f32..97f69ff2db 100644 --- a/web/app/components/workflow/nodes/http/components/key-value/index.tsx +++ b/web/app/components/workflow/nodes/http/components/key-value/index.tsx @@ -4,7 +4,7 @@ import React from 'react' import type { KeyValue } from '../../types' import KeyValueEdit from './key-value-edit' -type Props = { +interface Props { readonly: boolean nodeId: string list: KeyValue[] diff --git a/web/app/components/workflow/nodes/http/components/key-value/key-value-edit/index.tsx b/web/app/components/workflow/nodes/http/components/key-value/key-value-edit/index.tsx index adf7f966e0..dac2c1c17c 100644 --- a/web/app/components/workflow/nodes/http/components/key-value/key-value-edit/index.tsx +++ b/web/app/components/workflow/nodes/http/components/key-value/key-value-edit/index.tsx @@ -9,7 +9,7 @@ import cn from '@/utils/classnames' const i18nPrefix = 'workflow.nodes.http' -type Props = { +interface Props { readonly: boolean nodeId: string list: KeyValue[] diff --git a/web/app/components/workflow/nodes/http/components/key-value/key-value-edit/input-item.tsx b/web/app/components/workflow/nodes/http/components/key-value/key-value-edit/input-item.tsx index b6d2904d64..fdaeefbc4a 100644 --- a/web/app/components/workflow/nodes/http/components/key-value/key-value-edit/input-item.tsx +++ b/web/app/components/workflow/nodes/http/components/key-value/key-value-edit/input-item.tsx @@ -8,7 +8,7 @@ import RemoveButton from '@/app/components/workflow/nodes/_base/components/remov import Input from '@/app/components/workflow/nodes/_base/components/input-support-select-var' import type { Var } from '@/app/components/workflow/types' import { VarType } from '@/app/components/workflow/types' -type Props = { +interface Props { className?: string instanceId?: string nodeId: string diff --git a/web/app/components/workflow/nodes/http/components/key-value/key-value-edit/item.tsx 
b/web/app/components/workflow/nodes/http/components/key-value/key-value-edit/item.tsx index 75c6a77873..9c64f9f764 100644 --- a/web/app/components/workflow/nodes/http/components/key-value/key-value-edit/item.tsx +++ b/web/app/components/workflow/nodes/http/components/key-value/key-value-edit/item.tsx @@ -14,7 +14,7 @@ import { VarType } from '@/app/components/workflow/types' const i18nPrefix = 'workflow.nodes.http' -type Props = { +interface Props { instanceId: string className?: string nodeId: string diff --git a/web/app/components/workflow/nodes/http/components/timeout/index.tsx b/web/app/components/workflow/nodes/http/components/timeout/index.tsx index b378eedde7..2de3bd443c 100644 --- a/web/app/components/workflow/nodes/http/components/timeout/index.tsx +++ b/web/app/components/workflow/nodes/http/components/timeout/index.tsx @@ -6,7 +6,7 @@ import type { Timeout as TimeoutPayloadType } from '../../types' import Input from '@/app/components/base/input' import { FieldCollapse } from '@/app/components/workflow/nodes/_base/components/collapse' -type Props = { +interface Props { readonly: boolean nodeId: string payload: TimeoutPayloadType diff --git a/web/app/components/workflow/nodes/http/types.ts b/web/app/components/workflow/nodes/http/types.ts index f1937ec5bd..775d621eea 100644 --- a/web/app/components/workflow/nodes/http/types.ts +++ b/web/app/components/workflow/nodes/http/types.ts @@ -18,7 +18,7 @@ export enum BodyType { binary = 'binary', } -export type KeyValue = { +export interface KeyValue { id?: string key: string value: string @@ -38,7 +38,7 @@ export type BodyPayload = { file?: ValueSelector // when type is file value?: string // when type is text }[] -export type Body = { +export interface Body { type: BodyType data: string | BodyPayload // string is deprecated, it would convert to BodyPayload after loaded } @@ -54,7 +54,7 @@ export enum APIType { custom = 'custom', } -export type Authorization = { +export interface Authorization { type: AuthorizationType config?: { type: APIType @@ -63,7 +63,7 @@ export type Authorization = { } | null } -export type Timeout = { +export interface Timeout { connect?: number read?: number write?: number diff --git a/web/app/components/workflow/nodes/if-else/components/condition-add.tsx b/web/app/components/workflow/nodes/if-else/components/condition-add.tsx index 344e986305..8b14a59dcc 100644 --- a/web/app/components/workflow/nodes/if-else/components/condition-add.tsx +++ b/web/app/components/workflow/nodes/if-else/components/condition-add.tsx @@ -18,7 +18,7 @@ import type { Var, } from '@/app/components/workflow/types' -type ConditionAddProps = { +interface ConditionAddProps { className?: string caseId: string variables: NodeOutPutVar[] diff --git a/web/app/components/workflow/nodes/if-else/components/condition-list/condition-item.tsx b/web/app/components/workflow/nodes/if-else/components/condition-list/condition-item.tsx index 818383c750..2e89c73074 100644 --- a/web/app/components/workflow/nodes/if-else/components/condition-list/condition-item.tsx +++ b/web/app/components/workflow/nodes/if-else/components/condition-list/condition-item.tsx @@ -39,7 +39,7 @@ import { SimpleSelect as Select } from '@/app/components/base/select' import { Variable02 } from '@/app/components/base/icons/src/vender/solid/development' const optionNameI18NPrefix = 'workflow.nodes.ifElse.optionName' -type ConditionItemProps = { +interface ConditionItemProps { className?: string disabled?: boolean caseId: string diff --git 
a/web/app/components/workflow/nodes/if-else/components/condition-list/condition-operator.tsx b/web/app/components/workflow/nodes/if-else/components/condition-list/condition-operator.tsx index ecbe53f689..afd9b1bccd 100644 --- a/web/app/components/workflow/nodes/if-else/components/condition-list/condition-operator.tsx +++ b/web/app/components/workflow/nodes/if-else/components/condition-list/condition-operator.tsx @@ -16,7 +16,7 @@ import type { VarType } from '@/app/components/workflow/types' import cn from '@/utils/classnames' const i18nPrefix = 'workflow.nodes.ifElse' -type ConditionOperatorProps = { +interface ConditionOperatorProps { className?: string disabled?: boolean varType: VarType diff --git a/web/app/components/workflow/nodes/if-else/components/condition-list/index.tsx b/web/app/components/workflow/nodes/if-else/components/condition-list/index.tsx index 05b5df4f4a..7417cd1077 100644 --- a/web/app/components/workflow/nodes/if-else/components/condition-list/index.tsx +++ b/web/app/components/workflow/nodes/if-else/components/condition-list/index.tsx @@ -19,7 +19,7 @@ import type { } from '@/app/components/workflow/types' import cn from '@/utils/classnames' -type ConditionListProps = { +interface ConditionListProps { isSubVariable?: boolean disabled?: boolean caseId: string diff --git a/web/app/components/workflow/nodes/if-else/components/condition-number-input.tsx b/web/app/components/workflow/nodes/if-else/components/condition-number-input.tsx index 5dabd967cd..95b0aa0c02 100644 --- a/web/app/components/workflow/nodes/if-else/components/condition-number-input.tsx +++ b/web/app/components/workflow/nodes/if-else/components/condition-number-input.tsx @@ -30,7 +30,7 @@ const options = [ NumberVarType.constant, ] -type ConditionNumberInputProps = { +interface ConditionNumberInputProps { numberVarType?: NumberVarType onNumberVarTypeChange: (v: NumberVarType) => void value: string diff --git a/web/app/components/workflow/nodes/if-else/components/condition-value.tsx b/web/app/components/workflow/nodes/if-else/components/condition-value.tsx index e997c2cbd2..3a71b85a0e 100644 --- a/web/app/components/workflow/nodes/if-else/components/condition-value.tsx +++ b/web/app/components/workflow/nodes/if-else/components/condition-value.tsx @@ -20,7 +20,7 @@ import type { Node, } from '@/app/components/workflow/types' -type ConditionValueProps = { +interface ConditionValueProps { variableSelector: string[] labelName?: string operator: ComparisonOperator diff --git a/web/app/components/workflow/nodes/if-else/types.ts b/web/app/components/workflow/nodes/if-else/types.ts index 56952de25a..22238b3389 100644 --- a/web/app/components/workflow/nodes/if-else/types.ts +++ b/web/app/components/workflow/nodes/if-else/types.ts @@ -35,7 +35,7 @@ export enum ComparisonOperator { notExists = 'not exists', } -export type Condition = { +export interface Condition { id: string varType: VarType variable_selector?: ValueSelector @@ -46,7 +46,7 @@ export type Condition = { sub_variable_condition?: CaseItem } -export type CaseItem = { +export interface CaseItem { case_id: string logical_operator: LogicalOperator conditions: Condition[] diff --git a/web/app/components/workflow/nodes/knowledge-retrieval/components/dataset-item.tsx b/web/app/components/workflow/nodes/knowledge-retrieval/components/dataset-item.tsx index 3e9be6485b..b0d992fcf2 100644 --- a/web/app/components/workflow/nodes/knowledge-retrieval/components/dataset-item.tsx +++ b/web/app/components/workflow/nodes/knowledge-retrieval/components/dataset-item.tsx 
@@ -18,7 +18,7 @@ import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints' import Badge from '@/app/components/base/badge' import { useKnowledge } from '@/hooks/use-knowledge' -type Props = { +interface Props { payload: DataSet onRemove: () => void onChange: (dataSet: DataSet) => void diff --git a/web/app/components/workflow/nodes/parameter-extractor/components/extract-parameter/update.tsx b/web/app/components/workflow/nodes/parameter-extractor/components/extract-parameter/update.tsx index f7cf7bddad..dbf0839ebf 100644 --- a/web/app/components/workflow/nodes/parameter-extractor/components/extract-parameter/update.tsx +++ b/web/app/components/workflow/nodes/parameter-extractor/components/extract-parameter/update.tsx @@ -28,7 +28,7 @@ const DEFAULT_PARAM: Param = { required: false, } -type Props = { +interface Props { type: 'add' | 'edit' payload?: Param onSave: (payload: Param, moreInfo?: MoreInfo) => void diff --git a/web/app/components/workflow/nodes/parameter-extractor/components/reasoning-mode-picker.tsx b/web/app/components/workflow/nodes/parameter-extractor/components/reasoning-mode-picker.tsx index f4fd6e85a6..8612e8954f 100644 --- a/web/app/components/workflow/nodes/parameter-extractor/components/reasoning-mode-picker.tsx +++ b/web/app/components/workflow/nodes/parameter-extractor/components/reasoning-mode-picker.tsx @@ -8,7 +8,7 @@ import OptionCard from '../../_base/components/option-card' const i18nPrefix = 'workflow.nodes.parameterExtractor' -type Props = { +interface Props { type: ReasoningModeType onChange: (type: ReasoningModeType) => void } diff --git a/web/app/components/workflow/nodes/parameter-extractor/types.ts b/web/app/components/workflow/nodes/parameter-extractor/types.ts index f5ba717be8..f96d26a7af 100644 --- a/web/app/components/workflow/nodes/parameter-extractor/types.ts +++ b/web/app/components/workflow/nodes/parameter-extractor/types.ts @@ -10,7 +10,7 @@ export enum ParamType { arrayObject = 'array[object]', } -export type Param = { +export interface Param { name: string type: ParamType options?: string[] diff --git a/web/app/components/workflow/nodes/start/components/var-item.tsx b/web/app/components/workflow/nodes/start/components/var-item.tsx index 8a94161999..2ccb1edb56 100644 --- a/web/app/components/workflow/nodes/start/components/var-item.tsx +++ b/web/app/components/workflow/nodes/start/components/var-item.tsx @@ -13,7 +13,7 @@ import { Edit03 } from '@/app/components/base/icons/src/vender/solid/general' import Badge from '@/app/components/base/badge' import ConfigVarModal from '@/app/components/app/configuration/config-var/config-modal' -type Props = { +interface Props { readonly: boolean payload: InputVar onChange?: (item: InputVar, moreInfo?: MoreInfo) => void diff --git a/web/app/components/workflow/nodes/variable-assigner/components/var-group-item.tsx b/web/app/components/workflow/nodes/variable-assigner/components/var-group-item.tsx index eb5a8f8e51..e18327a472 100644 --- a/web/app/components/workflow/nodes/variable-assigner/components/var-group-item.tsx +++ b/web/app/components/workflow/nodes/variable-assigner/components/var-group-item.tsx @@ -24,7 +24,7 @@ type Payload = VarGroupItemType & { group_name?: string } -type Props = { +interface Props { readOnly: boolean nodeId: string payload: Payload diff --git a/web/app/components/workflow/panel/chat-variable-panel/components/array-value-list.tsx b/web/app/components/workflow/panel/chat-variable-panel/components/array-value-list.tsx index 8206f02049..b2bfd2a218 100644 --- 
a/web/app/components/workflow/panel/chat-variable-panel/components/array-value-list.tsx +++ b/web/app/components/workflow/panel/chat-variable-panel/components/array-value-list.tsx @@ -8,7 +8,7 @@ import RemoveButton from '@/app/components/workflow/nodes/_base/components/remov import Button from '@/app/components/base/button' import Input from '@/app/components/base/input' -type Props = { +interface Props { isString: boolean list: any[] onChange: (list: any[]) => void diff --git a/web/app/components/workflow/panel/chat-variable-panel/components/variable-modal.tsx b/web/app/components/workflow/panel/chat-variable-panel/components/variable-modal.tsx index baa42eef24..3c983cd364 100644 --- a/web/app/components/workflow/panel/chat-variable-panel/components/variable-modal.tsx +++ b/web/app/components/workflow/panel/chat-variable-panel/components/variable-modal.tsx @@ -18,13 +18,13 @@ import { ChatVarType } from '@/app/components/workflow/panel/chat-variable-panel import cn from '@/utils/classnames' import { checkKeys } from '@/utils/var' -export type ModalPropsType = { +export interface ModalPropsType { chatVar?: ConversationVariable onClose: () => void onSave: (chatVar: ConversationVariable) => void } -type ObjectValueItem = { +interface ObjectValueItem { key: string type: ChatVarType value: string | number | undefined diff --git a/web/app/components/workflow/panel/debug-and-preview/conversation-variable-modal.tsx b/web/app/components/workflow/panel/debug-and-preview/conversation-variable-modal.tsx index 5991414baf..e60c8afac9 100644 --- a/web/app/components/workflow/panel/debug-and-preview/conversation-variable-modal.tsx +++ b/web/app/components/workflow/panel/debug-and-preview/conversation-variable-modal.tsx @@ -22,7 +22,7 @@ import useTimestamp from '@/hooks/use-timestamp' import { fetchCurrentValueOfConversationVariable } from '@/service/workflow' import cn from '@/utils/classnames' -export type Props = { +export interface Props { conversationID: string onHide: () => void } diff --git a/web/app/components/workflow/panel/env-panel/variable-modal.tsx b/web/app/components/workflow/panel/env-panel/variable-modal.tsx index feabd5a422..da59670a5b 100644 --- a/web/app/components/workflow/panel/env-panel/variable-modal.tsx +++ b/web/app/components/workflow/panel/env-panel/variable-modal.tsx @@ -12,7 +12,7 @@ import type { EnvironmentVariable } from '@/app/components/workflow/types' import cn from '@/utils/classnames' import { checkKeys } from '@/utils/var' -export type ModalPropsType = { +export interface ModalPropsType { env?: EnvironmentVariable onClose: () => void onSave: (env: EnvironmentVariable) => void diff --git a/web/app/components/workflow/panel/inputs-panel.tsx b/web/app/components/workflow/panel/inputs-panel.tsx index 47fec40e60..d7d7f7c5cc 100644 --- a/web/app/components/workflow/panel/inputs-panel.tsx +++ b/web/app/components/workflow/panel/inputs-panel.tsx @@ -25,7 +25,7 @@ import { } from '@/app/components/base/chat/chat/utils' import { useCheckInputsForms } from '@/app/components/base/chat/chat/check-input-forms-hooks' -type Props = { +interface Props { onRun: () => void } diff --git a/web/app/components/workflow/run/result-text.tsx b/web/app/components/workflow/run/result-text.tsx index 27b1f2cd8c..9183226b60 100644 --- a/web/app/components/workflow/run/result-text.tsx +++ b/web/app/components/workflow/run/result-text.tsx @@ -7,7 +7,7 @@ import LoadingAnim from '@/app/components/base/chat/chat/loading-anim' import StatusContainer from 
'@/app/components/workflow/run/status-container' import { FileList } from '@/app/components/base/file-uploader' -type ResultTextProps = { +interface ResultTextProps { isRunning?: boolean outputs?: any error?: string diff --git a/web/app/components/workflow/run/status.tsx b/web/app/components/workflow/run/status.tsx index ef67cb5467..26fc053446 100644 --- a/web/app/components/workflow/run/status.tsx +++ b/web/app/components/workflow/run/status.tsx @@ -5,7 +5,7 @@ import cn from '@/utils/classnames' import Indicator from '@/app/components/header/indicator' import StatusContainer from '@/app/components/workflow/run/status-container' -type ResultProps = { +interface ResultProps { status: string time?: number tokens?: number diff --git a/web/app/signin/oneMoreStep.tsx b/web/app/signin/oneMoreStep.tsx index 8554b364c0..dfb8a04781 100644 --- a/web/app/signin/oneMoreStep.tsx +++ b/web/app/signin/oneMoreStep.tsx @@ -13,7 +13,7 @@ import { LanguagesSupported, languages } from '@/i18n/language' import { oneMoreStep } from '@/service/common' import Toast from '@/app/components/base/toast' -type IState = { +interface IState { formState: 'processing' | 'error' | 'success' | 'initial' invitation_code: string interface_language: string diff --git a/web/hooks/use-i18n.ts b/web/hooks/use-i18n.ts index 261293c86d..d95ef0d114 100644 --- a/web/hooks/use-i18n.ts +++ b/web/hooks/use-i18n.ts @@ -1,6 +1,7 @@ import { useLanguage } from '@/app/components/header/account-setting/model-provider-page/hooks' export const renderI18nObject = (obj: Record, language: string) => { + if (!obj) return '' if (obj?.[language]) return obj[language] if (obj?.en_US) return obj.en_US return Object.values(obj)[0] diff --git a/web/i18n/hi-IN/share-app.ts b/web/i18n/hi-IN/share-app.ts index 88890f86b8..db6fe5393a 100644 --- a/web/i18n/hi-IN/share-app.ts +++ b/web/i18n/hi-IN/share-app.ts @@ -2,9 +2,6 @@ const translation = { common: { welcome: 'आपका स्वागत है', appUnavailable: 'ऐप उपलब्ध नहीं है', - appUnknownError: 'अज्ञात त्रुटि, कृपया पुनः प्रयास करें', - // @ts-expect-error TODO: fix this - appUnknownError: 'ऐप अनुपलब्ध है', }, chat: { diff --git a/web/models/common.ts b/web/models/common.ts index dc2b1120b9..48bdc8ae44 100644 --- a/web/models/common.ts +++ b/web/models/common.ts @@ -1,23 +1,23 @@ import type { I18nText } from '@/i18n/language' -export type CommonResponse = { +export interface CommonResponse { result: 'success' | 'fail' } -export type OauthResponse = { +export interface OauthResponse { redirect_url: string } -export type SetupStatusResponse = { +export interface SetupStatusResponse { step: 'finished' | 'not_started' setup_at?: Date } -export type InitValidateStatusResponse = { +export interface InitValidateStatusResponse { status: 'finished' | 'not_started' } -export type UserProfileResponse = { +export interface UserProfileResponse { id: string name: string email: string @@ -32,13 +32,13 @@ export type UserProfileResponse = { created_at?: string } -export type UserProfileOriginResponse = { +export interface UserProfileOriginResponse { json: () => Promise bodyUsed: boolean headers: any } -export type LangGeniusVersionResponse = { +export interface LangGeniusVersionResponse { current_version: string latest_version: string version: string @@ -48,7 +48,7 @@ export type LangGeniusVersionResponse = { current_env: string } -export type TenantInfoResponse = { +export interface TenantInfoResponse { name: string created_at: string providers: Array<{ @@ -79,14 +79,14 @@ export enum ProviderName { Tongyi = 'tongyi', ChatGLM = 
'chatglm', } -export type ProviderAzureToken = { +export interface ProviderAzureToken { openai_api_base?: string openai_api_key?: string } -export type ProviderAnthropicToken = { +export interface ProviderAnthropicToken { anthropic_api_key?: string } -export type ProviderTokenType = { +export interface ProviderTokenType { [ProviderName.OPENAI]: string [ProviderName.AZURE_OPENAI]: ProviderAzureToken [ProviderName.ANTHROPIC]: ProviderAnthropicToken @@ -109,14 +109,14 @@ export type ProviderHosted = Provider & { quota_used: number } -export type AccountIntegrate = { +export interface AccountIntegrate { provider: 'google' | 'github' created_at: number is_bound: boolean link: string } -export type IWorkspace = { +export interface IWorkspace { id: string name: string plan: string @@ -136,7 +136,7 @@ export type ICurrentWorkspace = Omit & { } } -export type DataSourceNotionPage = { +export interface DataSourceNotionPage { page_icon: null | { type: string | null url: string | null @@ -155,7 +155,7 @@ export type NotionPage = DataSourceNotionPage & { export type DataSourceNotionPageMap = Record -export type DataSourceNotionWorkspace = { +export interface DataSourceNotionWorkspace { workspace_name: string workspace_id: string workspace_icon: string | null @@ -165,7 +165,7 @@ export type DataSourceNotionWorkspace = { export type DataSourceNotionWorkspaceMap = Record -export type DataSourceNotion = { +export interface DataSourceNotion { id: string provider: string is_bound: boolean @@ -180,12 +180,12 @@ export enum DataSourceProvider { jinaReader = 'jinareader', } -export type FirecrawlConfig = { +export interface FirecrawlConfig { api_key: string base_url: string } -export type DataSourceItem = { +export interface DataSourceItem { id: string category: DataSourceCategory provider: DataSourceProvider @@ -194,15 +194,15 @@ export type DataSourceItem = { updated_at: number } -export type DataSources = { +export interface DataSources { sources: DataSourceItem[] } -export type GithubRepo = { +export interface GithubRepo { stargazers_count: number } -export type PluginProvider = { +export interface PluginProvider { tool_name: string is_enabled: boolean credentials: { @@ -210,7 +210,7 @@ export type PluginProvider = { } | null } -export type FileUploadConfigResponse = { +export interface FileUploadConfigResponse { batch_count_limit: number image_file_size_limit?: number | string // default is 10MB file_size_limit: number // default is 15MB @@ -233,14 +233,14 @@ export type InvitationResponse = CommonResponse & { invitation_results: InvitationResult[] } -export type ApiBasedExtension = { +export interface ApiBasedExtension { id?: string name?: string api_endpoint?: string api_key?: string } -export type CodeBasedExtensionForm = { +export interface CodeBasedExtensionForm { type: string label: I18nText variable: string @@ -251,17 +251,17 @@ export type CodeBasedExtensionForm = { max_length?: number } -export type CodeBasedExtensionItem = { +export interface CodeBasedExtensionItem { name: string label: any form_schema: CodeBasedExtensionForm[] } -export type CodeBasedExtension = { +export interface CodeBasedExtension { module: string data: CodeBasedExtensionItem[] } -export type ExternalDataTool = { +export interface ExternalDataTool { type?: string label?: string icon?: string @@ -273,7 +273,7 @@ export type ExternalDataTool = { } & Partial> } -export type ModerateResponse = { +export interface ModerateResponse { flagged: boolean text: string } diff --git a/web/package.json b/web/package.json index 
5306c5c31b..d5020f92e7 100644 --- a/web/package.json +++ b/web/package.json @@ -1,6 +1,6 @@ { "name": "dify-web", - "version": "0.15.0", + "version": "1.0.0-beta.1", "private": true, "engines": { "node": ">=18.17.0" @@ -9,7 +9,7 @@ "dev": "NODE_OPTIONS='--inspect' next dev", "build": "next build", "start": "cp -r .next/static .next/standalone/.next/static && cp -r public .next/standalone/public && cross-env PORT=$npm_config_port HOSTNAME=$npm_config_host node .next/standalone/server.js", - "lint": "next lint", + "lint": "pnpm eslint", "fix": "next lint --fix", "eslint-fix": "eslint --fix", "prepare": "cd ../ && node -e \"if (process.env.NODE_ENV !== 'production'){process.exit(1)} \" || husky ./web/.husky", @@ -27,6 +27,7 @@ "@babel/runtime": "^7.22.3", "@dagrejs/dagre": "^1.1.4", "@emoji-mart/data": "^1.2.1", + "@eslint/compat": "^1.2.4", "@floating-ui/react": "^0.26.25", "@formatjs/intl-localematcher": "^0.5.6", "@headlessui/react": "^1.7.13", @@ -86,14 +87,14 @@ "react-error-boundary": "^4.1.2", "react-headless-pagination": "^1.1.6", "react-hook-form": "^7.53.1", + "react-hotkeys-hook": "^4.6.1", "react-i18next": "^15.1.0", "react-infinite-scroll-component": "^6.1.0", "react-markdown": "^9.0.1", "react-multi-email": "^1.0.25", "react-papaparse": "^4.4.0", - "react-slider": "^2.0.6", - "react-hotkeys-hook": "^4.6.1", "react-pdf-highlighter": "^8.0.0-rc.0", + "react-slider": "^2.0.6", "react-sortablejs": "^6.1.4", "react-syntax-highlighter": "^15.6.1", "react-tooltip": "5.8.3", @@ -194,4 +195,4 @@ "eslint --fix" ] } -} +} \ No newline at end of file diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index ef139d7426..0243ab2c56 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ -22,6 +22,9 @@ importers: '@emoji-mart/data': specifier: ^1.2.1 version: 1.2.1 + '@eslint/compat': + specifier: ^1.2.4 + version: 1.2.4(eslint@9.13.0(jiti@1.21.6)) '@floating-ui/react': specifier: ^0.26.25 version: 0.26.27(react-dom@18.2.0(react@18.2.0))(react@18.2.0) @@ -1425,8 +1428,8 @@ packages: '@eslint-react/var@1.15.0': resolution: {integrity: sha512-/QycKnbgZRygM/lhHtUFQrvvrswdOyaXfVxwtIFVEYoPHP9q7NaUn0mrBu4VWkXQC9zPk1nWQeC3rZMUxzretg==} - '@eslint/compat@1.2.1': - resolution: {integrity: sha512-JbHG2TWuCeNzh87fXo+/46Z1LEo9DBA9T188d0fZgGxAD+cNyS6sx9fdiyxjGPBMyQVRlCutTByZ6a5+YMkF7g==} + '@eslint/compat@1.2.4': + resolution: {integrity: sha512-S8ZdQj/N69YAtuqFt7653jwcvuUj131+6qGLUyDqfDg1OIoBQ66OCuXC473YQfO2AaxITTutiRQiDwoo7ZLYyg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^9.10.0 @@ -5790,9 +5793,6 @@ packages: resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} engines: {node: '>= 0.8.0'} - lexical@0.16.1: - resolution: {integrity: sha512-+R05d3+N945OY8pTUjTqQrWoApjC+ctzvjnmNETtx9WmVAaiW0tQVG+AYLt5pDGY8dQXtd4RPorvnxBTECt9SA==} - lexical@0.18.0: resolution: {integrity: sha512-3K/B0RpzjoW+Wj2E455wWXxkqxqK8UgdIiuqkOqdOsoSSo5mCkHOU6eVw7Nlmlr1MFvAMzGmz4RPn8NZaLQ2Mw==} @@ -9529,7 +9529,7 @@ snapshots: - supports-color - typescript - '@eslint/compat@1.2.1(eslint@9.13.0(jiti@1.21.6))': + '@eslint/compat@1.2.4(eslint@9.13.0(jiti@1.21.6))': optionalDependencies: eslint: 9.13.0(jiti@1.21.6) @@ -13143,7 +13143,7 @@ snapshots: eslint-config-flat-gitignore@0.3.0(eslint@9.13.0(jiti@1.21.6)): dependencies: - '@eslint/compat': 1.2.1(eslint@9.13.0(jiti@1.21.6)) + '@eslint/compat': 1.2.4(eslint@9.13.0(jiti@1.21.6)) eslint: 9.13.0(jiti@1.21.6) find-up-simple: 1.0.0 @@ -15177,8 +15177,6 @@ snapshots: prelude-ls: 1.2.1 
type-check: 0.4.0 - lexical@0.16.1: {} - lexical@0.18.0: {} lib0@0.2.98: diff --git a/web/public/screenshots/light/Agent.png b/web/public/screenshots/light/Agent.png new file mode 100644 index 0000000000..fe596a555f Binary files /dev/null and b/web/public/screenshots/light/Agent.png differ diff --git a/web/public/screenshots/light/Agent@2x.png b/web/public/screenshots/light/Agent@2x.png new file mode 100644 index 0000000000..dda71b29e9 Binary files /dev/null and b/web/public/screenshots/light/Agent@2x.png differ diff --git a/web/public/screenshots/light/Agent@3x.png b/web/public/screenshots/light/Agent@3x.png new file mode 100644 index 0000000000..0d05644eab Binary files /dev/null and b/web/public/screenshots/light/Agent@3x.png differ diff --git a/web/public/screenshots/light/Chatbot.png b/web/public/screenshots/light/Chatbot.png new file mode 100644 index 0000000000..b628a930fb Binary files /dev/null and b/web/public/screenshots/light/Chatbot.png differ diff --git a/web/public/screenshots/light/Chatbot@2x.png b/web/public/screenshots/light/Chatbot@2x.png new file mode 100644 index 0000000000..048a9f9036 Binary files /dev/null and b/web/public/screenshots/light/Chatbot@2x.png differ diff --git a/web/public/screenshots/light/Chatbot@3x.png b/web/public/screenshots/light/Chatbot@3x.png new file mode 100644 index 0000000000..9b7c1f5999 Binary files /dev/null and b/web/public/screenshots/light/Chatbot@3x.png differ diff --git a/web/public/screenshots/light/Chatflow.png b/web/public/screenshots/light/Chatflow.png new file mode 100644 index 0000000000..1753de7763 Binary files /dev/null and b/web/public/screenshots/light/Chatflow.png differ diff --git a/web/public/screenshots/light/Chatflow@2x.png b/web/public/screenshots/light/Chatflow@2x.png new file mode 100644 index 0000000000..6b72a8d732 Binary files /dev/null and b/web/public/screenshots/light/Chatflow@2x.png differ diff --git a/web/public/screenshots/light/Chatflow@3x.png b/web/public/screenshots/light/Chatflow@3x.png new file mode 100644 index 0000000000..7a059af6a4 Binary files /dev/null and b/web/public/screenshots/light/Chatflow@3x.png differ diff --git a/web/public/screenshots/light/TextGenerator.png b/web/public/screenshots/light/TextGenerator.png new file mode 100644 index 0000000000..14973451cc Binary files /dev/null and b/web/public/screenshots/light/TextGenerator.png differ diff --git a/web/public/screenshots/light/TextGenerator@2x.png b/web/public/screenshots/light/TextGenerator@2x.png new file mode 100644 index 0000000000..7e1baae97b Binary files /dev/null and b/web/public/screenshots/light/TextGenerator@2x.png differ diff --git a/web/public/screenshots/light/TextGenerator@3x.png b/web/public/screenshots/light/TextGenerator@3x.png new file mode 100644 index 0000000000..746e9ac1be Binary files /dev/null and b/web/public/screenshots/light/TextGenerator@3x.png differ diff --git a/web/public/screenshots/light/Workflow.png b/web/public/screenshots/light/Workflow.png new file mode 100644 index 0000000000..a82c9a6a4d Binary files /dev/null and b/web/public/screenshots/light/Workflow.png differ diff --git a/web/public/screenshots/light/Workflow@2x.png b/web/public/screenshots/light/Workflow@2x.png new file mode 100644 index 0000000000..0a1a19435b Binary files /dev/null and b/web/public/screenshots/light/Workflow@2x.png differ diff --git a/web/public/screenshots/light/Workflow@3x.png b/web/public/screenshots/light/Workflow@3x.png new file mode 100644 index 0000000000..914ce45003 Binary files /dev/null and 
b/web/public/screenshots/light/Workflow@3x.png differ diff --git a/web/service/use-plugins.ts b/web/service/use-plugins.ts index 66e55c6431..3feb66df8d 100644 --- a/web/service/use-plugins.ts +++ b/web/service/use-plugins.ts @@ -1,4 +1,4 @@ -import { useCallback, useState } from 'react' +import { useCallback } from 'react' import type { DebugInfo as DebugInfoTypes, Dependency, @@ -305,7 +305,8 @@ export const useMutationPluginsFromMarketplace = () => { page = 1, pageSize = 40, } = pluginsSearchParams - return postMarketplace<{ data: PluginsFromMarketplaceResponse }>('/plugins/search/basic', { + const pluginOrBundle = type === 'bundle' ? 'bundles' : 'plugins' + return postMarketplace<{ data: PluginsFromMarketplaceResponse }>(`/${pluginOrBundle}/search/basic`, { body: { page, page_size: pageSize, @@ -355,7 +356,6 @@ export const useFetchPluginsInMarketPlaceByInfo = (infos: Record[]) const usePluginTaskListKey = [NAME_SPACE, 'pluginTaskList'] export const usePluginTaskList = () => { - const [enabled, setEnabled] = useState(true) const { data, isFetched, @@ -363,20 +363,17 @@ export const usePluginTaskList = () => { ...rest } = useQuery({ queryKey: usePluginTaskListKey, - queryFn: async () => { - const currentData = await get<{ tasks: PluginTask[] }>('/workspaces/current/plugin/tasks?page=1&page_size=100') - const taskDone = currentData.tasks.every(task => task.status === TaskStatus.success || task.status === TaskStatus.failed) - + queryFn: () => get<{ tasks: PluginTask[] }>('/workspaces/current/plugin/tasks?page=1&page_size=100'), + refetchInterval: (lastQuery) => { + const lastData = lastQuery.state.data + const taskDone = lastData?.tasks.every(task => task.status === TaskStatus.success || task.status === TaskStatus.failed) if (taskDone) - setEnabled(false) + return false - return currentData + return 5000 }, - refetchInterval: 5000, - enabled, }) const handleRefetch = useCallback(() => { - setEnabled(true) refetch() }, [refetch])
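// Sketch of the polling change in the usePluginTaskList hunk above: instead of a local
// `enabled` flag that is flipped off when all tasks finish, the query's refetchInterval
// callback inspects the latest result and returns false (stop) or 5000 ms (keep polling).
// Types are simplified and `get` stands in for the service fetcher used by the real hook.
import { useQuery } from '@tanstack/react-query'

type TaskStatus = 'running' | 'success' | 'failed'
interface PluginTask { id: string, status: TaskStatus }

declare function get<T>(url: string): Promise<T>

export const usePluginTaskList = () => {
  return useQuery({
    queryKey: ['pluginTaskList'],
    queryFn: () => get<{ tasks: PluginTask[] }>('/workspaces/current/plugin/tasks?page=1&page_size=100'),
    refetchInterval: (query) => {
      const tasks = query.state.data?.tasks
      const allDone = tasks?.every(task => task.status === 'success' || task.status === 'failed')
      return allDone ? false : 5000 // stop polling once every task has settled
    },
  })
}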