diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml
index 28ef67a133..4b76f82375 100644
--- a/.github/workflows/api-tests.yml
+++ b/.github/workflows/api-tests.yml
@@ -1,6 +1,7 @@
 name: Run Pytest
 
 on:
+  workflow_call:
   pull_request:
     branches:
       - main
diff --git a/.github/workflows/db-migration-test.yml b/.github/workflows/db-migration-test.yml
index e8ff85e95c..25f37dec93 100644
--- a/.github/workflows/db-migration-test.yml
+++ b/.github/workflows/db-migration-test.yml
@@ -1,6 +1,7 @@
 name: DB Migration Test
 
 on:
+  workflow_call:
   pull_request:
     branches:
       - main
diff --git a/.github/workflows/main-ci.yml b/.github/workflows/main-ci.yml
new file mode 100644
index 0000000000..4cd1f8e738
--- /dev/null
+++ b/.github/workflows/main-ci.yml
@@ -0,0 +1,129 @@
+name: Main CI Pipeline
+
+on:
+  pull_request:
+    branches: [ "main" ]
+
+permissions:
+  contents: write
+  pull-requests: write
+  checks: write
+
+concurrency:
+  group: main-ci-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+jobs:
+  # First, run autofix if needed
+  autofix:
+    name: Auto-fix code issues
+    if: github.repository == 'langgenius/dify'
+    runs-on: ubuntu-latest
+    outputs:
+      changes-made: ${{ steps.check-changes.outputs.changes }}
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          token: ${{ secrets.GITHUB_TOKEN }}
+          ref: ${{ github.event.pull_request.head.ref }}
+
+      - uses: astral-sh/setup-uv@v6
+        with:
+          python-version: "3.12"
+
+      - name: Run Python fixes
+        run: |
+          cd api
+          uv sync --dev
+          # Fix lint errors
+          uv run ruff check --fix-only .
+          # Format code
+          uv run ruff format .
+
+      - name: Run ast-grep
+        run: |
+          uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all
+
+      - name: Run mdformat
+        run: |
+          uvx mdformat .
+
+      - name: Check for changes
+        id: check-changes
+        run: |
+          if [ -n "$(git diff --name-only)" ]; then
+            echo "changes=true" >> $GITHUB_OUTPUT
+          else
+            echo "changes=false" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Commit and push changes
+        if: steps.check-changes.outputs.changes == 'true'
+        run: |
+          git config --local user.email "action@github.com"
+          git config --local user.name "GitHub Action"
+          git add -A
+          git commit -m "Auto-fix: Apply code formatting and linting fixes"
+          git push
+
+  # Check which paths were changed to determine which tests to run
+  check-changes:
+    name: Check Changed Files
+    runs-on: ubuntu-latest
+    outputs:
+      api-changed: ${{ steps.changes.outputs.api }}
+      web-changed: ${{ steps.changes.outputs.web }}
+      vdb-changed: ${{ steps.changes.outputs.vdb }}
+      migration-changed: ${{ steps.changes.outputs.migration }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: dorny/paths-filter@v3
+        id: changes
+        with:
+          filters: |
+            api:
+              - 'api/**'
+              - 'docker/**'
+              - '.github/workflows/api-tests.yml'
+            web:
+              - 'web/**'
+            vdb:
+              - 'api/core/rag/datasource/**'
+              - 'docker/**'
+              - '.github/workflows/vdb-tests.yml'
+              - 'api/uv.lock'
+              - 'api/pyproject.toml'
+            migration:
+              - 'api/migrations/**'
+              - '.github/workflows/db-migration-test.yml'
+
+  # After autofix completes (or if no changes needed), run tests in parallel
+  api-tests:
+    name: API Tests
+    needs: [autofix, check-changes]
+    if: always() && !cancelled() && needs.check-changes.outputs.api-changed == 'true'
+    uses: ./.github/workflows/api-tests.yml
+
+  web-tests:
+    name: Web Tests
+    needs: [autofix, check-changes]
+    if: always() && !cancelled() && needs.check-changes.outputs.web-changed == 'true'
+    uses: ./.github/workflows/web-tests.yml
+
+  style-check:
+    name: Style Check
+    needs: autofix
+    if: always() && !cancelled()
+    uses: ./.github/workflows/style.yml
+
+  vdb-tests:
+    name: VDB Tests
+    needs: [autofix, check-changes]
+    if: always() && !cancelled() && needs.check-changes.outputs.vdb-changed == 'true'
+    uses: ./.github/workflows/vdb-tests.yml
+
+  db-migration-test:
+    name: DB Migration Test
+    needs: [autofix, check-changes]
+    if: always() && !cancelled() && needs.check-changes.outputs.migration-changed == 'true'
+    uses: ./.github/workflows/db-migration-test.yml
\ No newline at end of file
diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml
index 8d0ec35ca1..dd5bb74946 100644
--- a/.github/workflows/style.yml
+++ b/.github/workflows/style.yml
@@ -1,6 +1,7 @@
 name: Style check
 
 on:
+  workflow_call:
   pull_request:
     branches:
       - main
diff --git a/.github/workflows/vdb-tests.yml b/.github/workflows/vdb-tests.yml
index f2ca09fba2..b741df547e 100644
--- a/.github/workflows/vdb-tests.yml
+++ b/.github/workflows/vdb-tests.yml
@@ -1,6 +1,7 @@
 name: Run VDB Tests
 
 on:
+  workflow_call:
   pull_request:
     branches:
       - main
diff --git a/.github/workflows/web-tests.yml b/.github/workflows/web-tests.yml
index d104d69947..61f10d445d 100644
--- a/.github/workflows/web-tests.yml
+++ b/.github/workflows/web-tests.yml
@@ -1,6 +1,7 @@
 name: Web Tests
 
 on:
+  workflow_call:
   pull_request:
     branches:
       - main
diff --git a/api/controllers/console/__init__.py b/api/controllers/console/__init__.py
index e25f92399c..5ad7645969 100644
--- a/api/controllers/console/__init__.py
+++ b/api/controllers/console/__init__.py
@@ -70,7 +70,7 @@ from .app import (
 )
 
 # Import auth controllers
-from .auth import activate, data_source_bearer_auth, data_source_oauth, forgot_password, login, oauth
+from .auth import activate, data_source_bearer_auth, data_source_oauth, forgot_password, login, oauth, oauth_server
 
 # Import billing controllers
 from .billing import billing, compliance
diff --git a/api/controllers/console/auth/oauth_server.py b/api/controllers/console/auth/oauth_server.py
new file mode 100644
index 0000000000..19ca464a79
--- /dev/null
+++ b/api/controllers/console/auth/oauth_server.py
@@ -0,0 +1,189 @@
+from functools import wraps
+from typing import cast
+
+import flask_login
+from flask import request
+from flask_restx import Resource, reqparse
+from werkzeug.exceptions import BadRequest, NotFound
+
+from controllers.console.wraps import account_initialization_required, setup_required
+from core.model_runtime.utils.encoders import jsonable_encoder
+from libs.login import login_required
+from models.account import Account
+from models.model import OAuthProviderApp
+from services.oauth_server import OAUTH_ACCESS_TOKEN_EXPIRES_IN, OAuthGrantType, OAuthServerService
+
+from .. import api
+
+
+def oauth_server_client_id_required(view):
+    @wraps(view)
+    def decorated(*args, **kwargs):
+        parser = reqparse.RequestParser()
+        parser.add_argument("client_id", type=str, required=True, location="json")
+        parsed_args = parser.parse_args()
+        client_id = parsed_args.get("client_id")
+        if not client_id:
+            raise BadRequest("client_id is required")
+
+        oauth_provider_app = OAuthServerService.get_oauth_provider_app(client_id)
+        if not oauth_provider_app:
+            raise NotFound("client_id is invalid")
+
+        kwargs["oauth_provider_app"] = oauth_provider_app
+
+        return view(*args, **kwargs)
+
+    return decorated
+
+
+def oauth_server_access_token_required(view):
+    @wraps(view)
+    def decorated(*args, **kwargs):
+        oauth_provider_app = kwargs.get("oauth_provider_app")
+        if not oauth_provider_app or not isinstance(oauth_provider_app, OAuthProviderApp):
+            raise BadRequest("Invalid oauth_provider_app")
+
+        if not request.headers.get("Authorization"):
+            raise BadRequest("Authorization is required")
+
+        authorization_header = request.headers.get("Authorization")
+        if not authorization_header:
+            raise BadRequest("Authorization header is required")
+
+        parts = authorization_header.split(" ")
+        if len(parts) != 2:
+            raise BadRequest("Invalid Authorization header format")
+
+        token_type = parts[0]
+        if token_type != "Bearer":
+            raise BadRequest("token_type is invalid")
+
+        access_token = parts[1]
+        if not access_token:
+            raise BadRequest("access_token is required")
+
+        account = OAuthServerService.validate_oauth_access_token(oauth_provider_app.client_id, access_token)
+        if not account:
+            raise BadRequest("access_token or client_id is invalid")
+
+        kwargs["account"] = account
+
+        return view(*args, **kwargs)
+
+    return decorated
+
+
+class OAuthServerAppApi(Resource):
+    @setup_required
+    @oauth_server_client_id_required
+    def post(self, oauth_provider_app: OAuthProviderApp):
+        parser = reqparse.RequestParser()
+        parser.add_argument("redirect_uri", type=str, required=True, location="json")
+        parsed_args = parser.parse_args()
+        redirect_uri = parsed_args.get("redirect_uri")
+
+        # check if redirect_uri is valid
+        if redirect_uri not in oauth_provider_app.redirect_uris:
+            raise BadRequest("redirect_uri is invalid")
+
+        return jsonable_encoder(
+            {
+                "app_icon": oauth_provider_app.app_icon,
+                "app_label": oauth_provider_app.app_label,
+                "scope": oauth_provider_app.scope,
+            }
+        )
+
+
+class OAuthServerUserAuthorizeApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    @oauth_server_client_id_required
+    def post(self, oauth_provider_app: OAuthProviderApp):
+        account = cast(Account, flask_login.current_user)
+        user_account_id = account.id
+
+        code = OAuthServerService.sign_oauth_authorization_code(oauth_provider_app.client_id, user_account_id)
+        return jsonable_encoder(
+            {
+                "code": code,
+            }
+        )
+
+
+class OAuthServerUserTokenApi(Resource):
+    @setup_required
+    @oauth_server_client_id_required
+    def post(self, oauth_provider_app: OAuthProviderApp):
+        parser = reqparse.RequestParser()
+        parser.add_argument("grant_type", type=str, required=True, location="json")
+        parser.add_argument("code", type=str, required=False, location="json")
+        parser.add_argument("client_secret", type=str, required=False, location="json")
+        parser.add_argument("redirect_uri", type=str, required=False, location="json")
+        parser.add_argument("refresh_token", type=str, required=False, location="json")
+        parsed_args = parser.parse_args()
+
+        grant_type = OAuthGrantType(parsed_args["grant_type"])
+
+        if grant_type == OAuthGrantType.AUTHORIZATION_CODE:
+            if not parsed_args["code"]:
+                raise BadRequest("code is required")
+
+            if parsed_args["client_secret"] != oauth_provider_app.client_secret:
+                raise BadRequest("client_secret is invalid")
+
+            if parsed_args["redirect_uri"] not in oauth_provider_app.redirect_uris:
+                raise BadRequest("redirect_uri is invalid")
+
+            access_token, refresh_token = OAuthServerService.sign_oauth_access_token(
+                grant_type, code=parsed_args["code"], client_id=oauth_provider_app.client_id
+            )
+            return jsonable_encoder(
+                {
+                    "access_token": access_token,
+                    "token_type": "Bearer",
+                    "expires_in": OAUTH_ACCESS_TOKEN_EXPIRES_IN,
+                    "refresh_token": refresh_token,
+                }
+            )
+        elif grant_type == OAuthGrantType.REFRESH_TOKEN:
+            if not parsed_args["refresh_token"]:
+                raise BadRequest("refresh_token is required")
+
+            access_token, refresh_token = OAuthServerService.sign_oauth_access_token(
+                grant_type, refresh_token=parsed_args["refresh_token"], client_id=oauth_provider_app.client_id
+            )
+            return jsonable_encoder(
+                {
+                    "access_token": access_token,
+                    "token_type": "Bearer",
+                    "expires_in": OAUTH_ACCESS_TOKEN_EXPIRES_IN,
+                    "refresh_token": refresh_token,
+                }
+            )
+        else:
+            raise BadRequest("invalid grant_type")
+
+
+class OAuthServerUserAccountApi(Resource):
+    @setup_required
+    @oauth_server_client_id_required
+    @oauth_server_access_token_required
+    def post(self, oauth_provider_app: OAuthProviderApp, account: Account):
+        return jsonable_encoder(
+            {
+                "name": account.name,
+                "email": account.email,
+                "avatar": account.avatar,
+                "interface_language": account.interface_language,
+                "timezone": account.timezone,
+            }
+        )
+
+
+api.add_resource(OAuthServerAppApi, "/oauth/provider")
+api.add_resource(OAuthServerUserAuthorizeApi, "/oauth/provider/authorize")
+api.add_resource(OAuthServerUserTokenApi, "/oauth/provider/token")
+api.add_resource(OAuthServerUserAccountApi, "/oauth/provider/account")
diff --git a/api/controllers/service_api/dataset/dataset.py b/api/controllers/service_api/dataset/dataset.py
index d52db445ca..f676374e5f 100644
--- a/api/controllers/service_api/dataset/dataset.py
+++ b/api/controllers/service_api/dataset/dataset.py
@@ -318,10 +318,6 @@ class DatasetApi(DatasetApiResource):
         except services.errors.account.NoPermissionError as e:
             raise Forbidden(str(e))
         data = marshal(dataset, dataset_detail_fields)
-        if data.get("permission") == "partial_members":
-            part_users_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str)
-            data.update({"partial_member_list": part_users_list})
-
         # check embedding setting
         provider_manager = ProviderManager()
         assert isinstance(current_user, Account)
diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py
index ba1fef27b0..3dc08a0b23 100644
--- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py
+++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py
@@ -1,4 +1,5 @@
 import logging
+import re
 import time
 from collections.abc import Callable, Generator, Mapping
 from contextlib import contextmanager
@@ -368,7 +369,7 @@ class AdvancedChatAppGenerateTaskPipeline:
     ) -> Generator[StreamResponse, None, None]:
         """Handle node succeeded events."""
         # Record files if it's an answer node or end node
-        if event.node_type in [NodeType.ANSWER, NodeType.END]:
+        if event.node_type in [NodeType.ANSWER, NodeType.END, NodeType.LLM]:
             self._recorded_files.extend(
                 self._workflow_response_converter.fetch_files_from_node_outputs(event.outputs or {})
             )
@@ -843,7 +844,14 @@
 
     def _save_message(self, *, session: Session, graph_runtime_state: Optional[GraphRuntimeState] = None) -> None:
         message = self._get_message(session=session)
-        message.answer = self._task_state.answer
+
+        # If there are assistant files, remove markdown image links from answer
+        answer_text = self._task_state.answer
+        if self._recorded_files:
+            # Remove markdown image links since we're storing files separately
+            answer_text = re.sub(r"!\[.*?\]\(.*?\)", "", answer_text).strip()
+
+        message.answer = answer_text
         message.updated_at = naive_utc_now()
         message.provider_response_latency = time.perf_counter() - self._base_task_pipeline._start_at
         message.message_metadata = self._task_state.metadata.model_dump_json()
diff --git a/api/core/app/apps/base_app_generator.py b/api/core/app/apps/base_app_generator.py
index 4ca8bc2c10..9d7317da36 100644
--- a/api/core/app/apps/base_app_generator.py
+++ b/api/core/app/apps/base_app_generator.py
@@ -1,4 +1,3 @@
-import json
 from collections.abc import Generator, Mapping, Sequence
 from typing import TYPE_CHECKING, Any, Optional, Union, final
 
@@ -14,6 +13,7 @@ from core.workflow.repositories.draft_variable_repository import (
     NoopDraftVariableSaver,
 )
 from factories import file_factory
+from libs.orjson import orjson_dumps
 from services.workflow_draft_variable_service import DraftVariableSaver as DraftVariableSaverImpl
 
 if TYPE_CHECKING:
@@ -174,7 +174,7 @@ class BaseAppGenerator:
         def gen():
             for message in generator:
                 if isinstance(message, Mapping | dict):
-                    yield f"data: {json.dumps(message)}\n\n"
+                    yield f"data: {orjson_dumps(message)}\n\n"
                 else:
                     yield f"event: {message}\n\n"
diff --git a/api/core/file/file_manager.py b/api/core/file/file_manager.py
index 770014aa72..e3fd175d95 100644
--- a/api/core/file/file_manager.py
+++ b/api/core/file/file_manager.py
@@ -88,6 +88,7 @@ def to_prompt_message_content(
         "url": _to_url(f) if dify_config.MULTIMODAL_SEND_FORMAT == "url" else "",
         "format": f.extension.removeprefix("."),
         "mime_type": f.mime_type,
+        "filename": f.filename or "",
     }
     if f.type == FileType.IMAGE:
         params["detail"] = image_detail_config or ImagePromptMessageContent.DETAIL.LOW
diff --git a/api/core/memory/token_buffer_memory.py b/api/core/memory/token_buffer_memory.py
index 4a0db9d092..ab12b1a846 100644
--- a/api/core/memory/token_buffer_memory.py
+++ b/api/core/memory/token_buffer_memory.py
@@ -32,6 +32,65 @@ class TokenBufferMemory:
         self.conversation = conversation
         self.model_instance = model_instance
 
+    def _build_prompt_message_with_files(
+        self, message_files: list[MessageFile], text_content: str, message: Message, app_record, is_user_message: bool
+    ) -> PromptMessage:
+        """
+        Build prompt message with files.
+        :param message_files: list of MessageFile objects
+        :param text_content: text content of the message
+        :param message: Message object
+        :param app_record: app record
+        :param is_user_message: whether this is a user message
+        :return: PromptMessage
+        """
+        if self.conversation.mode in {AppMode.AGENT_CHAT, AppMode.COMPLETION, AppMode.CHAT}:
+            file_extra_config = FileUploadConfigManager.convert(self.conversation.model_config)
+        elif self.conversation.mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}:
+            workflow_run = db.session.scalar(select(WorkflowRun).where(WorkflowRun.id == message.workflow_run_id))
+            if not workflow_run:
+                raise ValueError(f"Workflow run not found: {message.workflow_run_id}")
+            workflow = db.session.scalar(select(Workflow).where(Workflow.id == workflow_run.workflow_id))
+            if not workflow:
+                raise ValueError(f"Workflow not found: {workflow_run.workflow_id}")
+            file_extra_config = FileUploadConfigManager.convert(workflow.features_dict, is_vision=False)
+        else:
+            raise AssertionError(f"Invalid app mode: {self.conversation.mode}")
+
+        detail = ImagePromptMessageContent.DETAIL.HIGH
+        if file_extra_config and app_record:
+            # Build files directly without filtering by belongs_to
+            file_objs = [
+                file_factory.build_from_message_file(
+                    message_file=message_file, tenant_id=app_record.tenant_id, config=file_extra_config
+                )
+                for message_file in message_files
+            ]
+            if file_extra_config.image_config and file_extra_config.image_config.detail:
+                detail = file_extra_config.image_config.detail
+        else:
+            file_objs = []
+
+        if not file_objs:
+            if is_user_message:
+                return UserPromptMessage(content=text_content)
+            else:
+                return AssistantPromptMessage(content=text_content)
+        else:
+            prompt_message_contents: list[PromptMessageContentUnionTypes] = []
+            for file in file_objs:
+                prompt_message = file_manager.to_prompt_message_content(
+                    file,
+                    image_detail_config=detail,
+                )
+                prompt_message_contents.append(prompt_message)
+            prompt_message_contents.append(TextPromptMessageContent(data=text_content))
+
+            if is_user_message:
+                return UserPromptMessage(content=prompt_message_contents)
+            else:
+                return AssistantPromptMessage(content=prompt_message_contents)
+
     def get_history_prompt_messages(
         self, max_token_limit: int = 2000, message_limit: Optional[int] = None
     ) -> Sequence[PromptMessage]:
@@ -40,82 +99,73 @@
         :param max_token_limit: max token limit
         :param message_limit: message limit
         """
-        with Session(db.engine) as session:
-            app_record = self.conversation.app
+        app_record = self.conversation.app
 
-            # fetch limited messages, and return reversed
-            stmt = (
-                select(Message)
-                .where(Message.conversation_id == self.conversation.id)
-                .order_by(Message.created_at.desc())
+        # fetch limited messages, and return reversed
+        stmt = (
+            select(Message).where(Message.conversation_id == self.conversation.id).order_by(Message.created_at.desc())
+        )
+
+        if message_limit and message_limit > 0:
+            message_limit = min(message_limit, 500)
+        else:
+            message_limit = 500
+
+        stmt = stmt.limit(message_limit)
+
+        messages = db.session.scalars(stmt).all()
+
+        # instead of all messages from the conversation, we only need to extract messages
+        # that belong to the thread of last message
+        thread_messages = extract_thread_messages(messages)
+
+        # for newly created message, its answer is temporarily empty, we don't need to add it to memory
+        if thread_messages and not thread_messages[0].answer and thread_messages[0].answer_tokens == 0:
+            thread_messages.pop(0)
+
+        messages = list(reversed(thread_messages))
+
+        prompt_messages: list[PromptMessage] = []
+        for message in messages:
+            # Process user message with files
+            user_files = (
+                db.session.query(MessageFile)
+                .where(
+                    MessageFile.message_id == message.id,
+                    (MessageFile.belongs_to == "user") | (MessageFile.belongs_to.is_(None)),
+                )
+                .all()
             )
 
-            if message_limit and message_limit > 0:
-                message_limit = min(message_limit, 500)
+            if user_files:
+                user_prompt_message = self._build_prompt_message_with_files(
+                    message_files=user_files,
+                    text_content=message.query,
+                    message=message,
+                    app_record=app_record,
+                    is_user_message=True,
+                )
+                prompt_messages.append(user_prompt_message)
             else:
-                message_limit = 500
+                prompt_messages.append(UserPromptMessage(content=message.query))
 
-            stmt = stmt.limit(message_limit)
-
-            messages = session.scalars(stmt).all()
-
-            # instead of all messages from the conversation, we only need to extract messages
-            # that belong to the thread of last message
-            thread_messages = extract_thread_messages(messages)
-
-            # for newly created message, its answer is temporarily empty, we don't need to add it to memory
-            if thread_messages and not thread_messages[0].answer and thread_messages[0].answer_tokens == 0:
-                thread_messages.pop(0)
-
-            messages = list(reversed(thread_messages))
-
-            prompt_messages: list[PromptMessage] = []
-            for message in messages:
-                files = session.query(MessageFile).where(MessageFile.message_id == message.id).all()
-                if files:
-                    file_extra_config = None
-                    if self.conversation.mode in {AppMode.AGENT_CHAT, AppMode.COMPLETION, AppMode.CHAT}:
-                        file_extra_config = FileUploadConfigManager.convert(self.conversation.model_config)
-                    elif self.conversation.mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}:
-                        workflow_run = session.scalar(
-                            select(WorkflowRun).where(WorkflowRun.id == message.workflow_run_id)
-                        )
-                        if not workflow_run:
-                            raise ValueError(f"Workflow run not found: {message.workflow_run_id}")
-                        workflow = session.scalar(select(Workflow).where(Workflow.id == workflow_run.workflow_id))
-                        if not workflow:
-                            raise ValueError(f"Workflow not found: {workflow_run.workflow_id}")
-                        file_extra_config = FileUploadConfigManager.convert(workflow.features_dict, is_vision=False)
-                    else:
-                        raise AssertionError(f"Invalid app mode: {self.conversation.mode}")
-
-                    detail = ImagePromptMessageContent.DETAIL.LOW
-                    if file_extra_config and app_record:
-                        file_objs = file_factory.build_from_message_files(
-                            message_files=files, tenant_id=app_record.tenant_id, config=file_extra_config
-                        )
-                        if file_extra_config.image_config and file_extra_config.image_config.detail:
-                            detail = file_extra_config.image_config.detail
-                    else:
-                        file_objs = []
-
-                    if not file_objs:
-                        prompt_messages.append(UserPromptMessage(content=message.query))
-                    else:
-                        prompt_message_contents: list[PromptMessageContentUnionTypes] = []
-                        for file in file_objs:
-                            prompt_message = file_manager.to_prompt_message_content(
-                                file,
-                                image_detail_config=detail,
-                            )
-                            prompt_message_contents.append(prompt_message)
-                        prompt_message_contents.append(TextPromptMessageContent(data=message.query))
-
-                        prompt_messages.append(UserPromptMessage(content=prompt_message_contents))
-
-                else:
-                    prompt_messages.append(UserPromptMessage(content=message.query))
+            # Process assistant message with files
+            assistant_files = (
+                db.session.query(MessageFile)
+                .where(MessageFile.message_id == message.id, MessageFile.belongs_to == "assistant")
+                .all()
+            )
+            if assistant_files:
+                assistant_prompt_message = self._build_prompt_message_with_files(
+                    message_files=assistant_files,
+                    text_content=message.answer,
+                    message=message,
+                    app_record=app_record,
+                    is_user_message=False,
+                )
+                prompt_messages.append(assistant_prompt_message)
+            else:
                 prompt_messages.append(AssistantPromptMessage(content=message.answer))
 
         if not prompt_messages:
diff --git a/api/core/model_runtime/entities/message_entities.py b/api/core/model_runtime/entities/message_entities.py
index 83dc7f0525..7cd2e6a3d1 100644
--- a/api/core/model_runtime/entities/message_entities.py
+++ b/api/core/model_runtime/entities/message_entities.py
@@ -87,6 +87,7 @@ class MultiModalPromptMessageContent(PromptMessageContent):
     base64_data: str = Field(default="", description="the base64 data of multi-modal file")
     url: str = Field(default="", description="the url of multi-modal file")
     mime_type: str = Field(default=..., description="the mime type of multi-modal file")
+    filename: str = Field(default="", description="the filename of multi-modal file")
 
     @property
     def data(self):
diff --git a/api/factories/file_factory.py b/api/factories/file_factory.py
index a0ff33ab65..0ea7d3ae1e 100644
--- a/api/factories/file_factory.py
+++ b/api/factories/file_factory.py
@@ -41,8 +41,14 @@ def build_from_message_file(
         "url": message_file.url,
         "id": message_file.id,
         "type": message_file.type,
-        "upload_file_id": message_file.upload_file_id,
     }
+
+    # Set the correct ID field based on transfer method
+    if message_file.transfer_method == FileTransferMethod.TOOL_FILE.value:
+        mapping["tool_file_id"] = message_file.upload_file_id
+    else:
+        mapping["upload_file_id"] = message_file.upload_file_id
+
     return build_from_mapping(
         mapping=mapping,
         tenant_id=tenant_id,
@@ -318,6 +324,11 @@ def _is_file_valid_with_config(
     file_transfer_method: FileTransferMethod,
     config: FileUploadConfig,
 ) -> bool:
+    # FIXME(QIN2DIM): Always allow tool files (files generated by the assistant/model)
+    # These are internally generated and should bypass user upload restrictions
+    if file_transfer_method == FileTransferMethod.TOOL_FILE:
+        return True
+
     if (
         config.allowed_file_types
        and input_file_type not in config.allowed_file_types
diff --git a/api/libs/orjson.py b/api/libs/orjson.py
new file mode 100644
index 0000000000..2fc5ce8dd3
--- /dev/null
+++ b/api/libs/orjson.py
@@ -0,0 +1,11 @@
+from typing import Any, Optional
+
+import orjson
+
+
+def orjson_dumps(
+    obj: Any,
+    encoding: str = "utf-8",
+    option: Optional[int] = None,
+) -> str:
+    return orjson.dumps(obj, option=option).decode(encoding)
diff --git a/api/migrations/versions/2025_08_20_1747-8d289573e1da_add_oauth_provider_apps.py b/api/migrations/versions/2025_08_20_1747-8d289573e1da_add_oauth_provider_apps.py
new file mode 100644
index 0000000000..5986853f01
--- /dev/null
+++ b/api/migrations/versions/2025_08_20_1747-8d289573e1da_add_oauth_provider_apps.py
@@ -0,0 +1,45 @@
+"""empty message
+
+Revision ID: 8d289573e1da
+Revises: 0e154742a5fa
+Create Date: 2025-08-20 17:47:17.015695
+
+"""
+from alembic import op
+import models as models
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '8d289573e1da'
+down_revision = '0e154742a5fa'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('oauth_provider_apps',
+    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
+    sa.Column('app_icon', sa.String(length=255), nullable=False),
+    sa.Column('app_label', sa.JSON(), server_default='{}', nullable=False),
+    sa.Column('client_id', sa.String(length=255), nullable=False),
+    sa.Column('client_secret', sa.String(length=255), nullable=False),
+    sa.Column('redirect_uris', sa.JSON(), server_default='[]', nullable=False),
+    sa.Column('scope', sa.String(length=255), server_default=sa.text("'read:name read:email read:avatar read:interface_language read:timezone'"), nullable=False),
+    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
+    sa.PrimaryKeyConstraint('id', name='oauth_provider_app_pkey')
+    )
+    with op.batch_alter_table('oauth_provider_apps', schema=None) as batch_op:
+        batch_op.create_index('oauth_provider_app_client_id_idx', ['client_id'], unique=False)
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('oauth_provider_apps', schema=None) as batch_op:
+        batch_op.drop_index('oauth_provider_app_client_id_idx')
+
+    op.drop_table('oauth_provider_apps')
+    # ### end Alembic commands ###
diff --git a/api/models/model.py b/api/models/model.py
index 4512c72137..cd4bc44451 100644
--- a/api/models/model.py
+++ b/api/models/model.py
@@ -580,6 +580,32 @@ class InstalledApp(Base):
         return tenant
 
 
+class OAuthProviderApp(Base):
+    """
+    Globally shared OAuth provider app information.
+    Only for Dify Cloud.
+    """
+
+    __tablename__ = "oauth_provider_apps"
+    __table_args__ = (
+        sa.PrimaryKeyConstraint("id", name="oauth_provider_app_pkey"),
+        sa.Index("oauth_provider_app_client_id_idx", "client_id"),
+    )
+
+    id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
+    app_icon = mapped_column(String(255), nullable=False)
+    app_label = mapped_column(sa.JSON, nullable=False, server_default="{}")
+    client_id = mapped_column(String(255), nullable=False)
+    client_secret = mapped_column(String(255), nullable=False)
+    redirect_uris = mapped_column(sa.JSON, nullable=False, server_default="[]")
+    scope = mapped_column(
+        String(255),
+        nullable=False,
+        server_default=sa.text("'read:name read:email read:avatar read:interface_language read:timezone'"),
+    )
+    created_at = mapped_column(sa.DateTime, nullable=False, server_default=sa.text("CURRENT_TIMESTAMP(0)"))
+
+
 class Conversation(Base):
     __tablename__ = "conversations"
     __table_args__ = (
diff --git a/api/services/oauth_server.py b/api/services/oauth_server.py
new file mode 100644
index 0000000000..b722dbee22
--- /dev/null
+++ b/api/services/oauth_server.py
@@ -0,0 +1,94 @@
+import enum
+import uuid
+
+from sqlalchemy import select
+from sqlalchemy.orm import Session
+from werkzeug.exceptions import BadRequest
+
+from extensions.ext_database import db
+from extensions.ext_redis import redis_client
+from models.account import Account
+from models.model import OAuthProviderApp
+from services.account_service import AccountService
+
+
+class OAuthGrantType(enum.StrEnum):
+    AUTHORIZATION_CODE = "authorization_code"
+    REFRESH_TOKEN = "refresh_token"
+
+
+OAUTH_AUTHORIZATION_CODE_REDIS_KEY = "oauth_provider:{client_id}:authorization_code:{code}"
+OAUTH_ACCESS_TOKEN_REDIS_KEY = "oauth_provider:{client_id}:access_token:{token}"
+OAUTH_ACCESS_TOKEN_EXPIRES_IN = 60 * 60 * 12  # 12 hours
+OAUTH_REFRESH_TOKEN_REDIS_KEY = "oauth_provider:{client_id}:refresh_token:{token}"
+OAUTH_REFRESH_TOKEN_EXPIRES_IN = 60 * 60 * 24 * 30  # 30 days
+
+
+class OAuthServerService:
+    @staticmethod
+    def get_oauth_provider_app(client_id: str) -> OAuthProviderApp | None:
+        query = select(OAuthProviderApp).where(OAuthProviderApp.client_id == client_id)
+
+        with Session(db.engine) as session:
+            return session.execute(query).scalar_one_or_none()
+
+    @staticmethod
+    def sign_oauth_authorization_code(client_id: str, user_account_id: str) -> str:
+        code = str(uuid.uuid4())
+        redis_key = OAUTH_AUTHORIZATION_CODE_REDIS_KEY.format(client_id=client_id, code=code)
+        redis_client.set(redis_key, user_account_id, ex=60 * 10)  # 10 minutes
+        return code
+
+    @staticmethod
+    def sign_oauth_access_token(
+        grant_type: OAuthGrantType,
+        code: str = "",
+        client_id: str = "",
+        refresh_token: str = "",
+    ) -> tuple[str, str]:
+        match grant_type:
+            case OAuthGrantType.AUTHORIZATION_CODE:
+                redis_key = OAUTH_AUTHORIZATION_CODE_REDIS_KEY.format(client_id=client_id, code=code)
+                user_account_id = redis_client.get(redis_key)
+                if not user_account_id:
+                    raise BadRequest("invalid code")
+
+                # delete code
+                redis_client.delete(redis_key)
+
+                access_token = OAuthServerService._sign_oauth_access_token(client_id, user_account_id)
+                refresh_token = OAuthServerService._sign_oauth_refresh_token(client_id, user_account_id)
+                return access_token, refresh_token
+            case OAuthGrantType.REFRESH_TOKEN:
+                redis_key = OAUTH_REFRESH_TOKEN_REDIS_KEY.format(client_id=client_id, token=refresh_token)
+                user_account_id = redis_client.get(redis_key)
+                if not user_account_id:
+                    raise BadRequest("invalid refresh token")
+
+                access_token = OAuthServerService._sign_oauth_access_token(client_id, user_account_id)
+                return access_token, refresh_token
+
+    @staticmethod
+    def _sign_oauth_access_token(client_id: str, user_account_id: str) -> str:
+        token = str(uuid.uuid4())
+        redis_key = OAUTH_ACCESS_TOKEN_REDIS_KEY.format(client_id=client_id, token=token)
+        redis_client.set(redis_key, user_account_id, ex=OAUTH_ACCESS_TOKEN_EXPIRES_IN)
+        return token
+
+    @staticmethod
+    def _sign_oauth_refresh_token(client_id: str, user_account_id: str) -> str:
+        token = str(uuid.uuid4())
+        redis_key = OAUTH_REFRESH_TOKEN_REDIS_KEY.format(client_id=client_id, token=token)
+        redis_client.set(redis_key, user_account_id, ex=OAUTH_REFRESH_TOKEN_EXPIRES_IN)
+        return token
+
+    @staticmethod
+    def validate_oauth_access_token(client_id: str, token: str) -> Account | None:
+        redis_key = OAUTH_ACCESS_TOKEN_REDIS_KEY.format(client_id=client_id, token=token)
+        user_account_id = redis_client.get(redis_key)
+        if not user_account_id:
+            return None
+
+        user_id_str = user_account_id.decode("utf-8")
+
+        return AccountService.load_user(user_id_str)
diff --git a/api/uv.lock b/api/uv.lock
index 96016e665a..0ac55d9394 100644
--- a/api/uv.lock
+++ b/api/uv.lock
@@ -3963,40 +3963,40 @@ wheels = [
 
 [[package]]
 name = "orjson"
-version = "3.10.18"
+version = "3.11.3"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/81/0b/fea456a3ffe74e70ba30e01ec183a9b26bec4d497f61dcfce1b601059c60/orjson-3.10.18.tar.gz", hash = "sha256:e8da3947d92123eda795b68228cafe2724815621fe35e8e320a9e9593a4bcd53", size = 5422810 }
+sdist = { url = "https://files.pythonhosted.org/packages/be/4d/8df5f83256a809c22c4d6792ce8d43bb503be0fb7a8e4da9025754b09658/orjson-3.11.3.tar.gz", hash = "sha256:1c0603b1d2ffcd43a411d64797a19556ef76958aef1c182f22dc30860152a98a", size = 5482394, upload-time = "2025-08-26T17:46:43.171Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/97/c7/c54a948ce9a4278794f669a353551ce7db4ffb656c69a6e1f2264d563e50/orjson-3.10.18-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e0a183ac3b8e40471e8d843105da6fbe7c070faab023be3b08188ee3f85719b8", size = 248929 },
-    { url = "https://files.pythonhosted.org/packages/9e/60/a9c674ef1dd8ab22b5b10f9300e7e70444d4e3cda4b8258d6c2488c32143/orjson-3.10.18-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:5ef7c164d9174362f85238d0cd4afdeeb89d9e523e4651add6a5d458d6f7d42d", size = 133364 },
-    { url = "https://files.pythonhosted.org/packages/c1/4e/f7d1bdd983082216e414e6d7ef897b0c2957f99c545826c06f371d52337e/orjson-3.10.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afd14c5d99cdc7bf93f22b12ec3b294931518aa019e2a147e8aa2f31fd3240f7", size = 136995 },
-    { url = "https://files.pythonhosted.org/packages/17/89/46b9181ba0ea251c9243b0c8ce29ff7c9796fa943806a9c8b02592fce8ea/orjson-3.10.18-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b672502323b6cd133c4af6b79e3bea36bad2d16bca6c1f645903fce83909a7a", size = 132894 },
-    { url = "https://files.pythonhosted.org/packages/ca/dd/7bce6fcc5b8c21aef59ba3c67f2166f0a1a9b0317dcca4a9d5bd7934ecfd/orjson-3.10.18-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51f8c63be6e070ec894c629186b1c0fe798662b8687f3d9fdfa5e401c6bd7679", size = 137016 },
-    { url = "https://files.pythonhosted.org/packages/1c/4a/b8aea1c83af805dcd31c1f03c95aabb3e19a016b2a4645dd822c5686e94d/orjson-3.10.18-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f9478ade5313d724e0495d167083c6f3be0dd2f1c9c8a38db9a9e912cdaf947", size = 138290 },
-    { url = "https://files.pythonhosted.org/packages/36/d6/7eb05c85d987b688707f45dcf83c91abc2251e0dd9fb4f7be96514f838b1/orjson-3.10.18-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:187aefa562300a9d382b4b4eb9694806e5848b0cedf52037bb5c228c61bb66d4", size = 142829 },
-    { url = "https://files.pythonhosted.org/packages/d2/78/ddd3ee7873f2b5f90f016bc04062713d567435c53ecc8783aab3a4d34915/orjson-3.10.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da552683bc9da222379c7a01779bddd0ad39dd699dd6300abaf43eadee38334", size = 132805 },
-    { url = "https://files.pythonhosted.org/packages/8c/09/c8e047f73d2c5d21ead9c180203e111cddeffc0848d5f0f974e346e21c8e/orjson-3.10.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e450885f7b47a0231979d9c49b567ed1c4e9f69240804621be87c40bc9d3cf17", size = 135008 },
-    { url = "https://files.pythonhosted.org/packages/0c/4b/dccbf5055ef8fb6eda542ab271955fc1f9bf0b941a058490293f8811122b/orjson-3.10.18-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:5e3c9cc2ba324187cd06287ca24f65528f16dfc80add48dc99fa6c836bb3137e", size = 413419 },
-    { url = "https://files.pythonhosted.org/packages/8a/f3/1eac0c5e2d6d6790bd2025ebfbefcbd37f0d097103d76f9b3f9302af5a17/orjson-3.10.18-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:50ce016233ac4bfd843ac5471e232b865271d7d9d44cf9d33773bcd883ce442b", size = 153292 },
-    { url = "https://files.pythonhosted.org/packages/1f/b4/ef0abf64c8f1fabf98791819ab502c2c8c1dc48b786646533a93637d8999/orjson-3.10.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b3ceff74a8f7ffde0b2785ca749fc4e80e4315c0fd887561144059fb1c138aa7", size = 137182 },
-    { url = "https://files.pythonhosted.org/packages/a9/a3/6ea878e7b4a0dc5c888d0370d7752dcb23f402747d10e2257478d69b5e63/orjson-3.10.18-cp311-cp311-win32.whl", hash = "sha256:fdba703c722bd868c04702cac4cb8c6b8ff137af2623bc0ddb3b3e6a2c8996c1", size = 142695 },
-    { url = "https://files.pythonhosted.org/packages/79/2a/4048700a3233d562f0e90d5572a849baa18ae4e5ce4c3ba6247e4ece57b0/orjson-3.10.18-cp311-cp311-win_amd64.whl", hash = "sha256:c28082933c71ff4bc6ccc82a454a2bffcef6e1d7379756ca567c772e4fb3278a", size = 134603 },
-    { url = "https://files.pythonhosted.org/packages/03/45/10d934535a4993d27e1c84f1810e79ccf8b1b7418cef12151a22fe9bb1e1/orjson-3.10.18-cp311-cp311-win_arm64.whl", hash = "sha256:a6c7c391beaedd3fa63206e5c2b7b554196f14debf1ec9deb54b5d279b1b46f5", size = 131400 },
-    { url = "https://files.pythonhosted.org/packages/21/1a/67236da0916c1a192d5f4ccbe10ec495367a726996ceb7614eaa687112f2/orjson-3.10.18-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:50c15557afb7f6d63bc6d6348e0337a880a04eaa9cd7c9d569bcb4e760a24753", size = 249184 },
-    { url = "https://files.pythonhosted.org/packages/b3/bc/c7f1db3b1d094dc0c6c83ed16b161a16c214aaa77f311118a93f647b32dc/orjson-3.10.18-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:356b076f1662c9813d5fa56db7d63ccceef4c271b1fb3dd522aca291375fcf17", size = 133279 },
-    { url = "https://files.pythonhosted.org/packages/af/84/664657cd14cc11f0d81e80e64766c7ba5c9b7fc1ec304117878cc1b4659c/orjson-3.10.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:559eb40a70a7494cd5beab2d73657262a74a2c59aff2068fdba8f0424ec5b39d", size = 136799 },
-    { url = "https://files.pythonhosted.org/packages/9a/bb/f50039c5bb05a7ab024ed43ba25d0319e8722a0ac3babb0807e543349978/orjson-3.10.18-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f3c29eb9a81e2fbc6fd7ddcfba3e101ba92eaff455b8d602bf7511088bbc0eae", size = 132791 },
-    { url = "https://files.pythonhosted.org/packages/93/8c/ee74709fc072c3ee219784173ddfe46f699598a1723d9d49cbc78d66df65/orjson-3.10.18-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6612787e5b0756a171c7d81ba245ef63a3533a637c335aa7fcb8e665f4a0966f", size = 137059 },
-    { url = "https://files.pythonhosted.org/packages/6a/37/e6d3109ee004296c80426b5a62b47bcadd96a3deab7443e56507823588c5/orjson-3.10.18-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ac6bd7be0dcab5b702c9d43d25e70eb456dfd2e119d512447468f6405b4a69c", size = 138359 },
-    { url = "https://files.pythonhosted.org/packages/4f/5d/387dafae0e4691857c62bd02839a3bf3fa648eebd26185adfac58d09f207/orjson-3.10.18-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9f72f100cee8dde70100406d5c1abba515a7df926d4ed81e20a9730c062fe9ad", size = 142853 },
-    { url = "https://files.pythonhosted.org/packages/27/6f/875e8e282105350b9a5341c0222a13419758545ae32ad6e0fcf5f64d76aa/orjson-3.10.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dca85398d6d093dd41dc0983cbf54ab8e6afd1c547b6b8a311643917fbf4e0c", size = 133131 },
-    { url = "https://files.pythonhosted.org/packages/48/b2/73a1f0b4790dcb1e5a45f058f4f5dcadc8a85d90137b50d6bbc6afd0ae50/orjson-3.10.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22748de2a07fcc8781a70edb887abf801bb6142e6236123ff93d12d92db3d406", size = 134834 },
-    { url = "https://files.pythonhosted.org/packages/56/f5/7ed133a5525add9c14dbdf17d011dd82206ca6840811d32ac52a35935d19/orjson-3.10.18-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:3a83c9954a4107b9acd10291b7f12a6b29e35e8d43a414799906ea10e75438e6", size = 413368 },
-    { url = "https://files.pythonhosted.org/packages/11/7c/439654221ed9c3324bbac7bdf94cf06a971206b7b62327f11a52544e4982/orjson-3.10.18-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:303565c67a6c7b1f194c94632a4a39918e067bd6176a48bec697393865ce4f06", size = 153359 },
-    { url = "https://files.pythonhosted.org/packages/48/e7/d58074fa0cc9dd29a8fa2a6c8d5deebdfd82c6cfef72b0e4277c4017563a/orjson-3.10.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:86314fdb5053a2f5a5d881f03fca0219bfdf832912aa88d18676a5175c6916b5", size = 137466 },
-    { url = "https://files.pythonhosted.org/packages/57/4d/fe17581cf81fb70dfcef44e966aa4003360e4194d15a3f38cbffe873333a/orjson-3.10.18-cp312-cp312-win32.whl", hash = "sha256:187ec33bbec58c76dbd4066340067d9ece6e10067bb0cc074a21ae3300caa84e", size = 142683 },
-    { url = "https://files.pythonhosted.org/packages/e6/22/469f62d25ab5f0f3aee256ea732e72dc3aab6d73bac777bd6277955bceef/orjson-3.10.18-cp312-cp312-win_amd64.whl", hash = "sha256:f9f94cf6d3f9cd720d641f8399e390e7411487e493962213390d1ae45c7814fc", size = 134754 },
-    { url = "https://files.pythonhosted.org/packages/10/b0/1040c447fac5b91bc1e9c004b69ee50abb0c1ffd0d24406e1350c58a7fcb/orjson-3.10.18-cp312-cp312-win_arm64.whl", hash = "sha256:3d600be83fe4514944500fa8c2a0a77099025ec6482e8087d7659e891f23058a", size = 131218 },
+    { url = "https://files.pythonhosted.org/packages/cd/8b/360674cd817faef32e49276187922a946468579fcaf37afdfb6c07046e92/orjson-3.11.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9d2ae0cc6aeb669633e0124531f342a17d8e97ea999e42f12a5ad4adaa304c5f", size = 238238, upload-time = "2025-08-26T17:44:54.214Z" },
+    { url = "https://files.pythonhosted.org/packages/05/3d/5fa9ea4b34c1a13be7d9046ba98d06e6feb1d8853718992954ab59d16625/orjson-3.11.3-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:ba21dbb2493e9c653eaffdc38819b004b7b1b246fb77bfc93dc016fe664eac91", size = 127713, upload-time = "2025-08-26T17:44:55.596Z" },
+    { url = "https://files.pythonhosted.org/packages/e5/5f/e18367823925e00b1feec867ff5f040055892fc474bf5f7875649ecfa586/orjson-3.11.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00f1a271e56d511d1569937c0447d7dce5a99a33ea0dec76673706360a051904", size = 123241, upload-time = "2025-08-26T17:44:57.185Z" },
+    { url = "https://files.pythonhosted.org/packages/0f/bd/3c66b91c4564759cf9f473251ac1650e446c7ba92a7c0f9f56ed54f9f0e6/orjson-3.11.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b67e71e47caa6680d1b6f075a396d04fa6ca8ca09aafb428731da9b3ea32a5a6", size = 127895, upload-time = "2025-08-26T17:44:58.349Z" },
+    { url = "https://files.pythonhosted.org/packages/82/b5/dc8dcd609db4766e2967a85f63296c59d4722b39503e5b0bf7fd340d387f/orjson-3.11.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d7d012ebddffcce8c85734a6d9e5f08180cd3857c5f5a3ac70185b43775d043d", size = 130303, upload-time = "2025-08-26T17:44:59.491Z" },
+    { url = "https://files.pythonhosted.org/packages/48/c2/d58ec5fd1270b2aa44c862171891adc2e1241bd7dab26c8f46eb97c6c6f1/orjson-3.11.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd759f75d6b8d1b62012b7f5ef9461d03c804f94d539a5515b454ba3a6588038", size = 132366, upload-time = "2025-08-26T17:45:00.654Z" },
+    { url = "https://files.pythonhosted.org/packages/73/87/0ef7e22eb8dd1ef940bfe3b9e441db519e692d62ed1aae365406a16d23d0/orjson-3.11.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6890ace0809627b0dff19cfad92d69d0fa3f089d3e359a2a532507bb6ba34efb", size = 135180, upload-time = "2025-08-26T17:45:02.424Z" },
+    { url = "https://files.pythonhosted.org/packages/bb/6a/e5bf7b70883f374710ad74faf99bacfc4b5b5a7797c1d5e130350e0e28a3/orjson-3.11.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9d4a5e041ae435b815e568537755773d05dac031fee6a57b4ba70897a44d9d2", size = 132741, upload-time = "2025-08-26T17:45:03.663Z" },
+    { url = "https://files.pythonhosted.org/packages/bd/0c/4577fd860b6386ffaa56440e792af01c7882b56d2766f55384b5b0e9d39b/orjson-3.11.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d68bf97a771836687107abfca089743885fb664b90138d8761cce61d5625d55", size = 131104, upload-time = "2025-08-26T17:45:04.939Z" },
+    { url = "https://files.pythonhosted.org/packages/66/4b/83e92b2d67e86d1c33f2ea9411742a714a26de63641b082bdbf3d8e481af/orjson-3.11.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bfc27516ec46f4520b18ef645864cee168d2a027dbf32c5537cb1f3e3c22dac1", size = 403887, upload-time = "2025-08-26T17:45:06.228Z" },
+    { url = "https://files.pythonhosted.org/packages/6d/e5/9eea6a14e9b5ceb4a271a1fd2e1dec5f2f686755c0fab6673dc6ff3433f4/orjson-3.11.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f66b001332a017d7945e177e282a40b6997056394e3ed7ddb41fb1813b83e824", size = 145855, upload-time = "2025-08-26T17:45:08.338Z" },
+    { url = "https://files.pythonhosted.org/packages/45/78/8d4f5ad0c80ba9bf8ac4d0fc71f93a7d0dc0844989e645e2074af376c307/orjson-3.11.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:212e67806525d2561efbfe9e799633b17eb668b8964abed6b5319b2f1cfbae1f", size = 135361, upload-time = "2025-08-26T17:45:09.625Z" },
+    { url = "https://files.pythonhosted.org/packages/0b/5f/16386970370178d7a9b438517ea3d704efcf163d286422bae3b37b88dbb5/orjson-3.11.3-cp311-cp311-win32.whl", hash = "sha256:6e8e0c3b85575a32f2ffa59de455f85ce002b8bdc0662d6b9c2ed6d80ab5d204", size = 136190, upload-time = "2025-08-26T17:45:10.962Z" },
+    { url = "https://files.pythonhosted.org/packages/09/60/db16c6f7a41dd8ac9fb651f66701ff2aeb499ad9ebc15853a26c7c152448/orjson-3.11.3-cp311-cp311-win_amd64.whl", hash = "sha256:6be2f1b5d3dc99a5ce5ce162fc741c22ba9f3443d3dd586e6a1211b7bc87bc7b", size = 131389, upload-time = "2025-08-26T17:45:12.285Z" },
+    { url = "https://files.pythonhosted.org/packages/3e/2a/bb811ad336667041dea9b8565c7c9faf2f59b47eb5ab680315eea612ef2e/orjson-3.11.3-cp311-cp311-win_arm64.whl", hash = "sha256:fafb1a99d740523d964b15c8db4eabbfc86ff29f84898262bf6e3e4c9e97e43e", size = 126120, upload-time = "2025-08-26T17:45:13.515Z" },
+    { url = "https://files.pythonhosted.org/packages/3d/b0/a7edab2a00cdcb2688e1c943401cb3236323e7bfd2839815c6131a3742f4/orjson-3.11.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8c752089db84333e36d754c4baf19c0e1437012242048439c7e80eb0e6426e3b", size = 238259, upload-time = "2025-08-26T17:45:15.093Z" },
+    { url = "https://files.pythonhosted.org/packages/e1/c6/ff4865a9cc398a07a83342713b5932e4dc3cb4bf4bc04e8f83dedfc0d736/orjson-3.11.3-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:9b8761b6cf04a856eb544acdd82fc594b978f12ac3602d6374a7edb9d86fd2c2", size = 127633, upload-time = "2025-08-26T17:45:16.417Z" },
+    { url = "https://files.pythonhosted.org/packages/6e/e6/e00bea2d9472f44fe8794f523e548ce0ad51eb9693cf538a753a27b8bda4/orjson-3.11.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b13974dc8ac6ba22feaa867fc19135a3e01a134b4f7c9c28162fed4d615008a", size = 123061, upload-time = "2025-08-26T17:45:17.673Z" },
+    { url = "https://files.pythonhosted.org/packages/54/31/9fbb78b8e1eb3ac605467cb846e1c08d0588506028b37f4ee21f978a51d4/orjson-3.11.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f83abab5bacb76d9c821fd5c07728ff224ed0e52d7a71b7b3de822f3df04e15c", size = 127956, upload-time = "2025-08-26T17:45:19.172Z" },
+    { url = "https://files.pythonhosted.org/packages/36/88/b0604c22af1eed9f98d709a96302006915cfd724a7ebd27d6dd11c22d80b/orjson-3.11.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6fbaf48a744b94091a56c62897b27c31ee2da93d826aa5b207131a1e13d4064", size = 130790, upload-time = "2025-08-26T17:45:20.586Z" },
+    { url = "https://files.pythonhosted.org/packages/0e/9d/1c1238ae9fffbfed51ba1e507731b3faaf6b846126a47e9649222b0fd06f/orjson-3.11.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc779b4f4bba2847d0d2940081a7b6f7b5877e05408ffbb74fa1faf4a136c424", size = 132385, upload-time = "2025-08-26T17:45:22.036Z" },
+    { url = "https://files.pythonhosted.org/packages/a3/b5/c06f1b090a1c875f337e21dd71943bc9d84087f7cdf8c6e9086902c34e42/orjson-3.11.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd4b909ce4c50faa2192da6bb684d9848d4510b736b0611b6ab4020ea6fd2d23", size = 135305, upload-time = "2025-08-26T17:45:23.4Z" },
+    { url = "https://files.pythonhosted.org/packages/a0/26/5f028c7d81ad2ebbf84414ba6d6c9cac03f22f5cd0d01eb40fb2d6a06b07/orjson-3.11.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:524b765ad888dc5518bbce12c77c2e83dee1ed6b0992c1790cc5fb49bb4b6667", size = 132875, upload-time = "2025-08-26T17:45:25.182Z" },
+    { url = "https://files.pythonhosted.org/packages/fe/d4/b8df70d9cfb56e385bf39b4e915298f9ae6c61454c8154a0f5fd7efcd42e/orjson-3.11.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:84fd82870b97ae3cdcea9d8746e592b6d40e1e4d4527835fc520c588d2ded04f", size = 130940, upload-time = "2025-08-26T17:45:27.209Z" },
+    { url = "https://files.pythonhosted.org/packages/da/5e/afe6a052ebc1a4741c792dd96e9f65bf3939d2094e8b356503b68d48f9f5/orjson-3.11.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:fbecb9709111be913ae6879b07bafd4b0785b44c1eb5cac8ac76da048b3885a1", size = 403852, upload-time = "2025-08-26T17:45:28.478Z" },
+    { url = "https://files.pythonhosted.org/packages/f8/90/7bbabafeb2ce65915e9247f14a56b29c9334003536009ef5b122783fe67e/orjson-3.11.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9dba358d55aee552bd868de348f4736ca5a4086d9a62e2bfbbeeb5629fe8b0cc", size = 146293, upload-time = "2025-08-26T17:45:29.86Z" },
+    { url = "https://files.pythonhosted.org/packages/27/b3/2d703946447da8b093350570644a663df69448c9d9330e5f1d9cce997f20/orjson-3.11.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eabcf2e84f1d7105f84580e03012270c7e97ecb1fb1618bda395061b2a84a049", size = 135470, upload-time = "2025-08-26T17:45:31.243Z" },
+    { url = "https://files.pythonhosted.org/packages/38/70/b14dcfae7aff0e379b0119c8a812f8396678919c431efccc8e8a0263e4d9/orjson-3.11.3-cp312-cp312-win32.whl", hash = "sha256:3782d2c60b8116772aea8d9b7905221437fdf53e7277282e8d8b07c220f96cca", size = 136248, upload-time = "2025-08-26T17:45:32.567Z" },
+    { url = "https://files.pythonhosted.org/packages/35/b8/9e3127d65de7fff243f7f3e53f59a531bf6bb295ebe5db024c2503cc0726/orjson-3.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:79b44319268af2eaa3e315b92298de9a0067ade6e6003ddaef72f8e0bedb94f1", size = 131437, upload-time = "2025-08-26T17:45:34.949Z" },
+    { url = "https://files.pythonhosted.org/packages/51/92/a946e737d4d8a7fd84a606aba96220043dcc7d6988b9e7551f7f6d5ba5ad/orjson-3.11.3-cp312-cp312-win_arm64.whl", hash = "sha256:0e92a4e83341ef79d835ca21b8bd13e27c859e4e9e4d7b63defc6e58462a3710", size = 125978, upload-time = "2025-08-26T17:45:36.422Z" },
 ]
 
 [[package]]
diff --git a/web/.env.example b/web/.env.example
index 37bfc939eb..23b72b3414 100644
--- a/web/.env.example
+++ b/web/.env.example
@@ -2,6 +2,8 @@ NEXT_PUBLIC_DEPLOY_ENV=DEVELOPMENT
 
 # The deployment edition, SELF_HOSTED
 NEXT_PUBLIC_EDITION=SELF_HOSTED
+# The base path for the application
+NEXT_PUBLIC_BASE_PATH=
 # The base URL of console application, refers to the Console base URL of WEB service if console domain is
 # different from api or web app domain.
 # example: http://cloud.dify.ai/console/api
diff --git a/web/Dockerfile b/web/Dockerfile
index 2ea8402cd6..317a7f9c5b 100644
--- a/web/Dockerfile
+++ b/web/Dockerfile
@@ -12,6 +12,7 @@ RUN apk add --no-cache tzdata
 RUN corepack enable
 ENV PNPM_HOME="/pnpm"
 ENV PATH="$PNPM_HOME:$PATH"
+ENV NEXT_PUBLIC_BASE_PATH=
 
 
 # install packages
diff --git a/web/app/account/account-page/AvatarWithEdit.tsx b/web/app/account/(commonLayout)/account-page/AvatarWithEdit.tsx
similarity index 100%
rename from web/app/account/account-page/AvatarWithEdit.tsx
rename to web/app/account/(commonLayout)/account-page/AvatarWithEdit.tsx
diff --git a/web/app/account/account-page/email-change-modal.tsx b/web/app/account/(commonLayout)/account-page/email-change-modal.tsx
similarity index 100%
rename from web/app/account/account-page/email-change-modal.tsx
rename to web/app/account/(commonLayout)/account-page/email-change-modal.tsx
diff --git a/web/app/account/account-page/index.tsx b/web/app/account/(commonLayout)/account-page/index.tsx
similarity index 100%
rename from web/app/account/account-page/index.tsx
rename to web/app/account/(commonLayout)/account-page/index.tsx
diff --git a/web/app/account/avatar.tsx b/web/app/account/(commonLayout)/avatar.tsx
similarity index 100%
rename from web/app/account/avatar.tsx
rename to web/app/account/(commonLayout)/avatar.tsx
diff --git a/web/app/account/delete-account/components/check-email.tsx b/web/app/account/(commonLayout)/delete-account/components/check-email.tsx
similarity index 100%
rename from web/app/account/delete-account/components/check-email.tsx
rename to web/app/account/(commonLayout)/delete-account/components/check-email.tsx
diff --git a/web/app/account/delete-account/components/feed-back.tsx b/web/app/account/(commonLayout)/delete-account/components/feed-back.tsx
similarity index 100%
rename from web/app/account/delete-account/components/feed-back.tsx
rename to web/app/account/(commonLayout)/delete-account/components/feed-back.tsx
diff --git a/web/app/account/delete-account/components/verify-email.tsx b/web/app/account/(commonLayout)/delete-account/components/verify-email.tsx
similarity index 100%
rename from web/app/account/delete-account/components/verify-email.tsx
rename to web/app/account/(commonLayout)/delete-account/components/verify-email.tsx
diff --git a/web/app/account/delete-account/index.tsx b/web/app/account/(commonLayout)/delete-account/index.tsx
similarity index 100%
rename from web/app/account/delete-account/index.tsx
rename to web/app/account/(commonLayout)/delete-account/index.tsx
diff --git a/web/app/account/delete-account/state.tsx b/web/app/account/(commonLayout)/delete-account/state.tsx
similarity index 100%
rename from web/app/account/delete-account/state.tsx
rename to web/app/account/(commonLayout)/delete-account/state.tsx
diff --git a/web/app/account/header.tsx b/web/app/account/(commonLayout)/header.tsx
similarity index 97%
rename from web/app/account/header.tsx
rename to web/app/account/(commonLayout)/header.tsx
index af09ca1c9c..ce804055b5 100644
--- a/web/app/account/header.tsx
+++ b/web/app/account/(commonLayout)/header.tsx
@@ -2,11 +2,11 @@
 import { useTranslation } from 'react-i18next'
 import { RiArrowRightUpLine, RiRobot2Line } from '@remixicon/react'
 import { useRouter } from 'next/navigation'
-import Button from '../components/base/button'
-import Avatar from './avatar'
+import Button from '@/app/components/base/button'
 import DifyLogo from '@/app/components/base/logo/dify-logo'
 import { useCallback } from 'react'
 import { useGlobalPublicStore } from '@/context/global-public-context'
+import Avatar from './avatar'
 
 const Header = () => {
   const { t } = useTranslation()
diff --git a/web/app/account/layout.tsx b/web/app/account/(commonLayout)/layout.tsx
similarity index 100%
rename from web/app/account/layout.tsx
rename to web/app/account/(commonLayout)/layout.tsx
diff --git a/web/app/account/page.tsx b/web/app/account/(commonLayout)/page.tsx
similarity index 100%
rename from web/app/account/page.tsx
rename to web/app/account/(commonLayout)/page.tsx
diff --git a/web/app/account/oauth/authorize/layout.tsx b/web/app/account/oauth/authorize/layout.tsx
new file mode 100644
index 0000000000..078d23114a
--- /dev/null
+++ b/web/app/account/oauth/authorize/layout.tsx
@@ -0,0 +1,37 @@
+'use client'
+import Header from '@/app/signin/_header'
+
+import cn from '@/utils/classnames'
+import { useGlobalPublicStore } from '@/context/global-public-context'
+import useDocumentTitle from '@/hooks/use-document-title'
+import { AppContextProvider } from '@/context/app-context'
+import { useMemo } from 'react'
+
+export default function SignInLayout({ children }: any) {
+  const { systemFeatures } = useGlobalPublicStore()
+  useDocumentTitle('')
+  const isLoggedIn = useMemo(() => {
+    try {
+      return Boolean(localStorage.getItem('console_token') && localStorage.getItem('refresh_token'))
+    }
+    catch { return false }
+  }, [])
+  return <>