From b365bffd0209479e92a6eff51139cd9069935c53 Mon Sep 17 00:00:00 2001 From: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> Date: Mon, 8 Dec 2025 10:20:43 +0800 Subject: [PATCH 001/128] hotfix(otel): replace hardcoded span attributes with shared constants (#29227) Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com> --- api/extensions/otel/runtime.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/api/extensions/otel/runtime.py b/api/extensions/otel/runtime.py index f8ed330cf6..16f5ccf488 100644 --- a/api/extensions/otel/runtime.py +++ b/api/extensions/otel/runtime.py @@ -11,6 +11,7 @@ from opentelemetry.propagators.composite import CompositePropagator from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator from configs import dify_config +from extensions.otel.semconv import DifySpanAttributes, GenAIAttributes from libs.helper import extract_tenant_id from models import Account, EndUser @@ -51,8 +52,8 @@ def on_user_loaded(_sender, user: Union["Account", "EndUser"]): if not tenant_id: return if current_span: - current_span.set_attribute("service.tenant.id", tenant_id) - current_span.set_attribute("service.user.id", user.id) + current_span.set_attribute(DifySpanAttributes.TENANT_ID, tenant_id) + current_span.set_attribute(GenAIAttributes.USER_ID, user.id) except Exception: logger.exception("Error setting tenant and user attributes") pass From d1f4a75272065836aef1aebeadda7478226a95a6 Mon Sep 17 00:00:00 2001 From: kurokobo Date: Mon, 8 Dec 2025 11:21:15 +0900 Subject: [PATCH 002/128] fix: remove 1px border from knowledge pipeline editor (#29232) --- .../datasets/(datasetDetailLayout)/[datasetId]/layout-main.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/web/app/(commonLayout)/datasets/(datasetDetailLayout)/[datasetId]/layout-main.tsx b/web/app/(commonLayout)/datasets/(datasetDetailLayout)/[datasetId]/layout-main.tsx index da8839e869..3effb79f20 100644 --- a/web/app/(commonLayout)/datasets/(datasetDetailLayout)/[datasetId]/layout-main.tsx +++ b/web/app/(commonLayout)/datasets/(datasetDetailLayout)/[datasetId]/layout-main.tsx @@ -121,7 +121,7 @@ const DatasetDetailLayout: FC = (props) => {
Date: Mon, 8 Dec 2025 10:22:26 +0800 Subject: [PATCH 003/128] fix: view log detail clear query params (#29256) --- web/app/components/app/log/index.tsx | 38 ++++++++++++++++++++++------ 1 file changed, 30 insertions(+), 8 deletions(-) diff --git a/web/app/components/app/log/index.tsx b/web/app/components/app/log/index.tsx index cedf2de74d..4fda71bece 100644 --- a/web/app/components/app/log/index.tsx +++ b/web/app/components/app/log/index.tsx @@ -27,24 +27,33 @@ export type QueryParam = { sort_by?: string } +const defaultQueryParams: QueryParam = { + period: '2', + annotation_status: 'all', + sort_by: '-created_at', +} + +const logsStateCache = new Map() + const Logs: FC = ({ appDetail }) => { const { t } = useTranslation() const router = useRouter() const pathname = usePathname() const searchParams = useSearchParams() - const [queryParams, setQueryParams] = useState({ - period: '2', - annotation_status: 'all', - sort_by: '-created_at', - }) const getPageFromParams = useCallback(() => { const pageParam = Number.parseInt(searchParams.get('page') || '1', 10) if (Number.isNaN(pageParam) || pageParam < 1) return 0 return pageParam - 1 }, [searchParams]) - const [currPage, setCurrPage] = React.useState(() => getPageFromParams()) - const [limit, setLimit] = React.useState(APP_PAGE_LIMIT) + const cachedState = logsStateCache.get(appDetail.id) + const [queryParams, setQueryParams] = useState(cachedState?.queryParams ?? defaultQueryParams) + const [currPage, setCurrPage] = React.useState(() => cachedState?.currPage ?? getPageFromParams()) + const [limit, setLimit] = React.useState(cachedState?.limit ?? APP_PAGE_LIMIT) const debouncedQueryParams = useDebounce(queryParams, { wait: 500 }) useEffect(() => { @@ -52,6 +61,14 @@ const Logs: FC = ({ appDetail }) => { setCurrPage(prev => (prev === pageFromParams ? prev : pageFromParams)) }, [getPageFromParams]) + useEffect(() => { + logsStateCache.set(appDetail.id, { + queryParams, + currPage, + limit, + }) + }, [appDetail.id, currPage, limit, queryParams]) + // Get the app type first const isChatMode = appDetail.mode !== AppModeEnum.COMPLETION @@ -85,6 +102,11 @@ const Logs: FC = ({ appDetail }) => { const total = isChatMode ? chatConversations?.total : completionConversations?.total + const handleQueryParamsChange = useCallback((next: QueryParam) => { + setCurrPage(0) + setQueryParams(next) + }, []) + const handlePageChange = useCallback((page: number) => { setCurrPage(page) const params = new URLSearchParams(searchParams.toString()) @@ -101,7 +123,7 @@ const Logs: FC = ({ appDetail }) => {

{t('appLog.description')}

- + {total === undefined ? : total > 0 From 88bfeee23402ee5048f5da02136f2ebcefdd377c Mon Sep 17 00:00:00 2001 From: kenwoodjw Date: Mon, 8 Dec 2025 10:22:57 +0800 Subject: [PATCH 004/128] feat: allow admin api key to bypass csrf validation (#29139) Signed-off-by: kenwoodjw --- api/libs/token.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/api/libs/token.py b/api/libs/token.py index 098ff958da..a34db70764 100644 --- a/api/libs/token.py +++ b/api/libs/token.py @@ -189,6 +189,11 @@ def build_force_logout_cookie_headers() -> list[str]: def check_csrf_token(request: Request, user_id: str): # some apis are sent by beacon, so we need to bypass csrf token check # since these APIs are post, they are already protected by SameSite: Lax, so csrf is not required. + if dify_config.ADMIN_API_KEY_ENABLE: + auth_token = extract_access_token(request) + if auth_token and auth_token == dify_config.ADMIN_API_KEY: + return + def _unauthorized(): raise Unauthorized("CSRF token is missing or invalid.") From ee0fe8c7f9e7e32ed5a409e27ce688bdc9770831 Mon Sep 17 00:00:00 2001 From: wangxiaolei Date: Mon, 8 Dec 2025 10:27:02 +0800 Subject: [PATCH 005/128] feat: support suggested_questions_after_answer to be configed (#29254) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- README.md | 13 ++ api/.env.example | 16 ++ api/core/llm_generator/llm_generator.py | 7 +- api/core/llm_generator/prompts.py | 14 +- docs/suggested-questions-configuration.md | 253 ++++++++++++++++++++++ 5 files changed, 301 insertions(+), 2 deletions(-) create mode 100644 docs/suggested-questions-configuration.md diff --git a/README.md b/README.md index 09ba1f634b..b71764a214 100644 --- a/README.md +++ b/README.md @@ -139,6 +139,19 @@ Star Dify on GitHub and be instantly notified of new releases. If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments). +#### Customizing Suggested Questions + +You can now customize the "Suggested Questions After Answer" feature to better fit your use case. For example, to generate longer, more technical questions: + +```bash +# In your .env file +SUGGESTED_QUESTIONS_PROMPT='Please help me predict the five most likely technical follow-up questions a developer would ask. Focus on implementation details, best practices, and architecture considerations. Keep each question between 40-60 characters. Output must be JSON array: ["question1","question2","question3","question4","question5"]' +SUGGESTED_QUESTIONS_MAX_TOKENS=512 +SUGGESTED_QUESTIONS_TEMPERATURE=0.3 +``` + +See the [Suggested Questions Configuration Guide](docs/suggested-questions-configuration.md) for detailed examples and usage instructions. + ### Metrics Monitoring with Grafana Import the dashboard to Grafana, using Dify's PostgreSQL database as data source, to monitor metrics in granularity of apps, tenants, messages, and more. 
diff --git a/api/.env.example b/api/.env.example index 50607f5b35..35aaabbc10 100644 --- a/api/.env.example +++ b/api/.env.example @@ -633,6 +633,22 @@ SWAGGER_UI_PATH=/swagger-ui.html # Set to false to export dataset IDs as plain text for easier cross-environment import DSL_EXPORT_ENCRYPT_DATASET_ID=true +# Suggested Questions After Answer Configuration +# These environment variables allow customization of the suggested questions feature +# +# Custom prompt for generating suggested questions (optional) +# If not set, uses the default prompt that generates 3 questions under 20 characters each +# Example: "Please help me predict the five most likely technical follow-up questions a developer would ask. Focus on implementation details, best practices, and architecture considerations. Keep each question between 40-60 characters. Output must be JSON array: [\"question1\",\"question2\",\"question3\",\"question4\",\"question5\"]" +# SUGGESTED_QUESTIONS_PROMPT= + +# Maximum number of tokens for suggested questions generation (default: 256) +# Adjust this value for longer questions or more questions +# SUGGESTED_QUESTIONS_MAX_TOKENS=256 + +# Temperature for suggested questions generation (default: 0.0) +# Higher values (0.5-1.0) produce more creative questions, lower values (0.0-0.3) produce more focused questions +# SUGGESTED_QUESTIONS_TEMPERATURE=0 + # Tenant isolated task queue configuration TENANT_ISOLATED_TASK_CONCURRENCY=1 diff --git a/api/core/llm_generator/llm_generator.py b/api/core/llm_generator/llm_generator.py index bd893b17f1..6b168fd4e8 100644 --- a/api/core/llm_generator/llm_generator.py +++ b/api/core/llm_generator/llm_generator.py @@ -15,6 +15,8 @@ from core.llm_generator.prompts import ( LLM_MODIFY_CODE_SYSTEM, LLM_MODIFY_PROMPT_SYSTEM, PYTHON_CODE_GENERATOR_PROMPT_TEMPLATE, + SUGGESTED_QUESTIONS_MAX_TOKENS, + SUGGESTED_QUESTIONS_TEMPERATURE, SYSTEM_STRUCTURED_OUTPUT_GENERATE, WORKFLOW_RULE_CONFIG_PROMPT_GENERATE_TEMPLATE, ) @@ -124,7 +126,10 @@ class LLMGenerator: try: response: LLMResult = model_instance.invoke_llm( prompt_messages=list(prompt_messages), - model_parameters={"max_tokens": 256, "temperature": 0}, + model_parameters={ + "max_tokens": SUGGESTED_QUESTIONS_MAX_TOKENS, + "temperature": SUGGESTED_QUESTIONS_TEMPERATURE, + }, stream=False, ) diff --git a/api/core/llm_generator/prompts.py b/api/core/llm_generator/prompts.py index 9268347526..ec2b7f2d44 100644 --- a/api/core/llm_generator/prompts.py +++ b/api/core/llm_generator/prompts.py @@ -1,4 +1,6 @@ # Written by YORKI MINAKO🤡, Edited by Xiaoyi, Edited by yasu-oh +import os + CONVERSATION_TITLE_PROMPT = """You are asked to generate a concise chat title by decomposing the user’s input into two parts: “Intention” and “Subject”. 1. Detect Input Language @@ -94,7 +96,8 @@ JAVASCRIPT_CODE_GENERATOR_PROMPT_TEMPLATE = ( ) -SUGGESTED_QUESTIONS_AFTER_ANSWER_INSTRUCTION_PROMPT = ( +# Default prompt for suggested questions (can be overridden by environment variable) +_DEFAULT_SUGGESTED_QUESTIONS_AFTER_ANSWER_PROMPT = ( "Please help me predict the three most likely questions that human would ask, " "and keep each question under 20 characters.\n" "MAKE SURE your output is the SAME language as the Assistant's latest response. 
" @@ -102,6 +105,15 @@ SUGGESTED_QUESTIONS_AFTER_ANSWER_INSTRUCTION_PROMPT = ( '["question1","question2","question3"]\n' ) +# Environment variable override for suggested questions prompt +SUGGESTED_QUESTIONS_AFTER_ANSWER_INSTRUCTION_PROMPT = os.getenv( + "SUGGESTED_QUESTIONS_PROMPT", _DEFAULT_SUGGESTED_QUESTIONS_AFTER_ANSWER_PROMPT +) + +# Configurable LLM parameters for suggested questions (can be overridden by environment variables) +SUGGESTED_QUESTIONS_MAX_TOKENS = int(os.getenv("SUGGESTED_QUESTIONS_MAX_TOKENS", "256")) +SUGGESTED_QUESTIONS_TEMPERATURE = float(os.getenv("SUGGESTED_QUESTIONS_TEMPERATURE", "0")) + GENERATOR_QA_PROMPT = ( " The user will send a long text. Generate a Question and Answer pairs only using the knowledge" " in the long text. Please think step by step." diff --git a/docs/suggested-questions-configuration.md b/docs/suggested-questions-configuration.md new file mode 100644 index 0000000000..c726d3b157 --- /dev/null +++ b/docs/suggested-questions-configuration.md @@ -0,0 +1,253 @@ +# Configurable Suggested Questions After Answer + +This document explains how to configure the "Suggested Questions After Answer" feature in Dify using environment variables. + +## Overview + +The suggested questions feature generates follow-up questions after each AI response to help users continue the conversation. By default, Dify generates 3 short questions (under 20 characters each), but you can customize this behavior to better fit your specific use case. + +## Environment Variables + +### `SUGGESTED_QUESTIONS_PROMPT` + +**Description**: Custom prompt template for generating suggested questions. + +**Default**: + +``` +Please help me predict the three most likely questions that human would ask, and keep each question under 20 characters. +MAKE SURE your output is the SAME language as the Assistant's latest response. +The output must be an array in JSON format following the specified schema: +["question1","question2","question3"] +``` + +**Usage Examples**: + +1. **Technical/Developer Questions (Your Use Case)**: + + ```bash + export SUGGESTED_QUESTIONS_PROMPT='Please help me predict the five most likely technical follow-up questions a developer would ask. Focus on implementation details, best practices, and architecture considerations. Keep each question between 40-60 characters. Output must be JSON array: ["question1","question2","question3","question4","question5"]' + ``` + +1. **Customer Support**: + + ```bash + export SUGGESTED_QUESTIONS_PROMPT='Generate 3 helpful follow-up questions that guide customers toward solving their own problems. Focus on troubleshooting steps and common issues. Keep questions under 30 characters. JSON format: ["q1","q2","q3"]' + ``` + +1. **Educational Content**: + + ```bash + export SUGGESTED_QUESTIONS_PROMPT='Create 4 thought-provoking questions that help students deeper understand the topic. Focus on concepts, relationships, and applications. Questions should be 25-40 characters. JSON: ["question1","question2","question3","question4"]' + ``` + +1. **Multilingual Support**: + + ```bash + export SUGGESTED_QUESTIONS_PROMPT='Generate exactly 3 follow-up questions in the same language as the conversation. Adapt question length appropriately for the language (Chinese: 10-15 chars, English: 20-30 chars, Arabic: 25-35 chars). Always output valid JSON array.' 
+ ``` + +**Important Notes**: + +- The prompt must request JSON array output format +- Include language matching instructions for multilingual support +- Specify clear character limits or question count requirements +- Focus on your specific domain or use case + +### `SUGGESTED_QUESTIONS_MAX_TOKENS` + +**Description**: Maximum number of tokens for the LLM response. + +**Default**: `256` + +**Usage**: + +```bash +export SUGGESTED_QUESTIONS_MAX_TOKENS=512 # For longer questions or more questions +``` + +**Recommended Values**: + +- `256`: Default, good for 3-4 short questions +- `384`: Medium, good for 4-5 medium-length questions +- `512`: High, good for 5+ longer questions or complex prompts +- `1024`: Maximum, for very complex question generation + +### `SUGGESTED_QUESTIONS_TEMPERATURE` + +**Description**: Temperature parameter for LLM creativity. + +**Default**: `0.0` + +**Usage**: + +```bash +export SUGGESTED_QUESTIONS_TEMPERATURE=0.3 # Balanced creativity +``` + +**Recommended Values**: + +- `0.0-0.2`: Very focused, predictable questions (good for technical support) +- `0.3-0.5`: Balanced creativity and relevance (good for general use) +- `0.6-0.8`: More creative, diverse questions (good for brainstorming) +- `0.9-1.0`: Maximum creativity (good for educational exploration) + +## Configuration Examples + +### Example 1: Developer Documentation Chatbot + +```bash +# .env file +SUGGESTED_QUESTIONS_PROMPT='Generate exactly 5 technical follow-up questions that developers would ask after reading code documentation. Focus on implementation details, edge cases, performance considerations, and best practices. Each question should be 40-60 characters long. Output as JSON array: ["question1","question2","question3","question4","question5"]' +SUGGESTED_QUESTIONS_MAX_TOKENS=512 +SUGGESTED_QUESTIONS_TEMPERATURE=0.3 +``` + +### Example 2: Customer Service Bot + +```bash +# .env file +SUGGESTED_QUESTIONS_PROMPT='Create 3 actionable follow-up questions that help customers resolve their own issues. Focus on common problems, troubleshooting steps, and product features. Keep questions simple and under 25 characters. JSON: ["q1","q2","q3"]' +SUGGESTED_QUESTIONS_MAX_TOKENS=256 +SUGGESTED_QUESTIONS_TEMPERATURE=0.1 +``` + +### Example 3: Educational Tutor + +```bash +# .env file +SUGGESTED_QUESTIONS_PROMPT='Generate 4 thought-provoking questions that help students deepen their understanding of the topic. Focus on relationships between concepts, practical applications, and critical thinking. Questions should be 30-45 characters. Output: ["question1","question2","question3","question4"]' +SUGGESTED_QUESTIONS_MAX_TOKENS=384 +SUGGESTED_QUESTIONS_TEMPERATURE=0.6 +``` + +## Implementation Details + +### How It Works + +1. **Environment Variable Loading**: The system checks for environment variables at startup +1. **Fallback to Defaults**: If no environment variables are set, original behavior is preserved +1. **Prompt Template**: The custom prompt is used as-is, allowing full control over question generation +1. **LLM Parameters**: Custom max_tokens and temperature are passed to the LLM API +1. 
**JSON Parsing**: The system expects JSON array output and parses it accordingly + +### File Changes + +The implementation modifies these files: + +- `api/core/llm_generator/prompts.py`: Environment variable support +- `api/core/llm_generator/llm_generator.py`: Custom LLM parameters +- `api/.env.example`: Documentation of new variables + +### Backward Compatibility + +- ✅ **Zero Breaking Changes**: Works exactly as before if no environment variables are set +- ✅ **Default Behavior Preserved**: Original prompt and parameters used as fallbacks +- ✅ **No Database Changes**: Pure environment variable configuration +- ✅ **No UI Changes Required**: Configuration happens at deployment level + +## Testing Your Configuration + +### Local Testing + +1. Set environment variables: + + ```bash + export SUGGESTED_QUESTIONS_PROMPT='Your test prompt...' + export SUGGESTED_QUESTIONS_MAX_TOKENS=300 + export SUGGESTED_QUESTIONS_TEMPERATURE=0.4 + ``` + +1. Start Dify API: + + ```bash + cd api + python -m flask run --host 0.0.0.0 --port=5001 --debug + ``` + +1. Test the feature in your chat application and verify the questions match your expectations. + +### Monitoring + +Monitor the following when testing: + +- **Question Quality**: Are questions relevant and helpful? +- **Language Matching**: Do questions match the conversation language? +- **JSON Format**: Is output properly formatted as JSON array? +- **Length Constraints**: Do questions follow your length requirements? +- **Response Time**: Are the custom parameters affecting performance? + +## Troubleshooting + +### Common Issues + +1. **Invalid JSON Output**: + + - **Problem**: LLM doesn't return valid JSON + - **Solution**: Make sure your prompt explicitly requests JSON array format + +1. **Questions Too Long/Short**: + + - **Problem**: Questions don't follow length constraints + - **Solution**: Be more specific about character limits in your prompt + +1. **Too Few/Many Questions**: + + - **Problem**: Wrong number of questions generated + - **Solution**: Clearly specify the exact number in your prompt + +1. **Language Mismatch**: + + - **Problem**: Questions in wrong language + - **Solution**: Include explicit language matching instructions in prompt + +1. **Performance Issues**: + + - **Problem**: Slow response times + - **Solution**: Reduce `SUGGESTED_QUESTIONS_MAX_TOKENS` or simplify prompt + +### Debug Logging + +To debug your configuration, you can temporarily add logging to see the actual prompt and parameters being used: + +```python +import logging +logger = logging.getLogger(__name__) + +# In llm_generator.py +logger.info(f"Suggested questions prompt: {prompt}") +logger.info(f"Max tokens: {SUGGESTED_QUESTIONS_MAX_TOKENS}") +logger.info(f"Temperature: {SUGGESTED_QUESTIONS_TEMPERATURE}") +``` + +## Migration Guide + +### From Default Configuration + +If you're currently using the default configuration and want to customize: + +1. **Assess Your Needs**: Determine what aspects need customization (question count, length, domain focus) +1. **Design Your Prompt**: Write a custom prompt that addresses your specific use case +1. **Choose Parameters**: Select appropriate max_tokens and temperature values +1. **Test Incrementally**: Start with small changes and test thoroughly +1. **Deploy Gradually**: Roll out to production after successful testing + +### Best Practices + +1. **Start Simple**: Begin with minimal changes to the default prompt +1. **Test Thoroughly**: Test with various conversation types and languages +1. 
**Monitor Performance**: Watch for impact on response times and costs +1. **Get User Feedback**: Collect feedback on question quality and relevance +1. **Iterate**: Refine your configuration based on real-world usage + +## Future Enhancements + +This environment variable approach provides immediate customization while maintaining backward compatibility. Future enhancements could include: + +1. **App-Level Configuration**: Different apps with different suggested question settings +1. **Dynamic Prompts**: Context-aware prompts based on conversation content +1. **Multi-Model Support**: Different models for different types of questions +1. **Analytics Dashboard**: Insights into question effectiveness and usage patterns +1. **A/B Testing**: Built-in testing of different prompt configurations + +For now, the environment variable approach offers a simple, reliable way to customize the suggested questions feature for your specific needs. From 4b8bd4b891159a21b5acda50449022b44f5fb855 Mon Sep 17 00:00:00 2001 From: yodhcn <47470844+yodhcn@users.noreply.github.com> Date: Mon, 8 Dec 2025 10:40:35 +0800 Subject: [PATCH 006/128] Fix(#29181): convert uuid route param to str in DatasetDocumentListApi.get (#29207) --- api/controllers/console/datasets/datasets_document.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index 2663c939bc..8ac285e9f8 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -201,8 +201,9 @@ class DatasetDocumentListApi(Resource): @setup_required @login_required @account_initialization_required - def get(self, dataset_id: str): + def get(self, dataset_id): current_user, current_tenant_id = current_account_with_tenant() + dataset_id = str(dataset_id) page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) search = request.args.get("keyword", default=None, type=str) From 6942666d03dabe937b5b96cc021709190000b1d6 Mon Sep 17 00:00:00 2001 From: Wu Tianwei <30284043+WTW0313@users.noreply.github.com> Date: Mon, 8 Dec 2025 11:48:49 +0800 Subject: [PATCH 007/128] chore(deps): update @lexical packages to version 0.38.2 in package.json and pnpm-lock.yaml (#29260) --- web/package.json | 12 +- web/pnpm-lock.yaml | 388 ++++++++++++++------------------------------- 2 files changed, 124 insertions(+), 276 deletions(-) diff --git a/web/package.json b/web/package.json index 5e86fead4f..478abceb45 100644 --- a/web/package.json +++ b/web/package.json @@ -56,12 +56,12 @@ "@heroicons/react": "^2.2.0", "@hookform/resolvers": "^3.10.0", "@lexical/code": "^0.38.2", - "@lexical/link": "^0.36.2", + "@lexical/link": "^0.38.2", "@lexical/list": "^0.38.2", - "@lexical/react": "^0.36.2", - "@lexical/selection": "^0.37.0", + "@lexical/react": "^0.38.2", + "@lexical/selection": "^0.38.2", "@lexical/text": "^0.38.2", - "@lexical/utils": "^0.37.0", + "@lexical/utils": "^0.38.2", "@monaco-editor/react": "^4.7.0", "@octokit/core": "^6.1.6", "@octokit/request-error": "^6.1.8", @@ -99,7 +99,7 @@ "katex": "^0.16.25", "ky": "^1.12.0", "lamejs": "^1.2.1", - "lexical": "^0.36.2", + "lexical": "^0.38.2", "line-clamp": "^1.0.0", "lodash-es": "^4.17.21", "mermaid": "~11.11.0", @@ -237,8 +237,6 @@ }, "pnpm": { "overrides": { - "lexical": "0.37.0", - "@lexical/*": "0.37.0", "@monaco-editor/loader": "1.5.0", "@eslint/plugin-kit@<0.3.4": "0.3.4", "brace-expansion@<2.0.2": 
"2.0.2", diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index 298f451db5..02b1c9b592 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ -15,8 +15,6 @@ overrides: vite: ~6.4.1 prismjs: ~1.30 brace-expansion: ~2.0 - lexical: 0.37.0 - '@lexical/*': 0.37.0 '@monaco-editor/loader': 1.5.0 '@eslint/plugin-kit@<0.3.4': 0.3.4 brace-expansion@<2.0.2: 2.0.2 @@ -88,23 +86,23 @@ importers: specifier: ^0.38.2 version: 0.38.2 '@lexical/link': - specifier: ^0.36.2 - version: 0.36.2 + specifier: ^0.38.2 + version: 0.38.2 '@lexical/list': specifier: ^0.38.2 version: 0.38.2 '@lexical/react': - specifier: ^0.36.2 - version: 0.36.2(react-dom@19.2.1(react@19.2.1))(react@19.2.1)(yjs@13.6.27) + specifier: ^0.38.2 + version: 0.38.2(react-dom@19.2.1(react@19.2.1))(react@19.2.1)(yjs@13.6.27) '@lexical/selection': - specifier: ^0.37.0 - version: 0.37.0 + specifier: ^0.38.2 + version: 0.38.2 '@lexical/text': specifier: ^0.38.2 version: 0.38.2 '@lexical/utils': - specifier: ^0.37.0 - version: 0.37.0 + specifier: ^0.38.2 + version: 0.38.2 '@monaco-editor/react': specifier: ^4.7.0 version: 4.7.0(monaco-editor@0.55.1)(react-dom@19.2.1(react@19.2.1))(react@19.2.1) @@ -217,8 +215,8 @@ importers: specifier: ^1.2.1 version: 1.2.1 lexical: - specifier: 0.37.0 - version: 0.37.0 + specifier: ^0.38.2 + version: 0.38.2 line-clamp: specifier: ^1.0.0 version: 1.0.0 @@ -2135,125 +2133,77 @@ packages: '@jridgewell/trace-mapping@0.3.9': resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} - '@lexical/clipboard@0.36.2': - resolution: {integrity: sha512-l7z52jltlMz1HmJRmG7ZdxySPjheRRxdV/75QEnzalMtqfLPgh4G5IpycISjbX+95PgEaC6rXbcjPix0CyHDJg==} - - '@lexical/clipboard@0.37.0': - resolution: {integrity: sha512-hRwASFX/ilaI5r8YOcZuQgONFshRgCPfdxfofNL7uruSFYAO6LkUhsjzZwUgf0DbmCJmbBADFw15FSthgCUhGA==} - '@lexical/clipboard@0.38.2': resolution: {integrity: sha512-dDShUplCu8/o6BB9ousr3uFZ9bltR+HtleF/Tl8FXFNPpZ4AXhbLKUoJuucRuIr+zqT7RxEv/3M6pk/HEoE6NQ==} - '@lexical/code@0.36.2': - resolution: {integrity: sha512-dfS62rNo3uKwNAJQ39zC+8gYX0k8UAoW7u+JPIqx+K2VPukZlvpsPLNGft15pdWBkHc7Pv+o9gJlB6gGv+EBfA==} - '@lexical/code@0.38.2': resolution: {integrity: sha512-wpqgbmPsfi/+8SYP0zI2kml09fGPRhzO5litR9DIbbSGvcbawMbRNcKLO81DaTbsJRnBJiQvbBBBJAwZKRqgBw==} - '@lexical/devtools-core@0.36.2': - resolution: {integrity: sha512-G+XW7gR/SCx3YgX4FK9wAIn6AIOkC+j8zRPWrS3GQNZ15CE0QkwQl3IyQ7XW9KzWmdRMs6yTmTVnENFa1JLzXg==} + '@lexical/devtools-core@0.38.2': + resolution: {integrity: sha512-hlN0q7taHNzG47xKynQLCAFEPOL8l6IP79C2M18/FE1+htqNP35q4rWhYhsptGlKo4me4PtiME7mskvr7T4yqA==} peerDependencies: react: '>=17.x' react-dom: '>=17.x' - '@lexical/dragon@0.36.2': - resolution: {integrity: sha512-VWNjYaH74uQ8MFKkl80pTofojpEnTYSX2sgHyZmo1Lk1cKLHK25pMnWgAxPAMLQD5/RW/2PtZcK+j0Kfoe5lSQ==} - - '@lexical/extension@0.36.2': - resolution: {integrity: sha512-NWxtqMFMzScq4Eemqp1ST2KREIfj57fUbn7qHv+mMnYgQZK4iIhrHKo5klonxi1oBURcxUZMIbdtH7MJ4BdisA==} - - '@lexical/extension@0.37.0': - resolution: {integrity: sha512-Z58f2tIdz9bn8gltUu5cVg37qROGha38dUZv20gI2GeNugXAkoPzJYEcxlI1D/26tkevJ/7VaFUr9PTk+iKmaA==} + '@lexical/dragon@0.38.2': + resolution: {integrity: sha512-riOhgo+l4oN50RnLGhcqeUokVlMZRc+NDrxRNs2lyKSUdC4vAhAmAVUHDqYPyb4K4ZSw4ebZ3j8hI2zO4O3BbA==} '@lexical/extension@0.38.2': resolution: {integrity: sha512-qbUNxEVjAC0kxp7hEMTzktj0/51SyJoIJWK6Gm790b4yNBq82fEPkksfuLkRg9VQUteD0RT1Nkjy8pho8nNamw==} - '@lexical/hashtag@0.36.2': - resolution: {integrity: 
sha512-WdmKtzXFcahQT3ShFDeHF6LCR5C8yvFCj3ImI09rZwICrYeonbMrzsBUxS1joBz0HQ+ufF9Tx+RxLvGWx6WxzQ==} + '@lexical/hashtag@0.38.2': + resolution: {integrity: sha512-jNI4Pv+plth39bjOeeQegMypkjDmoMWBMZtV0lCynBpkkPFlfMnyL9uzW/IxkZnX8LXWSw5mbWk07nqOUNTCrA==} - '@lexical/history@0.36.2': - resolution: {integrity: sha512-pnS36gyMWz1yq/3Z2jv0gUxjJfas5j0GZOM4rFTzDAHjRVc5q3Ua4ElwekdcLaPPGpUlcg3jghIGWa2pSeoPvA==} - - '@lexical/html@0.36.2': - resolution: {integrity: sha512-fgqALzgKnoy93G0yFyYD4C4qJTSMZyUt4JE5kj/POFwWNOnXThIqJhQGwBvH/ibImpIfOeds2TrSr8PbStlrNg==} - - '@lexical/html@0.37.0': - resolution: {integrity: sha512-oTsBc45eL8/lmF7fqGR+UCjrJYP04gumzf5nk4TczrxWL2pM4GIMLLKG1mpQI2H1MDiRLzq3T/xdI7Gh74z7Zw==} + '@lexical/history@0.38.2': + resolution: {integrity: sha512-QWPwoVDMe/oJ0+TFhy78TDi7TWU/8bcDRFUNk1nWgbq7+2m+5MMoj90LmOFwakQHnCVovgba2qj+atZrab1dsQ==} '@lexical/html@0.38.2': resolution: {integrity: sha512-pC5AV+07bmHistRwgG3NJzBMlIzSdxYO6rJU4eBNzyR4becdiLsI4iuv+aY7PhfSv+SCs7QJ9oc4i5caq48Pkg==} - '@lexical/link@0.36.2': - resolution: {integrity: sha512-Zb+DeHA1po8VMiOAAXsBmAHhfWmQttsUkI5oiZUmOXJruRuQ2rVr01NoxHpoEpLwHOABVNzD3PMbwov+g3c7lg==} - - '@lexical/list@0.36.2': - resolution: {integrity: sha512-JpaIaE0lgNUrAR7iaCaIoETcCKG9EvZjM3G71VxiexTs7PltmEMq36LUlO2goafWurP7knG2rUpVnTcuSbYYeA==} - - '@lexical/list@0.37.0': - resolution: {integrity: sha512-AOC6yAA3mfNvJKbwo+kvAbPJI+13yF2ISA65vbA578CugvJ08zIVgM+pSzxquGhD0ioJY3cXVW7+gdkCP1qu5g==} + '@lexical/link@0.38.2': + resolution: {integrity: sha512-UOKTyYqrdCR9+7GmH6ZVqJTmqYefKGMUHMGljyGks+OjOGZAQs78S1QgcPEqltDy+SSdPSYK7wAo6gjxZfEq9g==} '@lexical/list@0.38.2': resolution: {integrity: sha512-OQm9TzatlMrDZGxMxbozZEHzMJhKxAbH1TOnOGyFfzpfjbnFK2y8oLeVsfQZfZRmiqQS4Qc/rpFnRP2Ax5dsbA==} - '@lexical/mark@0.36.2': - resolution: {integrity: sha512-n0MNXtGH+1i43hglgHjpQV0093HmIiFR7Budg2BJb8ZNzO1KZRqeXAHlA5ZzJ698FkAnS4R5bqG9tZ0JJHgAuA==} + '@lexical/mark@0.38.2': + resolution: {integrity: sha512-U+8KGwc3cP5DxSs15HfkP2YZJDs5wMbWQAwpGqep9bKphgxUgjPViKhdi+PxIt2QEzk7WcoZWUsK1d2ty/vSmg==} - '@lexical/markdown@0.36.2': - resolution: {integrity: sha512-jI4McaVKUo8ADOYNCB5LnYyxXDyOWBOofM05r42R9QIMyUxGryo43WNPMAYXzCgtHlkQv+FNles9OlQY0IlAag==} + '@lexical/markdown@0.38.2': + resolution: {integrity: sha512-ykQJ9KUpCs1+Ak6ZhQMP6Slai4/CxfLEGg/rSHNVGbcd7OaH/ICtZN5jOmIe9ExfXMWy1o8PyMu+oAM3+AWFgA==} - '@lexical/offset@0.36.2': - resolution: {integrity: sha512-+QQNwzFW/joes3DhNINpGdEX6O5scUTs4n8pYDyM/3pWb+8oCHRaRtEmpUU9HStbdy/pK2kQ9XdztkrNvP/ilA==} + '@lexical/offset@0.38.2': + resolution: {integrity: sha512-uDky2palcY+gE6WTv6q2umm2ioTUnVqcaWlEcchP6A310rI08n6rbpmkaLSIh3mT2GJQN2QcN2x0ct5BQmKIpA==} - '@lexical/overflow@0.36.2': - resolution: {integrity: sha512-bLaEe93iZIJH5wDh6e/DTZVNz7xO7lMS5akcJW8CIwopr4I/Qv2uCvc4G1bMMHx2xM1gVxstn5rFgIUP8/Gqlg==} + '@lexical/overflow@0.38.2': + resolution: {integrity: sha512-f6vkTf+YZF0EuKvUK3goh4jrnF+Z0koiNMO+7rhSMLooc5IlD/4XXix4ZLiIktUWq4BhO84b82qtrO+6oPUxtw==} - '@lexical/plain-text@0.36.2': - resolution: {integrity: sha512-c9F/+WHl2QuXVhu+1bBVo6BIrSjCcixLe5ePKxoUpy+B7W72s3VCoAQZp+pmtPIyodDLmZAx78hZBBlzoIOeeg==} + '@lexical/plain-text@0.38.2': + resolution: {integrity: sha512-xRYNHJJFCbaQgr0uErW8Im2Phv1nWHIT4VSoAlBYqLuVGZBD4p61dqheBwqXWlGGJFk+MY5C5URLiMicgpol7A==} - '@lexical/react@0.36.2': - resolution: {integrity: sha512-mPVm1BmeuMsMpVyUplgc0btOI8+Vm9bZj4AftgfMSkvzkr8i6NkLn8LV5IlEnoRvxXkjOExwlwBwdQte5ZGvNw==} + '@lexical/react@0.38.2': + resolution: {integrity: 
sha512-M3z3MkWyw3Msg4Hojr5TnO4TzL71NVPVNGoavESjdgJbTdv1ezcQqjE4feq+qs7H9jytZeuK8wsEOJfSPmNd8w==} peerDependencies: react: '>=17.x' react-dom: '>=17.x' - '@lexical/rich-text@0.36.2': - resolution: {integrity: sha512-dZ7zAIv5NBrh1ApxIT9bayn96zfQHHdnT+oaqmR+q100Vo2uROeR/ZF5igeAuwYGM1Z3ZWDBvNxRKd1d6FWiZw==} - - '@lexical/selection@0.36.2': - resolution: {integrity: sha512-n96joW3HCKBmPeESR172BxVE+m8V9SdidQm4kKb9jOZ1Ota+tnam2386TeI6795TWwgjDQJPK3HZNKcX6Gb+Bg==} - - '@lexical/selection@0.37.0': - resolution: {integrity: sha512-Lix1s2r71jHfsTEs4q/YqK2s3uXKOnyA3fd1VDMWysO+bZzRwEO5+qyDvENZ0WrXSDCnlibNFV1HttWX9/zqyw==} + '@lexical/rich-text@0.38.2': + resolution: {integrity: sha512-eFjeOT7YnDZYpty7Zlwlct0UxUSaYu53uLYG+Prs3NoKzsfEK7e7nYsy/BbQFfk5HoM1pYuYxFR2iIX62+YHGw==} '@lexical/selection@0.38.2': resolution: {integrity: sha512-eMFiWlBH6bEX9U9sMJ6PXPxVXTrihQfFeiIlWLuTpEIDF2HRz7Uo1KFRC/yN6q0DQaj7d9NZYA6Mei5DoQuz5w==} - '@lexical/table@0.36.2': - resolution: {integrity: sha512-96rNNPiVbC65i+Jn1QzIsehCS7UVUc69ovrh9Bt4+pXDebZSdZai153Q7RUq8q3AQ5ocK4/SA2kLQfMu0grj3Q==} - - '@lexical/table@0.37.0': - resolution: {integrity: sha512-g7S8ml8kIujEDLWlzYKETgPCQ2U9oeWqdytRuHjHGi/rjAAGHSej5IRqTPIMxNP3VVQHnBoQ+Y9hBtjiuddhgQ==} - '@lexical/table@0.38.2': resolution: {integrity: sha512-uu0i7yz0nbClmHOO5ZFsinRJE6vQnFz2YPblYHAlNigiBedhqMwSv5bedrzDq8nTTHwych3mC63tcyKIrM+I1g==} - '@lexical/text@0.36.2': - resolution: {integrity: sha512-IbbqgRdMAD6Uk9b2+qSVoy+8RVcczrz6OgXvg39+EYD+XEC7Rbw7kDTWzuNSJJpP7vxSO8YDZSaIlP5gNH3qKA==} - '@lexical/text@0.38.2': resolution: {integrity: sha512-+juZxUugtC4T37aE3P0l4I9tsWbogDUnTI/mgYk4Ht9g+gLJnhQkzSA8chIyfTxbj5i0A8yWrUUSw+/xA7lKUQ==} - '@lexical/utils@0.36.2': - resolution: {integrity: sha512-P9+t2Ob10YNGYT/PWEER+1EqH8SAjCNRn+7SBvKbr0IdleGF2JvzbJwAWaRwZs1c18P11XdQZ779dGvWlfwBIw==} - - '@lexical/utils@0.37.0': - resolution: {integrity: sha512-CFp4diY/kR5RqhzQSl/7SwsMod1sgLpI1FBifcOuJ6L/S6YywGpEB4B7aV5zqW21A/jU2T+2NZtxSUn6S+9gMg==} - '@lexical/utils@0.38.2': resolution: {integrity: sha512-y+3rw15r4oAWIEXicUdNjfk8018dbKl7dWHqGHVEtqzAYefnEYdfD2FJ5KOTXfeoYfxi8yOW7FvzS4NZDi8Bfw==} - '@lexical/yjs@0.36.2': - resolution: {integrity: sha512-gZ66Mw+uKXTO8KeX/hNKAinXbFg3gnNYraG76lBXCwb/Ka3q34upIY9FUeGOwGVaau3iIDQhE49I+6MugAX2FQ==} + '@lexical/yjs@0.38.2': + resolution: {integrity: sha512-fg6ZHNrVQmy1AAxaTs8HrFbeNTJCaCoEDPi6pqypHQU3QVfqr4nq0L0EcHU/TRlR1CeduEPvZZIjUUxWTZ0u8g==} peerDependencies: yjs: '>=13.5.22' @@ -6363,8 +6313,8 @@ packages: resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} engines: {node: '>= 0.8.0'} - lexical@0.37.0: - resolution: {integrity: sha512-r5VJR2TioQPAsZATfktnJFrGIiy6gjQN8b/+0a2u1d7/QTH7lhbB7byhGSvcq1iaa1TV/xcf/pFV55a5V5hTDQ==} + lexical@0.38.2: + resolution: {integrity: sha512-JJmfsG3c4gwBHzUGffbV7ifMNkKAWMCnYE3xJl87gty7hjyV5f3xq7eqTjP5HFYvO4XpjJvvWO2/djHp5S10tw==} lib0@0.2.114: resolution: {integrity: sha512-gcxmNFzA4hv8UYi8j43uPlQ7CGcyMJ2KQb5kZASw6SnAKAf10hK12i2fjrS3Cl/ugZa5Ui6WwIu1/6MIXiHttQ==} @@ -10842,265 +10792,165 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.5 - '@lexical/clipboard@0.36.2': - dependencies: - '@lexical/html': 0.36.2 - '@lexical/list': 0.36.2 - '@lexical/selection': 0.36.2 - '@lexical/utils': 0.36.2 - lexical: 0.37.0 - - '@lexical/clipboard@0.37.0': - dependencies: - '@lexical/html': 0.37.0 - '@lexical/list': 0.37.0 - '@lexical/selection': 0.37.0 - '@lexical/utils': 0.37.0 - lexical: 0.37.0 - '@lexical/clipboard@0.38.2': dependencies: 
'@lexical/html': 0.38.2 '@lexical/list': 0.38.2 '@lexical/selection': 0.38.2 '@lexical/utils': 0.38.2 - lexical: 0.37.0 - - '@lexical/code@0.36.2': - dependencies: - '@lexical/utils': 0.36.2 - lexical: 0.37.0 - prismjs: 1.30.0 + lexical: 0.38.2 '@lexical/code@0.38.2': dependencies: '@lexical/utils': 0.38.2 - lexical: 0.37.0 + lexical: 0.38.2 prismjs: 1.30.0 - '@lexical/devtools-core@0.36.2(react-dom@19.2.1(react@19.2.1))(react@19.2.1)': + '@lexical/devtools-core@0.38.2(react-dom@19.2.1(react@19.2.1))(react@19.2.1)': dependencies: - '@lexical/html': 0.36.2 - '@lexical/link': 0.36.2 - '@lexical/mark': 0.36.2 - '@lexical/table': 0.36.2 - '@lexical/utils': 0.36.2 - lexical: 0.37.0 + '@lexical/html': 0.38.2 + '@lexical/link': 0.38.2 + '@lexical/mark': 0.38.2 + '@lexical/table': 0.38.2 + '@lexical/utils': 0.38.2 + lexical: 0.38.2 react: 19.2.1 react-dom: 19.2.1(react@19.2.1) - '@lexical/dragon@0.36.2': + '@lexical/dragon@0.38.2': dependencies: - '@lexical/extension': 0.36.2 - lexical: 0.37.0 - - '@lexical/extension@0.36.2': - dependencies: - '@lexical/utils': 0.36.2 - '@preact/signals-core': 1.12.1 - lexical: 0.37.0 - - '@lexical/extension@0.37.0': - dependencies: - '@lexical/utils': 0.37.0 - '@preact/signals-core': 1.12.1 - lexical: 0.37.0 + '@lexical/extension': 0.38.2 + lexical: 0.38.2 '@lexical/extension@0.38.2': dependencies: '@lexical/utils': 0.38.2 '@preact/signals-core': 1.12.1 - lexical: 0.37.0 + lexical: 0.38.2 - '@lexical/hashtag@0.36.2': + '@lexical/hashtag@0.38.2': dependencies: - '@lexical/text': 0.36.2 - '@lexical/utils': 0.36.2 - lexical: 0.37.0 + '@lexical/text': 0.38.2 + '@lexical/utils': 0.38.2 + lexical: 0.38.2 - '@lexical/history@0.36.2': + '@lexical/history@0.38.2': dependencies: - '@lexical/extension': 0.36.2 - '@lexical/utils': 0.36.2 - lexical: 0.37.0 - - '@lexical/html@0.36.2': - dependencies: - '@lexical/selection': 0.36.2 - '@lexical/utils': 0.36.2 - lexical: 0.37.0 - - '@lexical/html@0.37.0': - dependencies: - '@lexical/selection': 0.37.0 - '@lexical/utils': 0.37.0 - lexical: 0.37.0 + '@lexical/extension': 0.38.2 + '@lexical/utils': 0.38.2 + lexical: 0.38.2 '@lexical/html@0.38.2': dependencies: '@lexical/selection': 0.38.2 '@lexical/utils': 0.38.2 - lexical: 0.37.0 + lexical: 0.38.2 - '@lexical/link@0.36.2': + '@lexical/link@0.38.2': dependencies: - '@lexical/extension': 0.36.2 - '@lexical/utils': 0.36.2 - lexical: 0.37.0 - - '@lexical/list@0.36.2': - dependencies: - '@lexical/extension': 0.36.2 - '@lexical/selection': 0.36.2 - '@lexical/utils': 0.36.2 - lexical: 0.37.0 - - '@lexical/list@0.37.0': - dependencies: - '@lexical/extension': 0.37.0 - '@lexical/selection': 0.37.0 - '@lexical/utils': 0.37.0 - lexical: 0.37.0 + '@lexical/extension': 0.38.2 + '@lexical/utils': 0.38.2 + lexical: 0.38.2 '@lexical/list@0.38.2': dependencies: '@lexical/extension': 0.38.2 '@lexical/selection': 0.38.2 '@lexical/utils': 0.38.2 - lexical: 0.37.0 + lexical: 0.38.2 - '@lexical/mark@0.36.2': + '@lexical/mark@0.38.2': dependencies: - '@lexical/utils': 0.36.2 - lexical: 0.37.0 + '@lexical/utils': 0.38.2 + lexical: 0.38.2 - '@lexical/markdown@0.36.2': + '@lexical/markdown@0.38.2': dependencies: - '@lexical/code': 0.36.2 - '@lexical/link': 0.36.2 - '@lexical/list': 0.36.2 - '@lexical/rich-text': 0.36.2 - '@lexical/text': 0.36.2 - '@lexical/utils': 0.36.2 - lexical: 0.37.0 + '@lexical/code': 0.38.2 + '@lexical/link': 0.38.2 + '@lexical/list': 0.38.2 + '@lexical/rich-text': 0.38.2 + '@lexical/text': 0.38.2 + '@lexical/utils': 0.38.2 + lexical: 0.38.2 - '@lexical/offset@0.36.2': + 
'@lexical/offset@0.38.2': dependencies: - lexical: 0.37.0 + lexical: 0.38.2 - '@lexical/overflow@0.36.2': + '@lexical/overflow@0.38.2': dependencies: - lexical: 0.37.0 + lexical: 0.38.2 - '@lexical/plain-text@0.36.2': + '@lexical/plain-text@0.38.2': dependencies: - '@lexical/clipboard': 0.36.2 - '@lexical/dragon': 0.36.2 - '@lexical/selection': 0.36.2 - '@lexical/utils': 0.36.2 - lexical: 0.37.0 + '@lexical/clipboard': 0.38.2 + '@lexical/dragon': 0.38.2 + '@lexical/selection': 0.38.2 + '@lexical/utils': 0.38.2 + lexical: 0.38.2 - '@lexical/react@0.36.2(react-dom@19.2.1(react@19.2.1))(react@19.2.1)(yjs@13.6.27)': + '@lexical/react@0.38.2(react-dom@19.2.1(react@19.2.1))(react@19.2.1)(yjs@13.6.27)': dependencies: '@floating-ui/react': 0.27.16(react-dom@19.2.1(react@19.2.1))(react@19.2.1) - '@lexical/devtools-core': 0.36.2(react-dom@19.2.1(react@19.2.1))(react@19.2.1) - '@lexical/dragon': 0.36.2 - '@lexical/extension': 0.36.2 - '@lexical/hashtag': 0.36.2 - '@lexical/history': 0.36.2 - '@lexical/link': 0.36.2 - '@lexical/list': 0.36.2 - '@lexical/mark': 0.36.2 - '@lexical/markdown': 0.36.2 - '@lexical/overflow': 0.36.2 - '@lexical/plain-text': 0.36.2 - '@lexical/rich-text': 0.36.2 - '@lexical/table': 0.36.2 - '@lexical/text': 0.36.2 - '@lexical/utils': 0.36.2 - '@lexical/yjs': 0.36.2(yjs@13.6.27) - lexical: 0.37.0 + '@lexical/devtools-core': 0.38.2(react-dom@19.2.1(react@19.2.1))(react@19.2.1) + '@lexical/dragon': 0.38.2 + '@lexical/extension': 0.38.2 + '@lexical/hashtag': 0.38.2 + '@lexical/history': 0.38.2 + '@lexical/link': 0.38.2 + '@lexical/list': 0.38.2 + '@lexical/mark': 0.38.2 + '@lexical/markdown': 0.38.2 + '@lexical/overflow': 0.38.2 + '@lexical/plain-text': 0.38.2 + '@lexical/rich-text': 0.38.2 + '@lexical/table': 0.38.2 + '@lexical/text': 0.38.2 + '@lexical/utils': 0.38.2 + '@lexical/yjs': 0.38.2(yjs@13.6.27) + lexical: 0.38.2 react: 19.2.1 react-dom: 19.2.1(react@19.2.1) react-error-boundary: 6.0.0(react@19.2.1) transitivePeerDependencies: - yjs - '@lexical/rich-text@0.36.2': + '@lexical/rich-text@0.38.2': dependencies: - '@lexical/clipboard': 0.36.2 - '@lexical/dragon': 0.36.2 - '@lexical/selection': 0.36.2 - '@lexical/utils': 0.36.2 - lexical: 0.37.0 - - '@lexical/selection@0.36.2': - dependencies: - lexical: 0.37.0 - - '@lexical/selection@0.37.0': - dependencies: - lexical: 0.37.0 + '@lexical/clipboard': 0.38.2 + '@lexical/dragon': 0.38.2 + '@lexical/selection': 0.38.2 + '@lexical/utils': 0.38.2 + lexical: 0.38.2 '@lexical/selection@0.38.2': dependencies: - lexical: 0.37.0 - - '@lexical/table@0.36.2': - dependencies: - '@lexical/clipboard': 0.36.2 - '@lexical/extension': 0.36.2 - '@lexical/utils': 0.36.2 - lexical: 0.37.0 - - '@lexical/table@0.37.0': - dependencies: - '@lexical/clipboard': 0.37.0 - '@lexical/extension': 0.37.0 - '@lexical/utils': 0.37.0 - lexical: 0.37.0 + lexical: 0.38.2 '@lexical/table@0.38.2': dependencies: '@lexical/clipboard': 0.38.2 '@lexical/extension': 0.38.2 '@lexical/utils': 0.38.2 - lexical: 0.37.0 - - '@lexical/text@0.36.2': - dependencies: - lexical: 0.37.0 + lexical: 0.38.2 '@lexical/text@0.38.2': dependencies: - lexical: 0.37.0 - - '@lexical/utils@0.36.2': - dependencies: - '@lexical/list': 0.36.2 - '@lexical/selection': 0.36.2 - '@lexical/table': 0.36.2 - lexical: 0.37.0 - - '@lexical/utils@0.37.0': - dependencies: - '@lexical/list': 0.37.0 - '@lexical/selection': 0.37.0 - '@lexical/table': 0.37.0 - lexical: 0.37.0 + lexical: 0.38.2 '@lexical/utils@0.38.2': dependencies: '@lexical/list': 0.38.2 '@lexical/selection': 0.38.2 '@lexical/table': 
0.38.2 - lexical: 0.37.0 + lexical: 0.38.2 - '@lexical/yjs@0.36.2(yjs@13.6.27)': + '@lexical/yjs@0.38.2(yjs@13.6.27)': dependencies: - '@lexical/offset': 0.36.2 - '@lexical/selection': 0.36.2 - lexical: 0.37.0 + '@lexical/offset': 0.38.2 + '@lexical/selection': 0.38.2 + lexical: 0.38.2 yjs: 13.6.27 '@mdx-js/loader@3.1.1(webpack@5.103.0(esbuild@0.25.0)(uglify-js@3.19.3))': @@ -15815,7 +15665,7 @@ snapshots: prelude-ls: 1.2.1 type-check: 0.4.0 - lexical@0.37.0: {} + lexical@0.38.2: {} lib0@0.2.114: dependencies: From 2f963748377c267483f23bb8ee2f365ee747d652 Mon Sep 17 00:00:00 2001 From: yyh <92089059+lyzno1@users.noreply.github.com> Date: Mon, 8 Dec 2025 14:09:03 +0800 Subject: [PATCH 008/128] perf: optimize marketplace card re-renders with memoization (#29263) --- .../plugins/card/base/download-count.tsx | 6 ++++- .../plugins/card/card-more-info.tsx | 6 ++++- .../plugins/marketplace/list/card-wrapper.tsx | 26 +++++++++++++++---- web/app/components/plugins/provider-card.tsx | 14 +++++++--- 4 files changed, 41 insertions(+), 11 deletions(-) diff --git a/web/app/components/plugins/card/base/download-count.tsx b/web/app/components/plugins/card/base/download-count.tsx index 7b3ae4de28..016a976777 100644 --- a/web/app/components/plugins/card/base/download-count.tsx +++ b/web/app/components/plugins/card/base/download-count.tsx @@ -1,3 +1,4 @@ +import React from 'react' import { RiInstallLine } from '@remixicon/react' import { formatNumber } from '@/utils/format' @@ -5,7 +6,7 @@ type Props = { downloadCount: number } -const DownloadCount = ({ +const DownloadCountComponent = ({ downloadCount, }: Props) => { return ( @@ -16,4 +17,7 @@ const DownloadCount = ({ ) } +// Memoize to prevent unnecessary re-renders +const DownloadCount = React.memo(DownloadCountComponent) + export default DownloadCount diff --git a/web/app/components/plugins/card/card-more-info.tsx b/web/app/components/plugins/card/card-more-info.tsx index 48533615ab..d81c941e96 100644 --- a/web/app/components/plugins/card/card-more-info.tsx +++ b/web/app/components/plugins/card/card-more-info.tsx @@ -1,3 +1,4 @@ +import React from 'react' import DownloadCount from './base/download-count' type Props = { @@ -5,7 +6,7 @@ type Props = { tags: string[] } -const CardMoreInfo = ({ +const CardMoreInfoComponent = ({ downloadCount, tags, }: Props) => { @@ -33,4 +34,7 @@ const CardMoreInfo = ({ ) } +// Memoize to prevent unnecessary re-renders when tags array hasn't changed +const CardMoreInfo = React.memo(CardMoreInfoComponent) + export default CardMoreInfo diff --git a/web/app/components/plugins/marketplace/list/card-wrapper.tsx b/web/app/components/plugins/marketplace/list/card-wrapper.tsx index d2a38b3ce3..785718e697 100644 --- a/web/app/components/plugins/marketplace/list/card-wrapper.tsx +++ b/web/app/components/plugins/marketplace/list/card-wrapper.tsx @@ -1,4 +1,5 @@ 'use client' +import React, { useMemo } from 'react' import { useTheme } from 'next-themes' import { RiArrowRightUpLine } from '@remixicon/react' import { getPluginDetailLinkInMarketplace, getPluginLinkInMarketplace } from '../utils' @@ -17,7 +18,7 @@ type CardWrapperProps = { showInstallButton?: boolean locale?: string } -const CardWrapper = ({ +const CardWrapperComponent = ({ plugin, showInstallButton, locale, @@ -31,6 +32,18 @@ const CardWrapper = ({ const { locale: localeFromLocale } = useI18N() const { getTagLabel } = useTags(t) + // Memoize marketplace link params to prevent unnecessary re-renders + const marketplaceLinkParams = useMemo(() => ({ + language: 
localeFromLocale, + theme, + }), [localeFromLocale, theme]) + + // Memoize tag labels to prevent recreating array on every render + const tagLabels = useMemo(() => + plugin.tags.map(tag => getTagLabel(tag.name)), + [plugin.tags, getTagLabel], + ) + if (showInstallButton) { return (
getTagLabel(tag.name))} + tags={tagLabels} /> } /> { -
+
- + + + Esc + +
+ {cachedImages[currentImage.url].status === 'loading' && ( + + )} + {cachedImages[currentImage.url].status === 'error' && ( +
+ {`Failed to load image: ${currentImage.url}. Please try again.`} + +
+ )} + {cachedImages[currentImage.url].status === 'loaded' && ( +
+ {currentImage.name} +
+ {currentImage.name} + · + {`${cachedImages[currentImage.url].width} ×  ${cachedImages[currentImage.url].height}`} + · + {formatFileSize(currentImage.size)} +
+
+ )} + + +
, + document.body, + ) +} + +export default ImagePreviewer diff --git a/web/app/components/datasets/common/image-uploader/constants.ts b/web/app/components/datasets/common/image-uploader/constants.ts new file mode 100644 index 0000000000..671ed94fcf --- /dev/null +++ b/web/app/components/datasets/common/image-uploader/constants.ts @@ -0,0 +1,7 @@ +export const ACCEPT_TYPES = ['jpg', 'jpeg', 'png', 'gif'] + +export const DEFAULT_IMAGE_FILE_SIZE_LIMIT = 2 + +export const DEFAULT_IMAGE_FILE_BATCH_LIMIT = 5 + +export const DEFAULT_SINGLE_CHUNK_ATTACHMENT_LIMIT = 10 diff --git a/web/app/components/datasets/common/image-uploader/hooks/use-upload.ts b/web/app/components/datasets/common/image-uploader/hooks/use-upload.ts new file mode 100644 index 0000000000..aefe48f0cd --- /dev/null +++ b/web/app/components/datasets/common/image-uploader/hooks/use-upload.ts @@ -0,0 +1,273 @@ +import { useCallback, useEffect, useMemo, useRef, useState } from 'react' +import { useFileUploadConfig } from '@/service/use-common' +import type { FileEntity, FileUploadConfig } from '../types' +import { getFileType, getFileUploadConfig, traverseFileEntry } from '../utils' +import Toast from '@/app/components/base/toast' +import { useTranslation } from 'react-i18next' +import { ACCEPT_TYPES } from '../constants' +import { useFileStore } from '../store' +import { produce } from 'immer' +import { fileUpload, getFileUploadErrorMessage } from '@/app/components/base/file-uploader/utils' +import { v4 as uuid4 } from 'uuid' + +export const useUpload = () => { + const { t } = useTranslation() + const fileStore = useFileStore() + + const [dragging, setDragging] = useState(false) + const uploaderRef = useRef(null) + const dragRef = useRef(null) + const dropRef = useRef(null) + + const { data: fileUploadConfigResponse } = useFileUploadConfig() + + const fileUploadConfig: FileUploadConfig = useMemo(() => { + return getFileUploadConfig(fileUploadConfigResponse) + }, [fileUploadConfigResponse]) + + const handleDragEnter = (e: DragEvent) => { + e.preventDefault() + e.stopPropagation() + if (e.target !== dragRef.current) + setDragging(true) + } + const handleDragOver = (e: DragEvent) => { + e.preventDefault() + e.stopPropagation() + } + const handleDragLeave = (e: DragEvent) => { + e.preventDefault() + e.stopPropagation() + if (e.target === dragRef.current) + setDragging(false) + } + + const checkFileType = useCallback((file: File) => { + const ext = getFileType(file) + return ACCEPT_TYPES.includes(ext.toLowerCase()) + }, []) + + const checkFileSize = useCallback((file: File) => { + const { size } = file + return size <= fileUploadConfig.imageFileSizeLimit * 1024 * 1024 + }, [fileUploadConfig]) + + const showErrorMessage = useCallback((type: 'type' | 'size') => { + if (type === 'type') + Toast.notify({ type: 'error', message: t('common.fileUploader.fileExtensionNotSupport') }) + else + Toast.notify({ type: 'error', message: t('dataset.imageUploader.fileSizeLimitExceeded', { size: fileUploadConfig.imageFileSizeLimit }) }) + }, [fileUploadConfig, t]) + + const getValidFiles = useCallback((files: File[]) => { + let validType = true + let validSize = true + const validFiles = files.filter((file) => { + if (!checkFileType(file)) { + validType = false + return false + } + if (!checkFileSize(file)) { + validSize = false + return false + } + return true + }) + if (!validType) + showErrorMessage('type') + else if (!validSize) + showErrorMessage('size') + + return validFiles + }, [checkFileType, checkFileSize, showErrorMessage]) + + const 
selectHandle = () => { + if (uploaderRef.current) + uploaderRef.current.click() + } + + const handleAddFile = useCallback((newFile: FileEntity) => { + const { + files, + setFiles, + } = fileStore.getState() + + const newFiles = produce(files, (draft) => { + draft.push(newFile) + }) + setFiles(newFiles) + }, [fileStore]) + + const handleUpdateFile = useCallback((newFile: FileEntity) => { + const { + files, + setFiles, + } = fileStore.getState() + + const newFiles = produce(files, (draft) => { + const index = draft.findIndex(file => file.id === newFile.id) + + if (index > -1) + draft[index] = newFile + }) + setFiles(newFiles) + }, [fileStore]) + + const handleRemoveFile = useCallback((fileId: string) => { + const { + files, + setFiles, + } = fileStore.getState() + + const newFiles = files.filter(file => file.id !== fileId) + setFiles(newFiles) + }, [fileStore]) + + const handleReUploadFile = useCallback((fileId: string) => { + const { + files, + setFiles, + } = fileStore.getState() + const index = files.findIndex(file => file.id === fileId) + + if (index > -1) { + const uploadingFile = files[index] + const newFiles = produce(files, (draft) => { + draft[index].progress = 0 + }) + setFiles(newFiles) + fileUpload({ + file: uploadingFile.originalFile!, + onProgressCallback: (progress) => { + handleUpdateFile({ ...uploadingFile, progress }) + }, + onSuccessCallback: (res) => { + handleUpdateFile({ ...uploadingFile, uploadedId: res.id, progress: 100 }) + }, + onErrorCallback: (error?: any) => { + const errorMessage = getFileUploadErrorMessage(error, t('common.fileUploader.uploadFromComputerUploadError'), t) + Toast.notify({ type: 'error', message: errorMessage }) + handleUpdateFile({ ...uploadingFile, progress: -1 }) + }, + }) + } + }, [fileStore, t, handleUpdateFile]) + + const handleLocalFileUpload = useCallback((file: File) => { + const reader = new FileReader() + const isImage = file.type.startsWith('image') + + reader.addEventListener( + 'load', + () => { + const uploadingFile = { + id: uuid4(), + name: file.name, + extension: getFileType(file), + mimeType: file.type, + size: file.size, + progress: 0, + originalFile: file, + base64Url: isImage ? 
reader.result as string : '', + } + handleAddFile(uploadingFile) + fileUpload({ + file: uploadingFile.originalFile, + onProgressCallback: (progress) => { + handleUpdateFile({ ...uploadingFile, progress }) + }, + onSuccessCallback: (res) => { + handleUpdateFile({ + ...uploadingFile, + extension: res.extension, + mimeType: res.mime_type, + size: res.size, + uploadedId: res.id, + progress: 100, + }) + }, + onErrorCallback: (error?: any) => { + const errorMessage = getFileUploadErrorMessage(error, t('common.fileUploader.uploadFromComputerUploadError'), t) + Toast.notify({ type: 'error', message: errorMessage }) + handleUpdateFile({ ...uploadingFile, progress: -1 }) + }, + }) + }, + false, + ) + reader.addEventListener( + 'error', + () => { + Toast.notify({ type: 'error', message: t('common.fileUploader.uploadFromComputerReadError') }) + }, + false, + ) + reader.readAsDataURL(file) + }, [t, handleAddFile, handleUpdateFile]) + + const handleFileUpload = useCallback((newFiles: File[]) => { + const { files } = fileStore.getState() + const { singleChunkAttachmentLimit } = fileUploadConfig + if (newFiles.length === 0) return + if (files.length + newFiles.length > singleChunkAttachmentLimit) { + Toast.notify({ + type: 'error', + message: t('datasetHitTesting.imageUploader.singleChunkAttachmentLimitTooltip', { limit: singleChunkAttachmentLimit }), + }) + return + } + for (const file of newFiles) + handleLocalFileUpload(file) + }, [fileUploadConfig, fileStore, t, handleLocalFileUpload]) + + const fileChangeHandle = useCallback((e: React.ChangeEvent) => { + const { imageFileBatchLimit } = fileUploadConfig + const files = Array.from(e.target.files ?? []).slice(0, imageFileBatchLimit) + const validFiles = getValidFiles(files) + handleFileUpload(validFiles) + }, [getValidFiles, handleFileUpload, fileUploadConfig]) + + const handleDrop = useCallback(async (e: DragEvent) => { + e.preventDefault() + e.stopPropagation() + setDragging(false) + if (!e.dataTransfer) return + const nested = await Promise.all( + Array.from(e.dataTransfer.items).map((it) => { + const entry = (it as any).webkitGetAsEntry?.() + if (entry) return traverseFileEntry(entry) + const f = it.getAsFile?.() + return f ? 
Promise.resolve([f]) : Promise.resolve([]) + }), + ) + const files = nested.flat().slice(0, fileUploadConfig.imageFileBatchLimit) + const validFiles = getValidFiles(files) + handleFileUpload(validFiles) + }, [fileUploadConfig, handleFileUpload, getValidFiles]) + + useEffect(() => { + dropRef.current?.addEventListener('dragenter', handleDragEnter) + dropRef.current?.addEventListener('dragover', handleDragOver) + dropRef.current?.addEventListener('dragleave', handleDragLeave) + dropRef.current?.addEventListener('drop', handleDrop) + return () => { + dropRef.current?.removeEventListener('dragenter', handleDragEnter) + dropRef.current?.removeEventListener('dragover', handleDragOver) + dropRef.current?.removeEventListener('dragleave', handleDragLeave) + dropRef.current?.removeEventListener('drop', handleDrop) + } + }, [handleDrop]) + + return { + dragging, + fileUploadConfig, + dragRef, + dropRef, + uploaderRef, + fileChangeHandle, + selectHandle, + handleRemoveFile, + handleReUploadFile, + handleLocalFileUpload, + } +} diff --git a/web/app/components/datasets/common/image-uploader/image-uploader-in-chunk/image-input.tsx b/web/app/components/datasets/common/image-uploader/image-uploader-in-chunk/image-input.tsx new file mode 100644 index 0000000000..3e15b92705 --- /dev/null +++ b/web/app/components/datasets/common/image-uploader/image-uploader-in-chunk/image-input.tsx @@ -0,0 +1,64 @@ +import React from 'react' +import cn from '@/utils/classnames' +import { RiUploadCloud2Line } from '@remixicon/react' +import { useTranslation } from 'react-i18next' +import { useUpload } from '../hooks/use-upload' +import { ACCEPT_TYPES } from '../constants' + +const ImageUploader = () => { + const { t } = useTranslation() + + const { + dragging, + fileUploadConfig, + dragRef, + dropRef, + uploaderRef, + fileChangeHandle, + selectHandle, + } = useUpload() + + return ( +
+ `.${ext}`).join(',')} + onChange={fileChangeHandle} + /> +
+
+ +
+ {t('dataset.imageUploader.button')} + + {t('dataset.imageUploader.browse')} + +
+
+
+ {t('dataset.imageUploader.tip', { + size: fileUploadConfig.imageFileSizeLimit, + supportTypes: ACCEPT_TYPES.join(', '), + batchCount: fileUploadConfig.imageFileBatchLimit, + })} +
+ {dragging &&
} +
+
+ ) +} + +export default React.memo(ImageUploader) diff --git a/web/app/components/datasets/common/image-uploader/image-uploader-in-chunk/image-item.tsx b/web/app/components/datasets/common/image-uploader/image-uploader-in-chunk/image-item.tsx new file mode 100644 index 0000000000..a5bfb65fa2 --- /dev/null +++ b/web/app/components/datasets/common/image-uploader/image-uploader-in-chunk/image-item.tsx @@ -0,0 +1,95 @@ +import { + memo, + useCallback, +} from 'react' +import { + RiCloseLine, +} from '@remixicon/react' +import FileImageRender from '@/app/components/base/file-uploader/file-image-render' +import type { FileEntity } from '../types' +import ProgressCircle from '@/app/components/base/progress-bar/progress-circle' +import { ReplayLine } from '@/app/components/base/icons/src/vender/other' +import { fileIsUploaded } from '../utils' +import Button from '@/app/components/base/button' + +type ImageItemProps = { + file: FileEntity + showDeleteAction?: boolean + onRemove?: (fileId: string) => void + onReUpload?: (fileId: string) => void + onPreview?: (fileId: string) => void +} +const ImageItem = ({ + file, + showDeleteAction, + onRemove, + onReUpload, + onPreview, +}: ImageItemProps) => { + const { id, progress, base64Url, sourceUrl } = file + + const handlePreview = useCallback((e: React.MouseEvent) => { + e.stopPropagation() + e.preventDefault() + onPreview?.(id) + }, [onPreview, id]) + + const handleRemove = useCallback((e: React.MouseEvent) => { + e.stopPropagation() + e.preventDefault() + onRemove?.(id) + }, [onRemove, id]) + + const handleReUpload = useCallback((e: React.MouseEvent) => { + e.stopPropagation() + e.preventDefault() + onReUpload?.(id) + }, [onReUpload, id]) + + return ( +
+ { + showDeleteAction && ( + + ) + } + + { + progress >= 0 && !fileIsUploaded(file) && ( +
+ +
+ ) + } + { + progress === -1 && ( +
+ +
+ ) + } +
+ ) +} + +export default memo(ImageItem) diff --git a/web/app/components/datasets/common/image-uploader/image-uploader-in-chunk/index.tsx b/web/app/components/datasets/common/image-uploader/image-uploader-in-chunk/index.tsx new file mode 100644 index 0000000000..3efa3a19d7 --- /dev/null +++ b/web/app/components/datasets/common/image-uploader/image-uploader-in-chunk/index.tsx @@ -0,0 +1,94 @@ +import { + FileContextProvider, + useFileStoreWithSelector, +} from '../store' +import type { FileEntity } from '../types' +import FileItem from './image-item' +import { useUpload } from '../hooks/use-upload' +import ImageInput from './image-input' +import cn from '@/utils/classnames' +import { useCallback, useState } from 'react' +import type { ImageInfo } from '@/app/components/datasets/common/image-previewer' +import ImagePreviewer from '@/app/components/datasets/common/image-previewer' + +type ImageUploaderInChunkProps = { + disabled?: boolean + className?: string +} +const ImageUploaderInChunk = ({ + disabled, + className, +}: ImageUploaderInChunkProps) => { + const files = useFileStoreWithSelector(s => s.files) + const [previewIndex, setPreviewIndex] = useState(0) + const [previewImages, setPreviewImages] = useState([]) + + const handleImagePreview = useCallback((fileId: string) => { + const index = files.findIndex(item => item.id === fileId) + if (index === -1) return + setPreviewIndex(index) + setPreviewImages(files.map(item => ({ + url: item.base64Url || item.sourceUrl || '', + name: item.name, + size: item.size, + }))) + }, [files]) + + const handleClosePreview = useCallback(() => { + setPreviewImages([]) + }, []) + + const { + handleRemoveFile, + handleReUploadFile, + } = useUpload() + + return ( +
+ {!disabled && } +
+ { + files.map(file => ( + + )) + } +
+ {previewImages.length > 0 && ( + + )} +
+ ) +} + +export type ImageUploaderInChunkWrapperProps = { + value?: FileEntity[] + onChange: (files: FileEntity[]) => void +} & ImageUploaderInChunkProps + +const ImageUploaderInChunkWrapper = ({ + value, + onChange, + ...props +}: ImageUploaderInChunkWrapperProps) => { + return ( + + + + ) +} + +export default ImageUploaderInChunkWrapper diff --git a/web/app/components/datasets/common/image-uploader/image-uploader-in-retrieval-testing/image-input.tsx b/web/app/components/datasets/common/image-uploader/image-uploader-in-retrieval-testing/image-input.tsx new file mode 100644 index 0000000000..4f230e3957 --- /dev/null +++ b/web/app/components/datasets/common/image-uploader/image-uploader-in-retrieval-testing/image-input.tsx @@ -0,0 +1,64 @@ +import React from 'react' +import { useTranslation } from 'react-i18next' +import { useUpload } from '../hooks/use-upload' +import { ACCEPT_TYPES } from '../constants' +import { useFileStoreWithSelector } from '../store' +import { RiImageAddLine } from '@remixicon/react' +import Tooltip from '@/app/components/base/tooltip' + +const ImageUploader = () => { + const { t } = useTranslation() + const files = useFileStoreWithSelector(s => s.files) + + const { + fileUploadConfig, + uploaderRef, + fileChangeHandle, + selectHandle, + } = useUpload() + + return ( +
+ `.${ext}`).join(',')} + onChange={fileChangeHandle} + /> +
+ +
+
+ +
+ {files.length === 0 && ( + + {t('datasetHitTesting.imageUploader.tip', { + size: fileUploadConfig.imageFileSizeLimit, + batchCount: fileUploadConfig.imageFileBatchLimit, + })} + + )} +
+
+
+
+ ) +} + +export default React.memo(ImageUploader) diff --git a/web/app/components/datasets/common/image-uploader/image-uploader-in-retrieval-testing/image-item.tsx b/web/app/components/datasets/common/image-uploader/image-uploader-in-retrieval-testing/image-item.tsx new file mode 100644 index 0000000000..a47356e560 --- /dev/null +++ b/web/app/components/datasets/common/image-uploader/image-uploader-in-retrieval-testing/image-item.tsx @@ -0,0 +1,95 @@ +import { + memo, + useCallback, +} from 'react' +import { + RiCloseLine, +} from '@remixicon/react' +import FileImageRender from '@/app/components/base/file-uploader/file-image-render' +import type { FileEntity } from '../types' +import ProgressCircle from '@/app/components/base/progress-bar/progress-circle' +import { ReplayLine } from '@/app/components/base/icons/src/vender/other' +import { fileIsUploaded } from '../utils' +import Button from '@/app/components/base/button' + +type ImageItemProps = { + file: FileEntity + showDeleteAction?: boolean + onRemove?: (fileId: string) => void + onReUpload?: (fileId: string) => void + onPreview?: (fileId: string) => void +} +const ImageItem = ({ + file, + showDeleteAction, + onRemove, + onReUpload, + onPreview, +}: ImageItemProps) => { + const { id, progress, base64Url, sourceUrl } = file + + const handlePreview = useCallback((e: React.MouseEvent) => { + e.stopPropagation() + e.preventDefault() + onPreview?.(id) + }, [onPreview, id]) + + const handleRemove = useCallback((e: React.MouseEvent) => { + e.stopPropagation() + e.preventDefault() + onRemove?.(id) + }, [onRemove, id]) + + const handleReUpload = useCallback((e: React.MouseEvent) => { + e.stopPropagation() + e.preventDefault() + onReUpload?.(id) + }, [onReUpload, id]) + + return ( +
+ { + showDeleteAction && ( + + ) + } + + { + progress >= 0 && !fileIsUploaded(file) && ( +
+ +
+ ) + } + { + progress === -1 && ( +
+ +
+ ) + } +
+ ) +} + +export default memo(ImageItem) diff --git a/web/app/components/datasets/common/image-uploader/image-uploader-in-retrieval-testing/index.tsx b/web/app/components/datasets/common/image-uploader/image-uploader-in-retrieval-testing/index.tsx new file mode 100644 index 0000000000..2d04132842 --- /dev/null +++ b/web/app/components/datasets/common/image-uploader/image-uploader-in-retrieval-testing/index.tsx @@ -0,0 +1,131 @@ +import { + useCallback, + useState, +} from 'react' +import { + FileContextProvider, +} from '../store' +import type { FileEntity } from '../types' +import { useUpload } from '../hooks/use-upload' +import ImageInput from './image-input' +import cn from '@/utils/classnames' +import { useTranslation } from 'react-i18next' +import { useFileStoreWithSelector } from '../store' +import ImageItem from './image-item' +import type { ImageInfo } from '@/app/components/datasets/common/image-previewer' +import ImagePreviewer from '@/app/components/datasets/common/image-previewer' + +type ImageUploaderInRetrievalTestingProps = { + textArea: React.ReactNode + actionButton: React.ReactNode + showUploader?: boolean + className?: string + actionAreaClassName?: string +} +const ImageUploaderInRetrievalTesting = ({ + textArea, + actionButton, + showUploader = true, + className, + actionAreaClassName, +}: ImageUploaderInRetrievalTestingProps) => { + const { t } = useTranslation() + const files = useFileStoreWithSelector(s => s.files) + const [previewIndex, setPreviewIndex] = useState(0) + const [previewImages, setPreviewImages] = useState([]) + const { + dragging, + dragRef, + dropRef, + handleRemoveFile, + handleReUploadFile, + } = useUpload() + + const handleImagePreview = useCallback((fileId: string) => { + const index = files.findIndex(item => item.id === fileId) + if (index === -1) return + setPreviewIndex(index) + setPreviewImages(files.map(item => ({ + url: item.base64Url || item.sourceUrl || '', + name: item.name, + size: item.size, + }))) + }, [files]) + + const handleClosePreview = useCallback(() => { + setPreviewImages([]) + }, []) + + return ( +
+ {dragging && ( +
+
{t('datasetHitTesting.imageUploader.dropZoneTip')}
+
+
+ )} + {textArea} + { + showUploader && !!files.length && ( +
+ { + files.map(file => ( + + )) + } +
+ ) + } +
+ {showUploader && } + {actionButton} +
+ {previewImages.length > 0 && ( + + )} +
+ ) +} + +export type ImageUploaderInRetrievalTestingWrapperProps = { + value?: FileEntity[] + onChange: (files: FileEntity[]) => void +} & ImageUploaderInRetrievalTestingProps + +const ImageUploaderInRetrievalTestingWrapper = ({ + value, + onChange, + ...props +}: ImageUploaderInRetrievalTestingWrapperProps) => { + return ( + + + + ) +} + +export default ImageUploaderInRetrievalTestingWrapper diff --git a/web/app/components/datasets/common/image-uploader/store.tsx b/web/app/components/datasets/common/image-uploader/store.tsx new file mode 100644 index 0000000000..e3c9e28a84 --- /dev/null +++ b/web/app/components/datasets/common/image-uploader/store.tsx @@ -0,0 +1,67 @@ +import { + createContext, + useContext, + useRef, +} from 'react' +import { + create, + useStore, +} from 'zustand' +import type { + FileEntity, +} from './types' + +type Shape = { + files: FileEntity[] + setFiles: (files: FileEntity[]) => void +} + +export const createFileStore = ( + value: FileEntity[] = [], + onChange?: (files: FileEntity[]) => void, +) => { + return create(set => ({ + files: value ? [...value] : [], + setFiles: (files) => { + set({ files }) + onChange?.(files) + }, + })) +} + +type FileStore = ReturnType +export const FileContext = createContext(null) + +export function useFileStoreWithSelector(selector: (state: Shape) => T): T { + const store = useContext(FileContext) + if (!store) + throw new Error('Missing FileContext.Provider in the tree') + + return useStore(store, selector) +} + +export const useFileStore = () => { + return useContext(FileContext)! +} + +type FileProviderProps = { + children: React.ReactNode + value?: FileEntity[] + onChange?: (files: FileEntity[]) => void +} +export const FileContextProvider = ({ + children, + value, + onChange, +}: FileProviderProps) => { + const storeRef = useRef(undefined) + + if (!storeRef.current) + storeRef.current = createFileStore(value, onChange) + + return ( + + {children} + + ) +} diff --git a/web/app/components/datasets/common/image-uploader/types.ts b/web/app/components/datasets/common/image-uploader/types.ts new file mode 100644 index 0000000000..e918f2b41e --- /dev/null +++ b/web/app/components/datasets/common/image-uploader/types.ts @@ -0,0 +1,18 @@ +export type FileEntity = { + id: string + name: string + size: number + extension: string + mimeType: string + progress: number // -1: error, 0 ~ 99: uploading, 100: uploaded + originalFile?: File // used for re-uploading + uploadedId?: string // for uploaded image id + sourceUrl?: string // for uploaded image + base64Url?: string // for image preview during uploading +} + +export type FileUploadConfig = { + imageFileSizeLimit: number + imageFileBatchLimit: number + singleChunkAttachmentLimit: number +} diff --git a/web/app/components/datasets/common/image-uploader/utils.ts b/web/app/components/datasets/common/image-uploader/utils.ts new file mode 100644 index 0000000000..842b279a98 --- /dev/null +++ b/web/app/components/datasets/common/image-uploader/utils.ts @@ -0,0 +1,92 @@ +import type { FileUploadConfigResponse } from '@/models/common' +import type { FileEntity } from './types' +import { + DEFAULT_IMAGE_FILE_BATCH_LIMIT, + DEFAULT_IMAGE_FILE_SIZE_LIMIT, + DEFAULT_SINGLE_CHUNK_ATTACHMENT_LIMIT, +} from './constants' + +export const getFileType = (currentFile: File) => { + if (!currentFile) + return '' + + const arr = currentFile.name.split('.') + return arr[arr.length - 1] +} + +type FileWithPath = { + relativePath?: string +} & File + +export const traverseFileEntry = (entry: any, prefix = ''): 
Promise => { + return new Promise((resolve) => { + if (entry.isFile) { + entry.file((file: FileWithPath) => { + file.relativePath = `${prefix}${file.name}` + resolve([file]) + }) + } + else if (entry.isDirectory) { + const reader = entry.createReader() + const entries: any[] = [] + const read = () => { + reader.readEntries(async (results: FileSystemEntry[]) => { + if (!results.length) { + const files = await Promise.all( + entries.map(ent => + traverseFileEntry(ent, `${prefix}${entry.name}/`), + ), + ) + resolve(files.flat()) + } + else { + entries.push(...results) + read() + } + }) + } + read() + } + else { + resolve([]) + } + }) +} + +export const fileIsUploaded = (file: FileEntity) => { + if (file.uploadedId || file.progress === 100) + return true +} + +const getNumberValue = (value: number | string | undefined | null): number => { + if (value === undefined || value === null) + return 0 + if (typeof value === 'number') + return value + if (typeof value === 'string') + return Number(value) + return 0 +} + +export const getFileUploadConfig = (fileUploadConfigResponse: FileUploadConfigResponse | undefined) => { + if (!fileUploadConfigResponse) { + return { + imageFileSizeLimit: DEFAULT_IMAGE_FILE_SIZE_LIMIT, + imageFileBatchLimit: DEFAULT_IMAGE_FILE_BATCH_LIMIT, + singleChunkAttachmentLimit: DEFAULT_SINGLE_CHUNK_ATTACHMENT_LIMIT, + } + } + const { + image_file_batch_limit, + single_chunk_attachment_limit, + attachment_image_file_size_limit, + } = fileUploadConfigResponse + const imageFileSizeLimit = getNumberValue(attachment_image_file_size_limit) + const imageFileBatchLimit = getNumberValue(image_file_batch_limit) + const singleChunkAttachmentLimit = getNumberValue(single_chunk_attachment_limit) + return { + imageFileSizeLimit: imageFileSizeLimit > 0 ? imageFileSizeLimit : DEFAULT_IMAGE_FILE_SIZE_LIMIT, + imageFileBatchLimit: imageFileBatchLimit > 0 ? imageFileBatchLimit : DEFAULT_IMAGE_FILE_BATCH_LIMIT, + singleChunkAttachmentLimit: singleChunkAttachmentLimit > 0 ? 
singleChunkAttachmentLimit : DEFAULT_SINGLE_CHUNK_ATTACHMENT_LIMIT, + } +} diff --git a/web/app/components/datasets/common/retrieval-method-config/index.tsx b/web/app/components/datasets/common/retrieval-method-config/index.tsx index ed230c52ce..c0952ed4a4 100644 --- a/web/app/components/datasets/common/retrieval-method-config/index.tsx +++ b/web/app/components/datasets/common/retrieval-method-config/index.tsx @@ -20,12 +20,14 @@ import { EffectColor } from '../../settings/chunk-structure/types' type Props = { disabled?: boolean value: RetrievalConfig + showMultiModalTip?: boolean onChange: (value: RetrievalConfig) => void } const RetrievalMethodConfig: FC = ({ disabled = false, value, + showMultiModalTip = false, onChange, }) => { const { t } = useTranslation() @@ -110,6 +112,7 @@ const RetrievalMethodConfig: FC = ({ type={RETRIEVE_METHOD.semantic} value={value} onChange={onChange} + showMultiModalTip={showMultiModalTip} /> )} @@ -132,6 +135,7 @@ const RetrievalMethodConfig: FC = ({ type={RETRIEVE_METHOD.fullText} value={value} onChange={onChange} + showMultiModalTip={showMultiModalTip} /> )} @@ -155,6 +159,7 @@ const RetrievalMethodConfig: FC = ({ type={RETRIEVE_METHOD.hybrid} value={value} onChange={onChange} + showMultiModalTip={showMultiModalTip} /> )} diff --git a/web/app/components/datasets/common/retrieval-param-config/index.tsx b/web/app/components/datasets/common/retrieval-param-config/index.tsx index 0c28149d56..2b703cc44d 100644 --- a/web/app/components/datasets/common/retrieval-param-config/index.tsx +++ b/web/app/components/datasets/common/retrieval-param-config/index.tsx @@ -24,16 +24,19 @@ import { import WeightedScore from '@/app/components/app/configuration/dataset-config/params-config/weighted-score' import Toast from '@/app/components/base/toast' import RadioCard from '@/app/components/base/radio-card' +import { AlertTriangle } from '@/app/components/base/icons/src/vender/solid/alertsAndFeedback' type Props = { type: RETRIEVE_METHOD value: RetrievalConfig + showMultiModalTip?: boolean onChange: (value: RetrievalConfig) => void } const RetrievalParamConfig: FC = ({ type, value, + showMultiModalTip = false, onChange, }) => { const { t } = useTranslation() @@ -133,19 +136,32 @@ const RetrievalParamConfig: FC = ({
{ value.reranking_enable && ( - { - onChange({ - ...value, - reranking_model: { - reranking_provider_name: v.provider, - reranking_model_name: v.model, - }, - }) - }} - /> + <> + { + onChange({ + ...value, + reranking_model: { + reranking_provider_name: v.provider, + reranking_model_name: v.model, + }, + }) + }} + /> + {showMultiModalTip && ( +
+
+
+ +
+ + {t('datasetSettings.form.retrievalSetting.multiModalTip')} + +
+ )} + ) }
@@ -239,19 +255,32 @@ const RetrievalParamConfig: FC = ({ } { value.reranking_mode !== RerankingModeEnum.WeightedScore && ( - { - onChange({ - ...value, - reranking_model: { - reranking_provider_name: v.provider, - reranking_model_name: v.model, - }, - }) - }} - /> + <> + { + onChange({ + ...value, + reranking_model: { + reranking_provider_name: v.provider, + reranking_model_name: v.model, + }, + }) + }} + /> + {showMultiModalTip && ( +
+
+
+ +
+ + {t('datasetSettings.form.retrievalSetting.multiModalTip')} + +
+ )} + ) }
diff --git a/web/app/components/datasets/create/file-uploader/index.tsx b/web/app/components/datasets/create/file-uploader/index.tsx index 4aec0d4082..d258ed694e 100644 --- a/web/app/components/datasets/create/file-uploader/index.tsx +++ b/web/app/components/datasets/create/file-uploader/index.tsx @@ -68,11 +68,11 @@ const FileUploader = ({ .join(locale !== LanguagesSupported[1] ? ', ' : '、 ') })() const ACCEPTS = supportTypes.map((ext: string) => `.${ext}`) - const fileUploadConfig = useMemo(() => fileUploadConfigResponse ?? { - file_size_limit: 15, - batch_count_limit: 5, - file_upload_limit: 5, - }, [fileUploadConfigResponse]) + const fileUploadConfig = useMemo(() => ({ + file_size_limit: fileUploadConfigResponse?.file_size_limit ?? 15, + batch_count_limit: fileUploadConfigResponse?.batch_count_limit ?? 5, + file_upload_limit: fileUploadConfigResponse?.file_upload_limit ?? 5, + }), [fileUploadConfigResponse]) const fileListRef = useRef([]) diff --git a/web/app/components/datasets/create/step-two/index.tsx b/web/app/components/datasets/create/step-two/index.tsx index 22d6837754..43be89c326 100644 --- a/web/app/components/datasets/create/step-two/index.tsx +++ b/web/app/components/datasets/create/step-two/index.tsx @@ -1,6 +1,6 @@ 'use client' import type { FC, PropsWithChildren } from 'react' -import React, { useCallback, useEffect, useState } from 'react' +import React, { useCallback, useEffect, useMemo, useState } from 'react' import { useTranslation } from 'react-i18next' import { useContext } from 'use-context-selector' import { @@ -63,6 +63,7 @@ import { AlertTriangle } from '@/app/components/base/icons/src/vender/solid/aler import { noop } from 'lodash-es' import { useDocLink } from '@/context/i18n' import { useInvalidDatasetList } from '@/service/knowledge/use-dataset' +import { checkShowMultiModalTip } from '../../settings/utils' const TextLabel: FC = (props) => { return @@ -495,12 +496,6 @@ const StepTwo = ({ setDefaultConfig(data.rules) setLimitMaxChunkLength(data.limits.indexing_max_segmentation_tokens_length) }, - onError(error) { - Toast.notify({ - type: 'error', - message: `${error}`, - }) - }, }) const getRulesFromDetail = () => { @@ -538,22 +533,8 @@ const StepTwo = ({ setSegmentationType(documentDetail.dataset_process_rule.mode) } - const createFirstDocumentMutation = useCreateFirstDocument({ - onError(error) { - Toast.notify({ - type: 'error', - message: `${error}`, - }) - }, - }) - const createDocumentMutation = useCreateDocument(datasetId!, { - onError(error) { - Toast.notify({ - type: 'error', - message: `${error}`, - }) - }, - }) + const createFirstDocumentMutation = useCreateFirstDocument() + const createDocumentMutation = useCreateDocument(datasetId!) const isCreating = createFirstDocumentMutation.isPending || createDocumentMutation.isPending const invalidDatasetList = useInvalidDatasetList() @@ -613,6 +594,20 @@ const StepTwo = ({ const isModelAndRetrievalConfigDisabled = !!datasetId && !!currentDataset?.data_source_type + const showMultiModalTip = useMemo(() => { + return checkShowMultiModalTip({ + embeddingModel, + rerankingEnable: retrievalConfig.reranking_enable, + rerankModel: { + rerankingProviderName: retrievalConfig.reranking_model.reranking_provider_name, + rerankingModelName: retrievalConfig.reranking_model.reranking_model_name, + }, + indexMethod: indexType, + embeddingModelList, + rerankModelList, + }) + }, [embeddingModel, retrievalConfig.reranking_enable, retrievalConfig.reranking_model, indexType, embeddingModelList, rerankModelList]) + return (
@@ -1012,6 +1007,7 @@ const StepTwo = ({ disabled={isModelAndRetrievalConfigDisabled} value={retrievalConfig} onChange={setRetrievalConfig} + showMultiModalTip={showMultiModalTip} /> ) : ( diff --git a/web/app/components/datasets/documents/create-from-pipeline/data-source/local-file/index.tsx b/web/app/components/datasets/documents/create-from-pipeline/data-source/local-file/index.tsx index 868621e1a3..555f2497ef 100644 --- a/web/app/components/datasets/documents/create-from-pipeline/data-source/local-file/index.tsx +++ b/web/app/components/datasets/documents/create-from-pipeline/data-source/local-file/index.tsx @@ -21,8 +21,6 @@ import dynamic from 'next/dynamic' const SimplePieChart = dynamic(() => import('@/app/components/base/simple-pie-chart'), { ssr: false }) -const FILES_NUMBER_LIMIT = 20 - export type LocalFileProps = { allowedExtensions: string[] notSupportBatchUpload?: boolean @@ -64,10 +62,11 @@ const LocalFile = ({ .join(locale !== LanguagesSupported[1] ? ', ' : '、 ') }, [locale, allowedExtensions]) const ACCEPTS = allowedExtensions.map((ext: string) => `.${ext}`) - const fileUploadConfig = useMemo(() => fileUploadConfigResponse ?? { - file_size_limit: 15, - batch_count_limit: 5, - }, [fileUploadConfigResponse]) + const fileUploadConfig = useMemo(() => ({ + file_size_limit: fileUploadConfigResponse?.file_size_limit ?? 15, + batch_count_limit: fileUploadConfigResponse?.batch_count_limit ?? 5, + file_upload_limit: fileUploadConfigResponse?.file_upload_limit ?? 5, + }), [fileUploadConfigResponse]) const updateFile = useCallback((fileItem: FileItem, progress: number, list: FileItem[]) => { const { setLocalFileList } = dataSourceStore.getState() @@ -186,11 +185,12 @@ const LocalFile = ({ }, [fileUploadConfig, uploadBatchFiles]) const initialUpload = useCallback((files: File[]) => { + const filesCountLimit = fileUploadConfig.file_upload_limit if (!files.length) return false - if (files.length + localFileList.length > FILES_NUMBER_LIMIT && !IS_CE_EDITION) { - notify({ type: 'error', message: t('datasetCreation.stepOne.uploader.validation.filesNumber', { filesNumber: FILES_NUMBER_LIMIT }) }) + if (files.length + localFileList.length > filesCountLimit && !IS_CE_EDITION) { + notify({ type: 'error', message: t('datasetCreation.stepOne.uploader.validation.filesNumber', { filesNumber: filesCountLimit }) }) return false } @@ -203,7 +203,7 @@ const LocalFile = ({ updateFileList(newFiles) fileListRef.current = newFiles uploadMultipleFiles(preparedFiles) - }, [updateFileList, uploadMultipleFiles, notify, t, localFileList]) + }, [fileUploadConfig.file_upload_limit, localFileList.length, updateFileList, uploadMultipleFiles, notify, t]) const handleDragEnter = (e: DragEvent) => { e.preventDefault() @@ -250,9 +250,10 @@ const LocalFile = ({ updateFileList([...fileListRef.current]) } const fileChangeHandle = useCallback((e: React.ChangeEvent) => { - const files = [...(e.target.files ?? [])] as File[] + let files = [...(e.target.files ?? [])] as File[] + files = files.slice(0, fileUploadConfig.batch_count_limit) initialUpload(files.filter(isValid)) - }, [isValid, initialUpload]) + }, [isValid, initialUpload, fileUploadConfig.batch_count_limit]) const { theme } = useTheme() const chartColor = useMemo(() => theme === Theme.dark ? '#5289ff' : '#296dff', [theme]) @@ -305,6 +306,7 @@ const LocalFile = ({ size: fileUploadConfig.file_size_limit, supportTypes: supportTypesShowNames, batchCount: notSupportBatchUpload ? 
1 : fileUploadConfig.batch_count_limit, + totalCount: fileUploadConfig.file_upload_limit, })}
{dragging &&
}
diff --git a/web/app/components/datasets/documents/detail/completed/common/action-buttons.tsx b/web/app/components/datasets/documents/detail/completed/common/action-buttons.tsx index 4bed7b461d..c5d3bf5629 100644 --- a/web/app/components/datasets/documents/detail/completed/common/action-buttons.tsx +++ b/web/app/components/datasets/documents/detail/completed/common/action-buttons.tsx @@ -13,6 +13,7 @@ type IActionButtonsProps = { actionType?: 'edit' | 'add' handleRegeneration?: () => void isChildChunk?: boolean + showRegenerationButton?: boolean } const ActionButtons: FC = ({ @@ -22,6 +23,7 @@ const ActionButtons: FC = ({ actionType = 'edit', handleRegeneration, isChildChunk = false, + showRegenerationButton = true, }) => { const { t } = useTranslation() const docForm = useDocumentContext(s => s.docForm) @@ -54,7 +56,7 @@ const ActionButtons: FC = ({ ESC
- {(isParentChildParagraphMode && actionType === 'edit' && !isChildChunk) + {(isParentChildParagraphMode && actionType === 'edit' && !isChildChunk && showRegenerationButton) ?