From 8ec42336111eb6494f64a57f462a9f388ee98b58 Mon Sep 17 00:00:00 2001 From: Asuka Minato Date: Tue, 27 Jan 2026 20:19:39 +0900 Subject: [PATCH 1/6] fix: doc not gen bug (#31547) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: Stephen Zhou <38493346+hyoban@users.noreply.github.com> --- api/controllers/common/schema.py | 23 +++- api/controllers/console/apikey.py | 4 +- api/controllers/console/app/app.py | 46 ++++++- api/controllers/console/app/app_import.py | 16 +-- api/controllers/console/app/workflow.py | 22 +-- .../console/app/workflow_trigger.py | 17 ++- .../console/datasets/data_source.py | 60 +++++++- api/controllers/console/datasets/datasets.py | 50 +++---- .../console/datasets/datasets_document.py | 21 +-- .../console/datasets/datasets_segments.py | 1 + api/controllers/console/datasets/external.py | 31 ++--- api/controllers/console/datasets/metadata.py | 9 +- .../rag_pipeline_draft_variable.py | 31 ++--- .../rag_pipeline/rag_pipeline_import.py | 28 +++- .../rag_pipeline/rag_pipeline_workflow.py | 34 ++--- .../console/explore/installed_app.py | 18 ++- .../console/explore/recommended_app.py | 13 +- api/controllers/console/explore/trial.py | 55 +++++++- api/controllers/console/workspace/account.py | 26 ++-- api/controllers/console/workspace/members.py | 16 ++- api/controllers/console/workspace/models.py | 38 ++--- api/controllers/console/workspace/plugin.py | 130 +++++++++--------- .../service_api/dataset/dataset.py | 10 +- .../service_api/dataset/document.py | 26 +++- .../service_api/dataset/segment.py | 1 + api/libs/login.py | 8 +- api/services/app_dsl_service.py | 4 +- 27 files changed, 473 insertions(+), 265 deletions(-) diff --git a/api/controllers/common/schema.py b/api/controllers/common/schema.py index e0896a8dc2..a5a3e4ebbd 100644 --- a/api/controllers/common/schema.py +++ b/api/controllers/common/schema.py @@ -1,7 +1,11 @@ """Helpers for registering Pydantic models with Flask-RESTX namespaces.""" 
+from enum import StrEnum + from flask_restx import Namespace -from pydantic import BaseModel +from pydantic import BaseModel, TypeAdapter + +from controllers.console import console_ns DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}" @@ -19,8 +23,25 @@ def register_schema_models(namespace: Namespace, *models: type[BaseModel]) -> No register_schema_model(namespace, model) +def get_or_create_model(model_name: str, field_def): + existing = console_ns.models.get(model_name) + if existing is None: + existing = console_ns.model(model_name, field_def) + return existing + + +def register_enum_models(namespace: Namespace, *models: type[StrEnum]) -> None: + """Register multiple StrEnum with a namespace.""" + for model in models: + namespace.schema_model( + model.__name__, TypeAdapter(model).json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0) + ) + + __all__ = [ "DEFAULT_REF_TEMPLATE_SWAGGER_2_0", + "get_or_create_model", + "register_enum_models", "register_schema_model", "register_schema_models", ] diff --git a/api/controllers/console/apikey.py b/api/controllers/console/apikey.py index 9b0d4b1a78..c81709e985 100644 --- a/api/controllers/console/apikey.py +++ b/api/controllers/console/apikey.py @@ -22,10 +22,10 @@ api_key_fields = { "created_at": TimestampField, } -api_key_list = {"data": fields.List(fields.Nested(api_key_fields), attribute="items")} - api_key_item_model = console_ns.model("ApiKeyItem", api_key_fields) +api_key_list = {"data": fields.List(fields.Nested(api_key_item_model), attribute="items")} + api_key_list_model = console_ns.model( "ApiKeyList", {"data": fields.List(fields.Nested(api_key_item_model), attribute="items")} ) diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py index dad184c54b..8c371da596 100644 --- a/api/controllers/console/app/app.py +++ b/api/controllers/console/app/app.py @@ -9,9 +9,11 @@ from sqlalchemy import select from sqlalchemy.orm import Session from werkzeug.exceptions import 
BadRequest -from controllers.common.schema import register_schema_models +from controllers.common.helpers import FileInfo +from controllers.common.schema import register_enum_models, register_schema_models from controllers.console import console_ns from controllers.console.app.wraps import get_app_model +from controllers.console.workspace.models import LoadBalancingPayload from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_resource_check, @@ -22,18 +24,36 @@ from controllers.console.wraps import ( ) from core.file import helpers as file_helpers from core.ops.ops_trace_manager import OpsTraceManager -from core.workflow.enums import NodeType +from core.rag.retrieval.retrieval_methods import RetrievalMethod +from core.workflow.enums import NodeType, WorkflowExecutionStatus from extensions.ext_database import db from libs.login import current_account_with_tenant, login_required -from models import App, Workflow +from models import App, DatasetPermissionEnum, Workflow from models.model import IconType from services.app_dsl_service import AppDslService, ImportMode from services.app_service import AppService from services.enterprise.enterprise_service import EnterpriseService +from services.entities.knowledge_entities.knowledge_entities import ( + DataSource, + InfoList, + NotionIcon, + NotionInfo, + NotionPage, + PreProcessingRule, + RerankingModel, + Rule, + Segmentation, + WebsiteInfo, + WeightKeywordSetting, + WeightModel, + WeightVectorSetting, +) from services.feature_service import FeatureService ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "completion"] +register_enum_models(console_ns, IconType) + class AppListQuery(BaseModel): page: int = Field(default=1, ge=1, le=99999, description="Page number (1-99999)") @@ -151,7 +171,7 @@ def _build_icon_url(icon_type: str | IconType | None, icon: str | None) -> str | if icon is None or icon_type is None: return None icon_type_value = 
icon_type.value if isinstance(icon_type, IconType) else str(icon_type) - if icon_type_value.lower() != IconType.IMAGE.value: + if icon_type_value.lower() != IconType.IMAGE: return None return file_helpers.get_signed_file_url(icon) @@ -391,6 +411,8 @@ class AppExportResponse(ResponseModel): data: str +register_enum_models(console_ns, RetrievalMethod, WorkflowExecutionStatus, DatasetPermissionEnum) + register_schema_models( console_ns, AppListQuery, @@ -414,6 +436,22 @@ register_schema_models( AppDetailWithSite, AppPagination, AppExportResponse, + Segmentation, + PreProcessingRule, + Rule, + WeightVectorSetting, + WeightKeywordSetting, + WeightModel, + RerankingModel, + InfoList, + NotionInfo, + FileInfo, + WebsiteInfo, + NotionPage, + NotionIcon, + RerankingModel, + DataSource, + LoadBalancingPayload, ) diff --git a/api/controllers/console/app/app_import.py b/api/controllers/console/app/app_import.py index 22e2aeb720..fdef54ba5a 100644 --- a/api/controllers/console/app/app_import.py +++ b/api/controllers/console/app/app_import.py @@ -41,14 +41,14 @@ DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}" class AppImportPayload(BaseModel): mode: str = Field(..., description="Import mode") - yaml_content: str | None = None - yaml_url: str | None = None - name: str | None = None - description: str | None = None - icon_type: str | None = None - icon: str | None = None - icon_background: str | None = None - app_id: str | None = None + yaml_content: str | None = Field(None) + yaml_url: str | None = Field(None) + name: str | None = Field(None) + description: str | None = Field(None) + icon_type: str | None = Field(None) + icon: str | None = Field(None) + icon_background: str | None = Field(None) + app_id: str | None = Field(None) console_ns.schema_model( diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py index acaf85a6b1..755463cb70 100644 --- a/api/controllers/console/app/workflow.py +++ 
b/api/controllers/console/app/workflow.py @@ -12,6 +12,7 @@ from werkzeug.exceptions import Forbidden, InternalServerError, NotFound import services from controllers.console import console_ns from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync +from controllers.console.app.workflow_run import workflow_run_node_execution_model from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError @@ -35,7 +36,6 @@ from extensions.ext_database import db from factories import file_factory, variable_factory from fields.member_fields import simple_account_fields from fields.workflow_fields import workflow_fields, workflow_pagination_fields -from fields.workflow_run_fields import workflow_run_node_execution_fields from libs import helper from libs.datetime_utils import naive_utc_now from libs.helper import TimestampField, uuid_value @@ -88,26 +88,6 @@ workflow_pagination_fields_copy = workflow_pagination_fields.copy() workflow_pagination_fields_copy["items"] = fields.List(fields.Nested(workflow_model), attribute="items") workflow_pagination_model = console_ns.model("WorkflowPagination", workflow_pagination_fields_copy) -# Reuse workflow_run_node_execution_model from workflow_run.py if already registered -# Otherwise register it here -from fields.end_user_fields import simple_end_user_fields - -simple_end_user_model = None -try: - simple_end_user_model = console_ns.models.get("SimpleEndUser") -except AttributeError: - pass -if simple_end_user_model is None: - simple_end_user_model = console_ns.model("SimpleEndUser", simple_end_user_fields) - -workflow_run_node_execution_model = None -try: - workflow_run_node_execution_model = console_ns.models.get("WorkflowRunNodeExecution") -except AttributeError: - pass -if 
workflow_run_node_execution_model is None: - workflow_run_node_execution_model = console_ns.model("WorkflowRunNodeExecution", workflow_run_node_execution_fields) - class SyncDraftWorkflowPayload(BaseModel): graph: dict[str, Any] diff --git a/api/controllers/console/app/workflow_trigger.py b/api/controllers/console/app/workflow_trigger.py index 9433b732e4..8236e766ae 100644 --- a/api/controllers/console/app/workflow_trigger.py +++ b/api/controllers/console/app/workflow_trigger.py @@ -1,13 +1,14 @@ import logging from flask import request -from flask_restx import Resource, marshal_with +from flask_restx import Resource, fields, marshal_with from pydantic import BaseModel from sqlalchemy import select from sqlalchemy.orm import Session from werkzeug.exceptions import NotFound from configs import dify_config +from controllers.common.schema import get_or_create_model from extensions.ext_database import db from fields.workflow_trigger_fields import trigger_fields, triggers_list_fields, webhook_trigger_fields from libs.login import current_user, login_required @@ -22,6 +23,14 @@ from ..wraps import account_initialization_required, edit_permission_required, s logger = logging.getLogger(__name__) DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}" +trigger_model = get_or_create_model("WorkflowTrigger", trigger_fields) + +triggers_list_fields_copy = triggers_list_fields.copy() +triggers_list_fields_copy["data"] = fields.List(fields.Nested(trigger_model)) +triggers_list_model = get_or_create_model("WorkflowTriggerList", triggers_list_fields_copy) + +webhook_trigger_model = get_or_create_model("WebhookTrigger", webhook_trigger_fields) + class Parser(BaseModel): node_id: str @@ -48,7 +57,7 @@ class WebhookTriggerApi(Resource): @login_required @account_initialization_required @get_app_model(mode=AppMode.WORKFLOW) - @marshal_with(webhook_trigger_fields) + @marshal_with(webhook_trigger_model) def get(self, app_model: App): """Get webhook trigger for a node""" args = 
Parser.model_validate(request.args.to_dict(flat=True)) # type: ignore @@ -80,7 +89,7 @@ class AppTriggersApi(Resource): @login_required @account_initialization_required @get_app_model(mode=AppMode.WORKFLOW) - @marshal_with(triggers_list_fields) + @marshal_with(triggers_list_model) def get(self, app_model: App): """Get app triggers list""" assert isinstance(current_user, Account) @@ -120,7 +129,7 @@ class AppTriggerEnableApi(Resource): @account_initialization_required @edit_permission_required @get_app_model(mode=AppMode.WORKFLOW) - @marshal_with(trigger_fields) + @marshal_with(trigger_model) def post(self, app_model: App): """Update app trigger (enable/disable)""" args = ParserEnable.model_validate(console_ns.payload) diff --git a/api/controllers/console/datasets/data_source.py b/api/controllers/console/datasets/data_source.py index d05e726dcb..01e9bf77c0 100644 --- a/api/controllers/console/datasets/data_source.py +++ b/api/controllers/console/datasets/data_source.py @@ -3,13 +3,13 @@ from collections.abc import Generator from typing import Any, cast from flask import request -from flask_restx import Resource, marshal_with +from flask_restx import Resource, fields, marshal_with from pydantic import BaseModel, Field from sqlalchemy import select from sqlalchemy.orm import Session from werkzeug.exceptions import NotFound -from controllers.common.schema import register_schema_model +from controllers.common.schema import get_or_create_model, register_schema_model from core.datasource.entities.datasource_entities import DatasourceProviderType, OnlineDocumentPagesMessage from core.datasource.online_document.online_document_plugin import OnlineDocumentDatasourcePlugin from core.indexing_runner import IndexingRunner @@ -17,7 +17,14 @@ from core.rag.extractor.entity.datasource_type import DatasourceType from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo from core.rag.extractor.notion_extractor import NotionExtractor from 
extensions.ext_database import db -from fields.data_source_fields import integrate_list_fields, integrate_notion_info_list_fields +from fields.data_source_fields import ( + integrate_fields, + integrate_icon_fields, + integrate_list_fields, + integrate_notion_info_list_fields, + integrate_page_fields, + integrate_workspace_fields, +) from libs.datetime_utils import naive_utc_now from libs.login import current_account_with_tenant, login_required from models import DataSourceOauthBinding, Document @@ -49,6 +56,49 @@ class DataSourceNotionPreviewQuery(BaseModel): register_schema_model(console_ns, NotionEstimatePayload) +integrate_icon_model = get_or_create_model("DataSourceIntegrateIcon", integrate_icon_fields) + +integrate_page_fields_copy = integrate_page_fields.copy() +integrate_page_fields_copy["page_icon"] = fields.Nested(integrate_icon_model, allow_null=True) +integrate_page_model = get_or_create_model("DataSourceIntegratePage", integrate_page_fields_copy) + +integrate_workspace_fields_copy = integrate_workspace_fields.copy() +integrate_workspace_fields_copy["pages"] = fields.List(fields.Nested(integrate_page_model)) +integrate_workspace_model = get_or_create_model("DataSourceIntegrateWorkspace", integrate_workspace_fields_copy) + +integrate_fields_copy = integrate_fields.copy() +integrate_fields_copy["source_info"] = fields.Nested(integrate_workspace_model) +integrate_model = get_or_create_model("DataSourceIntegrate", integrate_fields_copy) + +integrate_list_fields_copy = integrate_list_fields.copy() +integrate_list_fields_copy["data"] = fields.List(fields.Nested(integrate_model)) +integrate_list_model = get_or_create_model("DataSourceIntegrateList", integrate_list_fields_copy) + +notion_page_fields = { + "page_name": fields.String, + "page_id": fields.String, + "page_icon": fields.Nested(integrate_icon_model, allow_null=True), + "is_bound": fields.Boolean, + "parent_id": fields.String, + "type": fields.String, +} +notion_page_model = 
get_or_create_model("NotionIntegratePage", notion_page_fields) + +notion_workspace_fields = { + "workspace_name": fields.String, + "workspace_id": fields.String, + "workspace_icon": fields.String, + "pages": fields.List(fields.Nested(notion_page_model)), +} +notion_workspace_model = get_or_create_model("NotionIntegrateWorkspace", notion_workspace_fields) + +integrate_notion_info_list_fields_copy = integrate_notion_info_list_fields.copy() +integrate_notion_info_list_fields_copy["notion_info"] = fields.List(fields.Nested(notion_workspace_model)) +integrate_notion_info_list_model = get_or_create_model( + "NotionIntegrateInfoList", integrate_notion_info_list_fields_copy +) + + @console_ns.route( "/data-source/integrates", "/data-source/integrates//", @@ -57,7 +107,7 @@ class DataSourceApi(Resource): @setup_required @login_required @account_initialization_required - @marshal_with(integrate_list_fields) + @marshal_with(integrate_list_model) def get(self): _, current_tenant_id = current_account_with_tenant() @@ -142,7 +192,7 @@ class DataSourceNotionListApi(Resource): @setup_required @login_required @account_initialization_required - @marshal_with(integrate_notion_info_list_fields) + @marshal_with(integrate_notion_info_list_model) def get(self): current_user, current_tenant_id = current_account_with_tenant() diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index 37c828c3a8..8fbbc51e21 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -8,7 +8,7 @@ from werkzeug.exceptions import Forbidden, NotFound import services from configs import dify_config -from controllers.common.schema import register_schema_models +from controllers.common.schema import get_or_create_model, register_schema_models from controllers.console import console_ns from controllers.console.apikey import ( api_key_item_model, @@ -34,6 +34,7 @@ from core.rag.retrieval.retrieval_methods import 
RetrievalMethod from extensions.ext_database import db from fields.app_fields import app_detail_kernel_fields, related_app_list from fields.dataset_fields import ( + content_fields, dataset_detail_fields, dataset_fields, dataset_query_detail_fields, @@ -41,6 +42,7 @@ from fields.dataset_fields import ( doc_metadata_fields, external_knowledge_info_fields, external_retrieval_model_fields, + file_info_fields, icon_info_fields, keyword_setting_fields, reranking_model_fields, @@ -55,41 +57,33 @@ from models.dataset import DatasetPermissionEnum from models.provider_ids import ModelProviderID from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService - -def _get_or_create_model(model_name: str, field_def): - existing = console_ns.models.get(model_name) - if existing is None: - existing = console_ns.model(model_name, field_def) - return existing - - # Register models for flask_restx to avoid dict type issues in Swagger -dataset_base_model = _get_or_create_model("DatasetBase", dataset_fields) +dataset_base_model = get_or_create_model("DatasetBase", dataset_fields) -tag_model = _get_or_create_model("Tag", tag_fields) +tag_model = get_or_create_model("Tag", tag_fields) -keyword_setting_model = _get_or_create_model("DatasetKeywordSetting", keyword_setting_fields) -vector_setting_model = _get_or_create_model("DatasetVectorSetting", vector_setting_fields) +keyword_setting_model = get_or_create_model("DatasetKeywordSetting", keyword_setting_fields) +vector_setting_model = get_or_create_model("DatasetVectorSetting", vector_setting_fields) weighted_score_fields_copy = weighted_score_fields.copy() weighted_score_fields_copy["keyword_setting"] = fields.Nested(keyword_setting_model) weighted_score_fields_copy["vector_setting"] = fields.Nested(vector_setting_model) -weighted_score_model = _get_or_create_model("DatasetWeightedScore", weighted_score_fields_copy) +weighted_score_model = get_or_create_model("DatasetWeightedScore", 
weighted_score_fields_copy) -reranking_model = _get_or_create_model("DatasetRerankingModel", reranking_model_fields) +reranking_model = get_or_create_model("DatasetRerankingModel", reranking_model_fields) dataset_retrieval_model_fields_copy = dataset_retrieval_model_fields.copy() dataset_retrieval_model_fields_copy["reranking_model"] = fields.Nested(reranking_model) dataset_retrieval_model_fields_copy["weights"] = fields.Nested(weighted_score_model, allow_null=True) -dataset_retrieval_model = _get_or_create_model("DatasetRetrievalModel", dataset_retrieval_model_fields_copy) +dataset_retrieval_model = get_or_create_model("DatasetRetrievalModel", dataset_retrieval_model_fields_copy) -external_knowledge_info_model = _get_or_create_model("ExternalKnowledgeInfo", external_knowledge_info_fields) +external_knowledge_info_model = get_or_create_model("ExternalKnowledgeInfo", external_knowledge_info_fields) -external_retrieval_model = _get_or_create_model("ExternalRetrievalModel", external_retrieval_model_fields) +external_retrieval_model = get_or_create_model("ExternalRetrievalModel", external_retrieval_model_fields) -doc_metadata_model = _get_or_create_model("DatasetDocMetadata", doc_metadata_fields) +doc_metadata_model = get_or_create_model("DatasetDocMetadata", doc_metadata_fields) -icon_info_model = _get_or_create_model("DatasetIconInfo", icon_info_fields) +icon_info_model = get_or_create_model("DatasetIconInfo", icon_info_fields) dataset_detail_fields_copy = dataset_detail_fields.copy() dataset_detail_fields_copy["retrieval_model_dict"] = fields.Nested(dataset_retrieval_model) @@ -98,14 +92,22 @@ dataset_detail_fields_copy["external_knowledge_info"] = fields.Nested(external_k dataset_detail_fields_copy["external_retrieval_model"] = fields.Nested(external_retrieval_model, allow_null=True) dataset_detail_fields_copy["doc_metadata"] = fields.List(fields.Nested(doc_metadata_model)) dataset_detail_fields_copy["icon_info"] = fields.Nested(icon_info_model) 
-dataset_detail_model = _get_or_create_model("DatasetDetail", dataset_detail_fields_copy) +dataset_detail_model = get_or_create_model("DatasetDetail", dataset_detail_fields_copy) -dataset_query_detail_model = _get_or_create_model("DatasetQueryDetail", dataset_query_detail_fields) +file_info_model = get_or_create_model("DatasetFileInfo", file_info_fields) -app_detail_kernel_model = _get_or_create_model("AppDetailKernel", app_detail_kernel_fields) +content_fields_copy = content_fields.copy() +content_fields_copy["file_info"] = fields.Nested(file_info_model, allow_null=True) +content_model = get_or_create_model("DatasetContent", content_fields_copy) + +dataset_query_detail_fields_copy = dataset_query_detail_fields.copy() +dataset_query_detail_fields_copy["queries"] = fields.Nested(content_model) +dataset_query_detail_model = get_or_create_model("DatasetQueryDetail", dataset_query_detail_fields_copy) + +app_detail_kernel_model = get_or_create_model("AppDetailKernel", app_detail_kernel_fields) related_app_list_copy = related_app_list.copy() related_app_list_copy["data"] = fields.List(fields.Nested(app_detail_kernel_model)) -related_app_list_model = _get_or_create_model("RelatedAppList", related_app_list_copy) +related_app_list_model = get_or_create_model("RelatedAppList", related_app_list_copy) def _validate_indexing_technique(value: str | None) -> str | None: diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index 2599e6293a..57fb9abf29 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -14,7 +14,7 @@ from sqlalchemy import asc, desc, select from werkzeug.exceptions import Forbidden, NotFound import services -from controllers.common.schema import register_schema_models +from controllers.common.schema import get_or_create_model, register_schema_models from controllers.console import console_ns from core.errors.error 
import ( LLMBadRequestError, @@ -72,34 +72,27 @@ logger = logging.getLogger(__name__) DOCUMENT_BATCH_DOWNLOAD_ZIP_MAX_DOCS = 100 -def _get_or_create_model(model_name: str, field_def): - existing = console_ns.models.get(model_name) - if existing is None: - existing = console_ns.model(model_name, field_def) - return existing - - # Register models for flask_restx to avoid dict type issues in Swagger -dataset_model = _get_or_create_model("Dataset", dataset_fields) +dataset_model = get_or_create_model("Dataset", dataset_fields) -document_metadata_model = _get_or_create_model("DocumentMetadata", document_metadata_fields) +document_metadata_model = get_or_create_model("DocumentMetadata", document_metadata_fields) document_fields_copy = document_fields.copy() document_fields_copy["doc_metadata"] = fields.List( fields.Nested(document_metadata_model), attribute="doc_metadata_details" ) -document_model = _get_or_create_model("Document", document_fields_copy) +document_model = get_or_create_model("Document", document_fields_copy) document_with_segments_fields_copy = document_with_segments_fields.copy() document_with_segments_fields_copy["doc_metadata"] = fields.List( fields.Nested(document_metadata_model), attribute="doc_metadata_details" ) -document_with_segments_model = _get_or_create_model("DocumentWithSegments", document_with_segments_fields_copy) +document_with_segments_model = get_or_create_model("DocumentWithSegments", document_with_segments_fields_copy) dataset_and_document_fields_copy = dataset_and_document_fields.copy() dataset_and_document_fields_copy["dataset"] = fields.Nested(dataset_model) dataset_and_document_fields_copy["documents"] = fields.List(fields.Nested(document_model)) -dataset_and_document_model = _get_or_create_model("DatasetAndDocument", dataset_and_document_fields_copy) +dataset_and_document_model = get_or_create_model("DatasetAndDocument", dataset_and_document_fields_copy) class DocumentRetryPayload(BaseModel): @@ -1178,7 +1171,7 @@ class 
DocumentRenameApi(DocumentResource): @setup_required @login_required @account_initialization_required - @marshal_with(document_fields) + @marshal_with(document_model) @console_ns.expect(console_ns.models[DocumentRenamePayload.__name__]) def post(self, dataset_id, document_id): # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator diff --git a/api/controllers/console/datasets/datasets_segments.py b/api/controllers/console/datasets/datasets_segments.py index 16fecb41c6..08e1ddd3e0 100644 --- a/api/controllers/console/datasets/datasets_segments.py +++ b/api/controllers/console/datasets/datasets_segments.py @@ -90,6 +90,7 @@ register_schema_models( ChildChunkCreatePayload, ChildChunkUpdatePayload, ChildChunkBatchUpdatePayload, + ChildChunkUpdateArgs, ) diff --git a/api/controllers/console/datasets/external.py b/api/controllers/console/datasets/external.py index 588eb6e1b8..86090bcd10 100644 --- a/api/controllers/console/datasets/external.py +++ b/api/controllers/console/datasets/external.py @@ -4,7 +4,7 @@ from pydantic import BaseModel, Field from werkzeug.exceptions import Forbidden, InternalServerError, NotFound import services -from controllers.common.schema import register_schema_models +from controllers.common.schema import get_or_create_model, register_schema_models from controllers.console import console_ns from controllers.console.datasets.error import DatasetNameDuplicateError from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required @@ -28,34 +28,27 @@ from services.hit_testing_service import HitTestingService from services.knowledge_service import ExternalDatasetTestService -def _get_or_create_model(model_name: str, field_def): - existing = console_ns.models.get(model_name) - if existing is None: - existing = console_ns.model(model_name, field_def) - return existing - - def _build_dataset_detail_model(): - keyword_setting_model = 
_get_or_create_model("DatasetKeywordSetting", keyword_setting_fields) - vector_setting_model = _get_or_create_model("DatasetVectorSetting", vector_setting_fields) + keyword_setting_model = get_or_create_model("DatasetKeywordSetting", keyword_setting_fields) + vector_setting_model = get_or_create_model("DatasetVectorSetting", vector_setting_fields) weighted_score_fields_copy = weighted_score_fields.copy() weighted_score_fields_copy["keyword_setting"] = fields.Nested(keyword_setting_model) weighted_score_fields_copy["vector_setting"] = fields.Nested(vector_setting_model) - weighted_score_model = _get_or_create_model("DatasetWeightedScore", weighted_score_fields_copy) + weighted_score_model = get_or_create_model("DatasetWeightedScore", weighted_score_fields_copy) - reranking_model = _get_or_create_model("DatasetRerankingModel", reranking_model_fields) + reranking_model = get_or_create_model("DatasetRerankingModel", reranking_model_fields) dataset_retrieval_model_fields_copy = dataset_retrieval_model_fields.copy() dataset_retrieval_model_fields_copy["reranking_model"] = fields.Nested(reranking_model) dataset_retrieval_model_fields_copy["weights"] = fields.Nested(weighted_score_model, allow_null=True) - dataset_retrieval_model = _get_or_create_model("DatasetRetrievalModel", dataset_retrieval_model_fields_copy) + dataset_retrieval_model = get_or_create_model("DatasetRetrievalModel", dataset_retrieval_model_fields_copy) - tag_model = _get_or_create_model("Tag", tag_fields) - doc_metadata_model = _get_or_create_model("DatasetDocMetadata", doc_metadata_fields) - external_knowledge_info_model = _get_or_create_model("ExternalKnowledgeInfo", external_knowledge_info_fields) - external_retrieval_model = _get_or_create_model("ExternalRetrievalModel", external_retrieval_model_fields) - icon_info_model = _get_or_create_model("DatasetIconInfo", icon_info_fields) + tag_model = get_or_create_model("Tag", tag_fields) + doc_metadata_model = get_or_create_model("DatasetDocMetadata", 
doc_metadata_fields) + external_knowledge_info_model = get_or_create_model("ExternalKnowledgeInfo", external_knowledge_info_fields) + external_retrieval_model = get_or_create_model("ExternalRetrievalModel", external_retrieval_model_fields) + icon_info_model = get_or_create_model("DatasetIconInfo", icon_info_fields) dataset_detail_fields_copy = dataset_detail_fields.copy() dataset_detail_fields_copy["retrieval_model_dict"] = fields.Nested(dataset_retrieval_model) @@ -64,7 +57,7 @@ def _build_dataset_detail_model(): dataset_detail_fields_copy["external_retrieval_model"] = fields.Nested(external_retrieval_model, allow_null=True) dataset_detail_fields_copy["doc_metadata"] = fields.List(fields.Nested(doc_metadata_model)) dataset_detail_fields_copy["icon_info"] = fields.Nested(icon_info_model) - return _get_or_create_model("DatasetDetail", dataset_detail_fields_copy) + return get_or_create_model("DatasetDetail", dataset_detail_fields_copy) try: diff --git a/api/controllers/console/datasets/metadata.py b/api/controllers/console/datasets/metadata.py index 8eead1696a..05fc4cd714 100644 --- a/api/controllers/console/datasets/metadata.py +++ b/api/controllers/console/datasets/metadata.py @@ -4,14 +4,16 @@ from flask_restx import Resource, marshal_with from pydantic import BaseModel from werkzeug.exceptions import NotFound -from controllers.common.schema import register_schema_model, register_schema_models +from controllers.common.schema import register_schema_models from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required from fields.dataset_fields import dataset_metadata_fields from libs.login import current_account_with_tenant, login_required from services.dataset_service import DatasetService from services.entities.knowledge_entities.knowledge_entities import ( + DocumentMetadataOperation, MetadataArgs, + MetadataDetail, MetadataOperationData, ) from 
services.metadata_service import MetadataService @@ -21,8 +23,9 @@ class MetadataUpdatePayload(BaseModel): name: str -register_schema_models(console_ns, MetadataArgs, MetadataOperationData) -register_schema_model(console_ns, MetadataUpdatePayload) +register_schema_models( + console_ns, MetadataArgs, MetadataOperationData, MetadataUpdatePayload, DocumentMetadataOperation, MetadataDetail +) @console_ns.route("/datasets//metadata") diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py index 720e2ce365..2911b1cf18 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py @@ -2,7 +2,7 @@ import logging from typing import Any, NoReturn from flask import Response, request -from flask_restx import Resource, fields, marshal, marshal_with +from flask_restx import Resource, marshal, marshal_with from pydantic import BaseModel, Field from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden @@ -14,7 +14,9 @@ from controllers.console.app.error import ( ) from controllers.console.app.workflow_draft_variable import ( _WORKFLOW_DRAFT_VARIABLE_FIELDS, # type: ignore[private-usage] - _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS, # type: ignore[private-usage] + workflow_draft_variable_list_model, + workflow_draft_variable_list_without_value_model, + workflow_draft_variable_model, ) from controllers.console.datasets.wraps import get_rag_pipeline from controllers.console.wraps import account_initialization_required, setup_required @@ -27,7 +29,6 @@ from factories.variable_factory import build_segment_with_type from libs.login import current_user, login_required from models import Account from models.dataset import Pipeline -from models.workflow import WorkflowDraftVariable from services.rag_pipeline.rag_pipeline import RagPipelineService 
from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService @@ -52,20 +53,6 @@ class WorkflowDraftVariablePatchPayload(BaseModel): register_schema_models(console_ns, WorkflowDraftVariablePatchPayload) -def _get_items(var_list: WorkflowDraftVariableList) -> list[WorkflowDraftVariable]: - return var_list.variables - - -_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS = { - "items": fields.List(fields.Nested(_WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS), attribute=_get_items), - "total": fields.Raw(), -} - -_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS = { - "items": fields.List(fields.Nested(_WORKFLOW_DRAFT_VARIABLE_FIELDS), attribute=_get_items), -} - - def _api_prerequisite(f): """Common prerequisites for all draft workflow variable APIs. @@ -92,7 +79,7 @@ def _api_prerequisite(f): @console_ns.route("/rag/pipelines//workflows/draft/variables") class RagPipelineVariableCollectionApi(Resource): @_api_prerequisite - @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS) + @marshal_with(workflow_draft_variable_list_without_value_model) def get(self, pipeline: Pipeline): """ Get draft workflow @@ -150,7 +137,7 @@ def validate_node_id(node_id: str) -> NoReturn | None: @console_ns.route("/rag/pipelines//workflows/draft/nodes//variables") class RagPipelineNodeVariableCollectionApi(Resource): @_api_prerequisite - @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + @marshal_with(workflow_draft_variable_list_model) def get(self, pipeline: Pipeline, node_id: str): validate_node_id(node_id) with Session(bind=db.engine, expire_on_commit=False) as session: @@ -176,7 +163,7 @@ class RagPipelineVariableApi(Resource): _PATCH_VALUE_FIELD = "value" @_api_prerequisite - @marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS) + @marshal_with(workflow_draft_variable_model) def get(self, pipeline: Pipeline, variable_id: str): draft_var_srv = WorkflowDraftVariableService( session=db.session(), @@ -189,7 +176,7 @@ class 
RagPipelineVariableApi(Resource): return variable @_api_prerequisite - @marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS) + @marshal_with(workflow_draft_variable_model) @console_ns.expect(console_ns.models[WorkflowDraftVariablePatchPayload.__name__]) def patch(self, pipeline: Pipeline, variable_id: str): # Request payload for file types: @@ -307,7 +294,7 @@ def _get_variable_list(pipeline: Pipeline, node_id) -> WorkflowDraftVariableList @console_ns.route("/rag/pipelines//workflows/draft/system-variables") class RagPipelineSystemVariableCollectionApi(Resource): @_api_prerequisite - @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + @marshal_with(workflow_draft_variable_list_model) def get(self, pipeline: Pipeline): return _get_variable_list(pipeline, SYSTEM_VARIABLE_NODE_ID) diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py index d43ee9a6e0..af142b4646 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py @@ -1,9 +1,9 @@ from flask import request -from flask_restx import Resource, marshal_with # type: ignore +from flask_restx import Resource, fields, marshal_with # type: ignore from pydantic import BaseModel, Field from sqlalchemy.orm import Session -from controllers.common.schema import register_schema_models +from controllers.common.schema import get_or_create_model, register_schema_models from controllers.console import console_ns from controllers.console.datasets.wraps import get_rag_pipeline from controllers.console.wraps import ( @@ -12,7 +12,11 @@ from controllers.console.wraps import ( setup_required, ) from extensions.ext_database import db -from fields.rag_pipeline_fields import pipeline_import_check_dependencies_fields, pipeline_import_fields +from fields.rag_pipeline_fields import ( + leaked_dependency_fields, + 
pipeline_import_check_dependencies_fields, + pipeline_import_fields, +) from libs.login import current_account_with_tenant, login_required from models.dataset import Pipeline from services.app_dsl_service import ImportStatus @@ -38,13 +42,25 @@ class IncludeSecretQuery(BaseModel): register_schema_models(console_ns, RagPipelineImportPayload, IncludeSecretQuery) +pipeline_import_model = get_or_create_model("RagPipelineImport", pipeline_import_fields) + +leaked_dependency_model = get_or_create_model("RagPipelineLeakedDependency", leaked_dependency_fields) +pipeline_import_check_dependencies_fields_copy = pipeline_import_check_dependencies_fields.copy() +pipeline_import_check_dependencies_fields_copy["leaked_dependencies"] = fields.List( + fields.Nested(leaked_dependency_model) +) +pipeline_import_check_dependencies_model = get_or_create_model( + "RagPipelineImportCheckDependencies", pipeline_import_check_dependencies_fields_copy +) + + @console_ns.route("/rag/pipelines/imports") class RagPipelineImportApi(Resource): @setup_required @login_required @account_initialization_required @edit_permission_required - @marshal_with(pipeline_import_fields) + @marshal_with(pipeline_import_model) @console_ns.expect(console_ns.models[RagPipelineImportPayload.__name__]) def post(self): # Check user role first @@ -81,7 +97,7 @@ class RagPipelineImportConfirmApi(Resource): @login_required @account_initialization_required @edit_permission_required - @marshal_with(pipeline_import_fields) + @marshal_with(pipeline_import_model) def post(self, import_id): current_user, _ = current_account_with_tenant() @@ -106,7 +122,7 @@ class RagPipelineImportCheckDependenciesApi(Resource): @get_rag_pipeline @account_initialization_required @edit_permission_required - @marshal_with(pipeline_import_check_dependencies_fields) + @marshal_with(pipeline_import_check_dependencies_model) def get(self, pipeline: Pipeline): with Session(db.engine) as session: import_service = RagPipelineDslService(session) diff 
--git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py index 02efc54eea..d34fd5088d 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py @@ -17,6 +17,13 @@ from controllers.console.app.error import ( DraftWorkflowNotExist, DraftWorkflowNotSync, ) +from controllers.console.app.workflow import workflow_model, workflow_pagination_model +from controllers.console.app.workflow_run import ( + workflow_run_detail_model, + workflow_run_node_execution_list_model, + workflow_run_node_execution_model, + workflow_run_pagination_model, +) from controllers.console.datasets.wraps import get_rag_pipeline from controllers.console.wraps import ( account_initialization_required, @@ -30,13 +37,6 @@ from core.app.entities.app_invoke_entities import InvokeFrom from core.model_runtime.utils.encoders import jsonable_encoder from extensions.ext_database import db from factories import variable_factory -from fields.workflow_fields import workflow_fields, workflow_pagination_fields -from fields.workflow_run_fields import ( - workflow_run_detail_fields, - workflow_run_node_execution_fields, - workflow_run_node_execution_list_fields, - workflow_run_pagination_fields, -) from libs import helper from libs.helper import TimestampField from libs.login import current_account_with_tenant, current_user, login_required @@ -145,7 +145,7 @@ class DraftRagPipelineApi(Resource): @account_initialization_required @get_rag_pipeline @edit_permission_required - @marshal_with(workflow_fields) + @marshal_with(workflow_model) def get(self, pipeline: Pipeline): """ Get draft rag pipeline's workflow @@ -521,7 +521,7 @@ class RagPipelineDraftNodeRunApi(Resource): @edit_permission_required @account_initialization_required @get_rag_pipeline - @marshal_with(workflow_run_node_execution_fields) + 
@marshal_with(workflow_run_node_execution_model) def post(self, pipeline: Pipeline, node_id: str): """ Run draft workflow node @@ -569,7 +569,7 @@ class PublishedRagPipelineApi(Resource): @account_initialization_required @edit_permission_required @get_rag_pipeline - @marshal_with(workflow_fields) + @marshal_with(workflow_model) def get(self, pipeline: Pipeline): """ Get published pipeline @@ -664,7 +664,7 @@ class PublishedAllRagPipelineApi(Resource): @account_initialization_required @edit_permission_required @get_rag_pipeline - @marshal_with(workflow_pagination_fields) + @marshal_with(workflow_pagination_model) def get(self, pipeline: Pipeline): """ Get published workflows @@ -708,7 +708,7 @@ class RagPipelineByIdApi(Resource): @account_initialization_required @edit_permission_required @get_rag_pipeline - @marshal_with(workflow_fields) + @marshal_with(workflow_model) def patch(self, pipeline: Pipeline, workflow_id: str): """ Update workflow attributes @@ -830,7 +830,7 @@ class RagPipelineWorkflowRunListApi(Resource): @login_required @account_initialization_required @get_rag_pipeline - @marshal_with(workflow_run_pagination_fields) + @marshal_with(workflow_run_pagination_model) def get(self, pipeline: Pipeline): """ Get workflow run list @@ -858,7 +858,7 @@ class RagPipelineWorkflowRunDetailApi(Resource): @login_required @account_initialization_required @get_rag_pipeline - @marshal_with(workflow_run_detail_fields) + @marshal_with(workflow_run_detail_model) def get(self, pipeline: Pipeline, run_id): """ Get workflow run detail @@ -877,7 +877,7 @@ class RagPipelineWorkflowRunNodeExecutionListApi(Resource): @login_required @account_initialization_required @get_rag_pipeline - @marshal_with(workflow_run_node_execution_list_fields) + @marshal_with(workflow_run_node_execution_list_model) def get(self, pipeline: Pipeline, run_id: str): """ Get workflow run node execution list @@ -911,7 +911,7 @@ class RagPipelineWorkflowLastRunApi(Resource): @login_required 
@account_initialization_required @get_rag_pipeline - @marshal_with(workflow_run_node_execution_fields) + @marshal_with(workflow_run_node_execution_model) def get(self, pipeline: Pipeline, node_id: str): rag_pipeline_service = RagPipelineService() workflow = rag_pipeline_service.get_draft_workflow(pipeline=pipeline) @@ -952,7 +952,7 @@ class RagPipelineDatasourceVariableApi(Resource): @account_initialization_required @get_rag_pipeline @edit_permission_required - @marshal_with(workflow_run_node_execution_fields) + @marshal_with(workflow_run_node_execution_model) def post(self, pipeline: Pipeline): """ Set datasource variables diff --git a/api/controllers/console/explore/installed_app.py b/api/controllers/console/explore/installed_app.py index b77eac605e..aca766567f 100644 --- a/api/controllers/console/explore/installed_app.py +++ b/api/controllers/console/explore/installed_app.py @@ -2,16 +2,17 @@ import logging from typing import Any from flask import request -from flask_restx import Resource, marshal_with +from flask_restx import Resource, fields, marshal_with from pydantic import BaseModel, Field from sqlalchemy import and_, select from werkzeug.exceptions import BadRequest, Forbidden, NotFound +from controllers.common.schema import get_or_create_model from controllers.console import console_ns from controllers.console.explore.wraps import InstalledAppResource from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check from extensions.ext_database import db -from fields.installed_app_fields import installed_app_list_fields +from fields.installed_app_fields import app_fields, installed_app_fields, installed_app_list_fields from libs.datetime_utils import naive_utc_now from libs.login import current_account_with_tenant, login_required from models import App, InstalledApp, RecommendedApp @@ -35,11 +36,22 @@ class InstalledAppsListQuery(BaseModel): logger = logging.getLogger(__name__) +app_model = 
get_or_create_model("InstalledAppInfo", app_fields) + +installed_app_fields_copy = installed_app_fields.copy() +installed_app_fields_copy["app"] = fields.Nested(app_model) +installed_app_model = get_or_create_model("InstalledApp", installed_app_fields_copy) + +installed_app_list_fields_copy = installed_app_list_fields.copy() +installed_app_list_fields_copy["installed_apps"] = fields.List(fields.Nested(installed_app_model)) +installed_app_list_model = get_or_create_model("InstalledAppList", installed_app_list_fields_copy) + + @console_ns.route("/installed-apps") class InstalledAppsListApi(Resource): @login_required @account_initialization_required - @marshal_with(installed_app_list_fields) + @marshal_with(installed_app_list_model) def get(self): query = InstalledAppsListQuery.model_validate(request.args.to_dict()) current_user, current_tenant_id = current_account_with_tenant() diff --git a/api/controllers/console/explore/recommended_app.py b/api/controllers/console/explore/recommended_app.py index 362513ec1c..c9920c97cf 100644 --- a/api/controllers/console/explore/recommended_app.py +++ b/api/controllers/console/explore/recommended_app.py @@ -3,6 +3,7 @@ from flask_restx import Resource, fields, marshal_with from pydantic import BaseModel, Field from constants.languages import languages +from controllers.common.schema import get_or_create_model from controllers.console import console_ns from controllers.console.wraps import account_initialization_required from libs.helper import AppIconUrlField @@ -19,8 +20,10 @@ app_fields = { "icon_background": fields.String, } +app_model = get_or_create_model("RecommendedAppInfo", app_fields) + recommended_app_fields = { - "app": fields.Nested(app_fields, attribute="app"), + "app": fields.Nested(app_model, attribute="app"), "app_id": fields.String, "description": fields.String(attribute="description"), "copyright": fields.String, @@ -32,11 +35,15 @@ recommended_app_fields = { "can_trial": fields.Boolean, } +recommended_app_model 
= get_or_create_model("RecommendedApp", recommended_app_fields) + recommended_app_list_fields = { - "recommended_apps": fields.List(fields.Nested(recommended_app_fields)), + "recommended_apps": fields.List(fields.Nested(recommended_app_model)), "categories": fields.List(fields.String), } +recommended_app_list_model = get_or_create_model("RecommendedAppList", recommended_app_list_fields) + class RecommendedAppsQuery(BaseModel): language: str | None = Field(default=None) @@ -53,7 +60,7 @@ class RecommendedAppListApi(Resource): @console_ns.expect(console_ns.models[RecommendedAppsQuery.__name__]) @login_required @account_initialization_required - @marshal_with(recommended_app_list_fields) + @marshal_with(recommended_app_list_model) def get(self): # language args args = RecommendedAppsQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore diff --git a/api/controllers/console/explore/trial.py b/api/controllers/console/explore/trial.py index 97d856bebe..1eb0cdb019 100644 --- a/api/controllers/console/explore/trial.py +++ b/api/controllers/console/explore/trial.py @@ -2,13 +2,14 @@ import logging from typing import Any, cast from flask import request -from flask_restx import Resource, marshal, marshal_with, reqparse +from flask_restx import Resource, fields, marshal, marshal_with, reqparse from werkzeug.exceptions import Forbidden, InternalServerError, NotFound import services from controllers.common.fields import Parameters as ParametersResponse from controllers.common.fields import Site as SiteResponse -from controllers.console import api +from controllers.common.schema import get_or_create_model +from controllers.console import api, console_ns from controllers.console.app.error import ( AppUnavailableError, AudioTooLargeError, @@ -42,9 +43,21 @@ from core.errors.error import ( from core.model_runtime.errors.invoke import InvokeError from core.workflow.graph_engine.manager import GraphEngineManager from extensions.ext_database import db -from 
fields.app_fields import app_detail_fields_with_site +from fields.app_fields import ( + app_detail_fields_with_site, + deleted_tool_fields, + model_config_fields, + site_fields, + tag_fields, +) from fields.dataset_fields import dataset_fields -from fields.workflow_fields import workflow_fields +from fields.member_fields import build_simple_account_model +from fields.workflow_fields import ( + conversation_variable_fields, + pipeline_variable_fields, + workflow_fields, + workflow_partial_fields, +) from libs import helper from libs.helper import uuid_value from libs.login import current_user @@ -74,6 +87,36 @@ from services.recommended_app_service import RecommendedAppService logger = logging.getLogger(__name__) +model_config_model = get_or_create_model("TrialAppModelConfig", model_config_fields) +workflow_partial_model = get_or_create_model("TrialWorkflowPartial", workflow_partial_fields) +deleted_tool_model = get_or_create_model("TrialDeletedTool", deleted_tool_fields) +tag_model = get_or_create_model("TrialTag", tag_fields) +site_model = get_or_create_model("TrialSite", site_fields) + +app_detail_fields_with_site_copy = app_detail_fields_with_site.copy() +app_detail_fields_with_site_copy["model_config"] = fields.Nested( + model_config_model, attribute="app_model_config", allow_null=True +) +app_detail_fields_with_site_copy["workflow"] = fields.Nested(workflow_partial_model, allow_null=True) +app_detail_fields_with_site_copy["deleted_tools"] = fields.List(fields.Nested(deleted_tool_model)) +app_detail_fields_with_site_copy["tags"] = fields.List(fields.Nested(tag_model)) +app_detail_fields_with_site_copy["site"] = fields.Nested(site_model) +app_detail_with_site_model = get_or_create_model("TrialAppDetailWithSite", app_detail_fields_with_site_copy) + +simple_account_model = build_simple_account_model(console_ns) +conversation_variable_model = get_or_create_model("TrialConversationVariable", conversation_variable_fields) +pipeline_variable_model = 
get_or_create_model("TrialPipelineVariable", pipeline_variable_fields) + +workflow_fields_copy = workflow_fields.copy() +workflow_fields_copy["created_by"] = fields.Nested(simple_account_model, attribute="created_by_account") +workflow_fields_copy["updated_by"] = fields.Nested( + simple_account_model, attribute="updated_by_account", allow_null=True +) +workflow_fields_copy["conversation_variables"] = fields.List(fields.Nested(conversation_variable_model)) +workflow_fields_copy["rag_pipeline_variables"] = fields.List(fields.Nested(pipeline_variable_model)) +workflow_model = get_or_create_model("TrialWorkflow", workflow_fields_copy) + + class TrialAppWorkflowRunApi(TrialAppResource): def post(self, trial_app): """ @@ -437,7 +480,7 @@ class TrialAppParameterApi(Resource): class AppApi(Resource): @trial_feature_enable @get_app_model_with_trial - @marshal_with(app_detail_fields_with_site) + @marshal_with(app_detail_with_site_model) def get(self, app_model): """Get app detail""" @@ -450,7 +493,7 @@ class AppApi(Resource): class AppWorkflowApi(Resource): @trial_feature_enable @get_app_model_with_trial - @marshal_with(workflow_fields) + @marshal_with(workflow_model) def get(self, app_model): """Get workflow detail""" if not app_model.workflow_id: diff --git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py index 527aabbc3d..38c66525b3 100644 --- a/api/controllers/console/workspace/account.py +++ b/api/controllers/console/workspace/account.py @@ -171,6 +171,19 @@ reg(ChangeEmailValidityPayload) reg(ChangeEmailResetPayload) reg(CheckEmailUniquePayload) +integrate_fields = { + "provider": fields.String, + "created_at": TimestampField, + "is_bound": fields.Boolean, + "link": fields.String, +} + +integrate_model = console_ns.model("AccountIntegrate", integrate_fields) +integrate_list_model = console_ns.model( + "AccountIntegrateList", + {"data": fields.List(fields.Nested(integrate_model))}, +) + @console_ns.route("/account/init") 
class AccountInitApi(Resource): @@ -336,21 +349,10 @@ class AccountPasswordApi(Resource): @console_ns.route("/account/integrates") class AccountIntegrateApi(Resource): - integrate_fields = { - "provider": fields.String, - "created_at": TimestampField, - "is_bound": fields.Boolean, - "link": fields.String, - } - - integrate_list_fields = { - "data": fields.List(fields.Nested(integrate_fields)), - } - @setup_required @login_required @account_initialization_required - @marshal_with(integrate_list_fields) + @marshal_with(integrate_list_model) def get(self): account, _ = current_account_with_tenant() diff --git a/api/controllers/console/workspace/members.py b/api/controllers/console/workspace/members.py index 01cca2a8a0..271cdce3c3 100644 --- a/api/controllers/console/workspace/members.py +++ b/api/controllers/console/workspace/members.py @@ -1,11 +1,12 @@ from urllib import parse from flask import abort, request -from flask_restx import Resource, marshal_with +from flask_restx import Resource, fields, marshal_with from pydantic import BaseModel, Field import services from configs import dify_config +from controllers.common.schema import get_or_create_model, register_enum_models from controllers.console import console_ns from controllers.console.auth.error import ( CannotTransferOwnerToSelfError, @@ -24,7 +25,7 @@ from controllers.console.wraps import ( setup_required, ) from extensions.ext_database import db -from fields.member_fields import account_with_role_list_fields +from fields.member_fields import account_with_role_fields, account_with_role_list_fields from libs.helper import extract_remote_ip from libs.login import current_account_with_tenant, login_required from models.account import Account, TenantAccountRole @@ -67,6 +68,13 @@ reg(MemberRoleUpdatePayload) reg(OwnerTransferEmailPayload) reg(OwnerTransferCheckPayload) reg(OwnerTransferPayload) +register_enum_models(console_ns, TenantAccountRole) + +account_with_role_model = 
get_or_create_model("AccountWithRole", account_with_role_fields) + +account_with_role_list_fields_copy = account_with_role_list_fields.copy() +account_with_role_list_fields_copy["accounts"] = fields.List(fields.Nested(account_with_role_model)) +account_with_role_list_model = get_or_create_model("AccountWithRoleList", account_with_role_list_fields_copy) @console_ns.route("/workspaces/current/members") @@ -76,7 +84,7 @@ class MemberListApi(Resource): @setup_required @login_required @account_initialization_required - @marshal_with(account_with_role_list_fields) + @marshal_with(account_with_role_list_model) def get(self): current_user, _ = current_account_with_tenant() if not current_user.current_tenant: @@ -227,7 +235,7 @@ class DatasetOperatorMemberListApi(Resource): @setup_required @login_required @account_initialization_required - @marshal_with(account_with_role_list_fields) + @marshal_with(account_with_role_list_model) def get(self): current_user, _ = current_account_with_tenant() if not current_user.current_tenant: diff --git a/api/controllers/console/workspace/models.py b/api/controllers/console/workspace/models.py index 2def57ed7b..583e3e3057 100644 --- a/api/controllers/console/workspace/models.py +++ b/api/controllers/console/workspace/models.py @@ -5,6 +5,7 @@ from flask import request from flask_restx import Resource from pydantic import BaseModel, Field, field_validator +from controllers.common.schema import register_enum_models, register_schema_models from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, is_admin_or_owner_required, setup_required from core.model_runtime.entities.model_entities import ModelType @@ -23,12 +24,13 @@ class ParserGetDefault(BaseModel): model_type: ModelType -class ParserPostDefault(BaseModel): - class Inner(BaseModel): - model_type: ModelType - model: str | None = None - provider: str | None = None +class Inner(BaseModel): + model_type: ModelType + model: str | None = 
None + provider: str | None = None + +class ParserPostDefault(BaseModel): model_settings: list[Inner] @@ -105,19 +107,21 @@ class ParserParameter(BaseModel): model: str -def reg(cls: type[BaseModel]): - console_ns.schema_model(cls.__name__, cls.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)) +register_schema_models( + console_ns, + ParserGetDefault, + ParserPostDefault, + ParserDeleteModels, + ParserPostModels, + ParserGetCredentials, + ParserCreateCredential, + ParserUpdateCredential, + ParserDeleteCredential, + ParserParameter, + Inner, +) - -reg(ParserGetDefault) -reg(ParserPostDefault) -reg(ParserDeleteModels) -reg(ParserPostModels) -reg(ParserGetCredentials) -reg(ParserCreateCredential) -reg(ParserUpdateCredential) -reg(ParserDeleteCredential) -reg(ParserParameter) +register_enum_models(console_ns, ModelType) @console_ns.route("/workspaces/current/default-model") diff --git a/api/controllers/console/workspace/plugin.py b/api/controllers/console/workspace/plugin.py index ea74fc0337..d1485bc1c0 100644 --- a/api/controllers/console/workspace/plugin.py +++ b/api/controllers/console/workspace/plugin.py @@ -8,6 +8,7 @@ from pydantic import BaseModel, Field from werkzeug.exceptions import Forbidden from configs import dify_config +from controllers.common.schema import register_enum_models, register_schema_models from controllers.console import console_ns from controllers.console.workspace import plugin_permission_required from controllers.console.wraps import account_initialization_required, is_admin_or_owner_required, setup_required @@ -20,57 +21,12 @@ from services.plugin.plugin_parameter_service import PluginParameterService from services.plugin.plugin_permission_service import PluginPermissionService from services.plugin.plugin_service import PluginService -DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}" - - -def reg(cls: type[BaseModel]): - console_ns.schema_model(cls.__name__, 
cls.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)) - - -@console_ns.route("/workspaces/current/plugin/debugging-key") -class PluginDebuggingKeyApi(Resource): - @setup_required - @login_required - @account_initialization_required - @plugin_permission_required(debug_required=True) - def get(self): - _, tenant_id = current_account_with_tenant() - - try: - return { - "key": PluginService.get_debugging_key(tenant_id), - "host": dify_config.PLUGIN_REMOTE_INSTALL_HOST, - "port": dify_config.PLUGIN_REMOTE_INSTALL_PORT, - } - except PluginDaemonClientSideError as e: - raise ValueError(e) - class ParserList(BaseModel): page: int = Field(default=1, ge=1, description="Page number") page_size: int = Field(default=256, ge=1, le=256, description="Page size (1-256)") -reg(ParserList) - - -@console_ns.route("/workspaces/current/plugin/list") -class PluginListApi(Resource): - @console_ns.expect(console_ns.models[ParserList.__name__]) - @setup_required - @login_required - @account_initialization_required - def get(self): - _, tenant_id = current_account_with_tenant() - args = ParserList.model_validate(request.args.to_dict(flat=True)) # type: ignore - try: - plugins_with_total = PluginService.list_with_total(tenant_id, args.page, args.page_size) - except PluginDaemonClientSideError as e: - raise ValueError(e) - - return jsonable_encoder({"plugins": plugins_with_total.list, "total": plugins_with_total.total}) - - class ParserLatest(BaseModel): plugin_ids: list[str] @@ -180,23 +136,73 @@ class ParserReadme(BaseModel): language: str = Field(default="en-US") -reg(ParserLatest) -reg(ParserIcon) -reg(ParserAsset) -reg(ParserGithubUpload) -reg(ParserPluginIdentifiers) -reg(ParserGithubInstall) -reg(ParserPluginIdentifierQuery) -reg(ParserTasks) -reg(ParserMarketplaceUpgrade) -reg(ParserGithubUpgrade) -reg(ParserUninstall) -reg(ParserPermissionChange) -reg(ParserDynamicOptions) -reg(ParserDynamicOptionsWithCredentials) -reg(ParserPreferencesChange) -reg(ParserExcludePlugin) 
-reg(ParserReadme) +register_schema_models( + console_ns, + ParserList, + PluginAutoUpgradeSettingsPayload, + PluginPermissionSettingsPayload, + ParserLatest, + ParserIcon, + ParserAsset, + ParserGithubUpload, + ParserPluginIdentifiers, + ParserGithubInstall, + ParserPluginIdentifierQuery, + ParserTasks, + ParserMarketplaceUpgrade, + ParserGithubUpgrade, + ParserUninstall, + ParserPermissionChange, + ParserDynamicOptions, + ParserDynamicOptionsWithCredentials, + ParserPreferencesChange, + ParserExcludePlugin, + ParserReadme, +) + +register_enum_models( + console_ns, + TenantPluginPermission.DebugPermission, + TenantPluginAutoUpgradeStrategy.UpgradeMode, + TenantPluginAutoUpgradeStrategy.StrategySetting, + TenantPluginPermission.InstallPermission, +) + + +@console_ns.route("/workspaces/current/plugin/debugging-key") +class PluginDebuggingKeyApi(Resource): + @setup_required + @login_required + @account_initialization_required + @plugin_permission_required(debug_required=True) + def get(self): + _, tenant_id = current_account_with_tenant() + + try: + return { + "key": PluginService.get_debugging_key(tenant_id), + "host": dify_config.PLUGIN_REMOTE_INSTALL_HOST, + "port": dify_config.PLUGIN_REMOTE_INSTALL_PORT, + } + except PluginDaemonClientSideError as e: + raise ValueError(e) + + +@console_ns.route("/workspaces/current/plugin/list") +class PluginListApi(Resource): + @console_ns.expect(console_ns.models[ParserList.__name__]) + @setup_required + @login_required + @account_initialization_required + def get(self): + _, tenant_id = current_account_with_tenant() + args = ParserList.model_validate(request.args.to_dict(flat=True)) # type: ignore + try: + plugins_with_total = PluginService.list_with_total(tenant_id, args.page, args.page_size) + except PluginDaemonClientSideError as e: + raise ValueError(e) + + return jsonable_encoder({"plugins": plugins_with_total.list, "total": plugins_with_total.total}) @console_ns.route("/workspaces/current/plugin/list/latest-versions") 
diff --git a/api/controllers/service_api/dataset/dataset.py b/api/controllers/service_api/dataset/dataset.py index 2af269fd91..28864a140a 100644 --- a/api/controllers/service_api/dataset/dataset.py +++ b/api/controllers/service_api/dataset/dataset.py @@ -2,7 +2,7 @@ from typing import Any, Literal, cast from flask import request from flask_restx import marshal -from pydantic import BaseModel, Field, field_validator +from pydantic import BaseModel, Field, TypeAdapter, field_validator from werkzeug.exceptions import Forbidden, NotFound import services @@ -26,6 +26,14 @@ from services.dataset_service import DatasetPermissionService, DatasetService, D from services.entities.knowledge_entities.knowledge_entities import RetrievalModel from services.tag_service import TagService +DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}" + + +service_api_ns.schema_model( + DatasetPermissionEnum.__name__, + TypeAdapter(DatasetPermissionEnum).json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0), +) + class DatasetCreatePayload(BaseModel): name: str = Field(..., min_length=1, max_length=40) diff --git a/api/controllers/service_api/dataset/document.py b/api/controllers/service_api/dataset/document.py index 1260645624..c85c1cf81e 100644 --- a/api/controllers/service_api/dataset/document.py +++ b/api/controllers/service_api/dataset/document.py @@ -16,6 +16,7 @@ from controllers.common.errors import ( TooManyFilesError, UnsupportedFileTypeError, ) +from controllers.common.schema import register_enum_models, register_schema_models from controllers.service_api import service_api_ns from controllers.service_api.app.error import ProviderNotInitializeError from controllers.service_api.dataset.error import ( @@ -29,12 +30,20 @@ from controllers.service_api.wraps import ( cloud_edition_billing_resource_check, ) from core.errors.error import ProviderTokenNotInitError +from core.rag.retrieval.retrieval_methods import RetrievalMethod from extensions.ext_database import db from 
fields.document_fields import document_fields, document_status_fields from libs.login import current_user from models.dataset import Dataset, Document, DocumentSegment from services.dataset_service import DatasetService, DocumentService -from services.entities.knowledge_entities.knowledge_entities import KnowledgeConfig, ProcessRule, RetrievalModel +from services.entities.knowledge_entities.knowledge_entities import ( + KnowledgeConfig, + PreProcessingRule, + ProcessRule, + RetrievalModel, + Rule, + Segmentation, +) from services.file_service import FileService @@ -76,8 +85,19 @@ class DocumentListQuery(BaseModel): status: str | None = Field(default=None, description="Document status filter") -for m in [ProcessRule, RetrievalModel, DocumentTextCreatePayload, DocumentTextUpdate, DocumentListQuery]: - service_api_ns.schema_model(m.__name__, m.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)) # type: ignore +register_enum_models(service_api_ns, RetrievalMethod) + +register_schema_models( + service_api_ns, + ProcessRule, + RetrievalModel, + DocumentTextCreatePayload, + DocumentTextUpdate, + DocumentListQuery, + Rule, + PreProcessingRule, + Segmentation, +) @service_api_ns.route( diff --git a/api/controllers/service_api/dataset/segment.py b/api/controllers/service_api/dataset/segment.py index b242fd2c3e..95679e6fcb 100644 --- a/api/controllers/service_api/dataset/segment.py +++ b/api/controllers/service_api/dataset/segment.py @@ -60,6 +60,7 @@ register_schema_models( service_api_ns, SegmentCreatePayload, SegmentListQuery, + SegmentUpdateArgs, SegmentUpdatePayload, ChildChunkCreatePayload, ChildChunkListQuery, diff --git a/api/libs/login.py b/api/libs/login.py index 4b8ee2d1f8..73caa492fe 100644 --- a/api/libs/login.py +++ b/api/libs/login.py @@ -1,6 +1,8 @@ +from __future__ import annotations + from collections.abc import Callable from functools import wraps -from typing import Any +from typing import TYPE_CHECKING, Any from flask import current_app, g, 
has_request_context, request from flask_login.config import EXEMPT_METHODS @@ -9,7 +11,9 @@ from werkzeug.local import LocalProxy from configs import dify_config from libs.token import check_csrf_token from models import Account -from models.model import EndUser + +if TYPE_CHECKING: + from models.model import EndUser def current_account_with_tenant(): diff --git a/api/services/app_dsl_service.py b/api/services/app_dsl_service.py index edcb2a7870..0f42c99246 100644 --- a/api/services/app_dsl_service.py +++ b/api/services/app_dsl_service.py @@ -428,10 +428,10 @@ class AppDslService: # Set icon type icon_type_value = icon_type or app_data.get("icon_type") - if icon_type_value in [IconType.EMOJI.value, IconType.IMAGE.value, IconType.LINK.value]: + if icon_type_value in [IconType.EMOJI, IconType.IMAGE, IconType.LINK]: icon_type = icon_type_value else: - icon_type = IconType.EMOJI.value + icon_type = IconType.EMOJI icon = icon or str(app_data.get("icon", "")) if app: From caa30ddcc0ab9808c7304c86b40e605cc175a8dd Mon Sep 17 00:00:00 2001 From: yyh <92089059+lyzno1@users.noreply.github.com> Date: Tue, 27 Jan 2026 19:54:46 +0800 Subject: [PATCH 2/6] chore: remove project skills (#31615) --- .agent/skills/component-refactoring | 1 - .agent/skills/frontend-code-review | 1 - .agent/skills/frontend-testing | 1 - .agent/skills/orpc-contract-first | 1 - .agent/skills/skill-creator | 1 - .agent/skills/vercel-react-best-practices | 1 - .agent/skills/web-design-guidelines | 1 - .agents/skills/skill-creator/SKILL.md | 355 --- .../references/output-patterns.md | 86 - .../skill-creator/references/workflows.md | 28 - .../skill-creator/scripts/init_skill.py | 300 -- .../skill-creator/scripts/package_skill.py | 110 - .../skill-creator/scripts/quick_validate.py | 97 - .../vercel-react-best-practices/AGENTS.md | 2410 ----------------- .../vercel-react-best-practices/SKILL.md | 125 - .../rules/advanced-event-handler-refs.md | 55 - .../rules/advanced-use-latest.md | 49 - 
.../rules/async-api-routes.md | 38 - .../rules/async-defer-await.md | 80 - .../rules/async-dependencies.md | 36 - .../rules/async-parallel.md | 28 - .../rules/async-suspense-boundaries.md | 99 - .../rules/bundle-barrel-imports.md | 59 - .../rules/bundle-conditional.md | 31 - .../rules/bundle-defer-third-party.md | 49 - .../rules/bundle-dynamic-imports.md | 35 - .../rules/bundle-preload.md | 50 - .../rules/client-event-listeners.md | 74 - .../rules/client-localstorage-schema.md | 71 - .../rules/client-passive-event-listeners.md | 48 - .../rules/client-swr-dedup.md | 56 - .../rules/js-batch-dom-css.md | 57 - .../rules/js-cache-function-results.md | 80 - .../rules/js-cache-property-access.md | 28 - .../rules/js-cache-storage.md | 70 - .../rules/js-combine-iterations.md | 32 - .../rules/js-early-exit.md | 50 - .../rules/js-hoist-regexp.md | 45 - .../rules/js-index-maps.md | 37 - .../rules/js-length-check-first.md | 49 - .../rules/js-min-max-loop.md | 82 - .../rules/js-set-map-lookups.md | 24 - .../rules/js-tosorted-immutable.md | 57 - .../rules/rendering-activity.md | 26 - .../rules/rendering-animate-svg-wrapper.md | 47 - .../rules/rendering-conditional-render.md | 40 - .../rules/rendering-content-visibility.md | 38 - .../rules/rendering-hoist-jsx.md | 46 - .../rules/rendering-hydration-no-flicker.md | 82 - .../rules/rendering-svg-precision.md | 28 - .../rules/rerender-defer-reads.md | 39 - .../rules/rerender-dependencies.md | 45 - .../rules/rerender-derived-state.md | 29 - .../rules/rerender-functional-setstate.md | 74 - .../rules/rerender-lazy-state-init.md | 58 - .../rules/rerender-memo.md | 44 - .../rules/rerender-transitions.md | 40 - .../rules/server-after-nonblocking.md | 73 - .../rules/server-cache-lru.md | 41 - .../rules/server-cache-react.md | 76 - .../rules/server-parallel-fetching.md | 83 - .../rules/server-serialization.md | 38 - .agents/skills/web-design-guidelines/SKILL.md | 39 - .claude/settings.json | 5 - .claude/skills/skill-creator | 1 - 
.claude/skills/vercel-react-best-practices | 1 - .claude/skills/web-design-guidelines | 1 - .codex/skills/skill-creator | 1 - .codex/skills/vercel-react-best-practices | 1 - .codex/skills/web-design-guidelines | 1 - .cursor/skills/component-refactoring | 1 - .cursor/skills/frontend-code-review | 1 - .cursor/skills/frontend-testing | 1 - .cursor/skills/orpc-contract-first | 1 - .cursor/skills/skill-creator | 1 - .cursor/skills/vercel-react-best-practices | 1 - .cursor/skills/web-design-guidelines | 1 - .gemini/skills/component-refactoring | 1 - .gemini/skills/frontend-code-review | 1 - .gemini/skills/frontend-testing | 1 - .gemini/skills/orpc-contract-first | 1 - .gemini/skills/skill-creator | 1 - .gemini/skills/vercel-react-best-practices | 1 - .gemini/skills/web-design-guidelines | 1 - .github/skills/component-refactoring | 1 - .github/skills/frontend-code-review | 1 - .github/skills/frontend-testing | 1 - .github/skills/orpc-contract-first | 1 - .github/skills/skill-creator | 1 - .github/skills/vercel-react-best-practices | 1 - .github/skills/web-design-guidelines | 1 - 91 files changed, 6005 deletions(-) delete mode 120000 .agent/skills/component-refactoring delete mode 120000 .agent/skills/frontend-code-review delete mode 120000 .agent/skills/frontend-testing delete mode 120000 .agent/skills/orpc-contract-first delete mode 120000 .agent/skills/skill-creator delete mode 120000 .agent/skills/vercel-react-best-practices delete mode 120000 .agent/skills/web-design-guidelines delete mode 100644 .agents/skills/skill-creator/SKILL.md delete mode 100644 .agents/skills/skill-creator/references/output-patterns.md delete mode 100644 .agents/skills/skill-creator/references/workflows.md delete mode 100755 .agents/skills/skill-creator/scripts/init_skill.py delete mode 100755 .agents/skills/skill-creator/scripts/package_skill.py delete mode 100755 .agents/skills/skill-creator/scripts/quick_validate.py delete mode 100644 .agents/skills/vercel-react-best-practices/AGENTS.md 
delete mode 100644 .agents/skills/vercel-react-best-practices/SKILL.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/advanced-event-handler-refs.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/advanced-use-latest.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/async-api-routes.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/async-defer-await.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/async-dependencies.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/async-parallel.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/async-suspense-boundaries.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/bundle-barrel-imports.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/bundle-conditional.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/bundle-defer-third-party.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/bundle-dynamic-imports.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/bundle-preload.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/client-event-listeners.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/client-localstorage-schema.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/client-passive-event-listeners.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/client-swr-dedup.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/js-batch-dom-css.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/js-cache-function-results.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/js-cache-property-access.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/js-cache-storage.md delete mode 100644 
.agents/skills/vercel-react-best-practices/rules/js-combine-iterations.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/js-early-exit.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/js-hoist-regexp.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/js-index-maps.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/js-length-check-first.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/js-min-max-loop.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/js-set-map-lookups.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/js-tosorted-immutable.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/rendering-activity.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/rendering-animate-svg-wrapper.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/rendering-conditional-render.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/rendering-content-visibility.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/rendering-hoist-jsx.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/rendering-hydration-no-flicker.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/rendering-svg-precision.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/rerender-defer-reads.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/rerender-dependencies.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/rerender-derived-state.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/rerender-functional-setstate.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/rerender-lazy-state-init.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/rerender-memo.md delete mode 100644 
.agents/skills/vercel-react-best-practices/rules/rerender-transitions.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/server-after-nonblocking.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/server-cache-lru.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/server-cache-react.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/server-parallel-fetching.md delete mode 100644 .agents/skills/vercel-react-best-practices/rules/server-serialization.md delete mode 100644 .agents/skills/web-design-guidelines/SKILL.md delete mode 120000 .claude/skills/skill-creator delete mode 120000 .claude/skills/vercel-react-best-practices delete mode 120000 .claude/skills/web-design-guidelines delete mode 120000 .codex/skills/skill-creator delete mode 120000 .codex/skills/vercel-react-best-practices delete mode 120000 .codex/skills/web-design-guidelines delete mode 120000 .cursor/skills/component-refactoring delete mode 120000 .cursor/skills/frontend-code-review delete mode 120000 .cursor/skills/frontend-testing delete mode 120000 .cursor/skills/orpc-contract-first delete mode 120000 .cursor/skills/skill-creator delete mode 120000 .cursor/skills/vercel-react-best-practices delete mode 120000 .cursor/skills/web-design-guidelines delete mode 120000 .gemini/skills/component-refactoring delete mode 120000 .gemini/skills/frontend-code-review delete mode 120000 .gemini/skills/frontend-testing delete mode 120000 .gemini/skills/orpc-contract-first delete mode 120000 .gemini/skills/skill-creator delete mode 120000 .gemini/skills/vercel-react-best-practices delete mode 120000 .gemini/skills/web-design-guidelines delete mode 120000 .github/skills/component-refactoring delete mode 120000 .github/skills/frontend-code-review delete mode 120000 .github/skills/frontend-testing delete mode 120000 .github/skills/orpc-contract-first delete mode 120000 .github/skills/skill-creator delete mode 120000 
.github/skills/vercel-react-best-practices delete mode 120000 .github/skills/web-design-guidelines diff --git a/.agent/skills/component-refactoring b/.agent/skills/component-refactoring deleted file mode 120000 index 53ae67e2f2..0000000000 --- a/.agent/skills/component-refactoring +++ /dev/null @@ -1 +0,0 @@ -../../.agents/skills/component-refactoring \ No newline at end of file diff --git a/.agent/skills/frontend-code-review b/.agent/skills/frontend-code-review deleted file mode 120000 index 55654ffbd7..0000000000 --- a/.agent/skills/frontend-code-review +++ /dev/null @@ -1 +0,0 @@ -../../.agents/skills/frontend-code-review \ No newline at end of file diff --git a/.agent/skills/frontend-testing b/.agent/skills/frontend-testing deleted file mode 120000 index 092cec7745..0000000000 --- a/.agent/skills/frontend-testing +++ /dev/null @@ -1 +0,0 @@ -../../.agents/skills/frontend-testing \ No newline at end of file diff --git a/.agent/skills/orpc-contract-first b/.agent/skills/orpc-contract-first deleted file mode 120000 index da47b335c7..0000000000 --- a/.agent/skills/orpc-contract-first +++ /dev/null @@ -1 +0,0 @@ -../../.agents/skills/orpc-contract-first \ No newline at end of file diff --git a/.agent/skills/skill-creator b/.agent/skills/skill-creator deleted file mode 120000 index b87455490f..0000000000 --- a/.agent/skills/skill-creator +++ /dev/null @@ -1 +0,0 @@ -../../.agents/skills/skill-creator \ No newline at end of file diff --git a/.agent/skills/vercel-react-best-practices b/.agent/skills/vercel-react-best-practices deleted file mode 120000 index e567923b32..0000000000 --- a/.agent/skills/vercel-react-best-practices +++ /dev/null @@ -1 +0,0 @@ -../../.agents/skills/vercel-react-best-practices \ No newline at end of file diff --git a/.agent/skills/web-design-guidelines b/.agent/skills/web-design-guidelines deleted file mode 120000 index 886b26ded7..0000000000 --- a/.agent/skills/web-design-guidelines +++ /dev/null @@ -1 +0,0 @@ 
-../../.agents/skills/web-design-guidelines \ No newline at end of file diff --git a/.agents/skills/skill-creator/SKILL.md b/.agents/skills/skill-creator/SKILL.md deleted file mode 100644 index b49da5ac68..0000000000 --- a/.agents/skills/skill-creator/SKILL.md +++ /dev/null @@ -1,355 +0,0 @@ ---- -name: skill-creator -description: Guide for creating effective skills. This skill should be used when users want to create a new skill (or update an existing skill) that extends Claude's capabilities with specialized knowledge, workflows, or tool integrations. ---- - -# Skill Creator - -This skill provides guidance for creating effective skills. - -## About Skills - -Skills are modular, self-contained packages that extend Claude's capabilities by providing -specialized knowledge, workflows, and tools. Think of them as "onboarding guides" for specific -domains or tasks—they transform Claude from a general-purpose agent into a specialized agent -equipped with procedural knowledge that no model can fully possess. - -### What Skills Provide - -1. Specialized workflows - Multi-step procedures for specific domains -2. Tool integrations - Instructions for working with specific file formats or APIs -3. Domain expertise - Company-specific knowledge, schemas, business logic -4. Bundled resources - Scripts, references, and assets for complex and repetitive tasks - -## Core Principles - -### Concise is Key - -The context window is a public good. Skills share the context window with everything else Claude needs: system prompt, conversation history, other Skills' metadata, and the actual user request. - -**Default assumption: Claude is already very smart.** Only add context Claude doesn't already have. Challenge each piece of information: "Does Claude really need this explanation?" and "Does this paragraph justify its token cost?" - -Prefer concise examples over verbose explanations. 
- -### Set Appropriate Degrees of Freedom - -Match the level of specificity to the task's fragility and variability: - -**High freedom (text-based instructions)**: Use when multiple approaches are valid, decisions depend on context, or heuristics guide the approach. - -**Medium freedom (pseudocode or scripts with parameters)**: Use when a preferred pattern exists, some variation is acceptable, or configuration affects behavior. - -**Low freedom (specific scripts, few parameters)**: Use when operations are fragile and error-prone, consistency is critical, or a specific sequence must be followed. - -Think of Claude as exploring a path: a narrow bridge with cliffs needs specific guardrails (low freedom), while an open field allows many routes (high freedom). - -### Anatomy of a Skill - -Every skill consists of a required SKILL.md file and optional bundled resources: - -``` -skill-name/ -├── SKILL.md (required) -│ ├── YAML frontmatter metadata (required) -│ │ ├── name: (required) -│ │ └── description: (required) -│ └── Markdown instructions (required) -└── Bundled Resources (optional) - ├── scripts/ - Executable code (Python/Bash/etc.) - ├── references/ - Documentation intended to be loaded into context as needed - └── assets/ - Files used in output (templates, icons, fonts, etc.) -``` - -#### SKILL.md (required) - -Every SKILL.md consists of: - -- **Frontmatter** (YAML): Contains `name` and `description` fields. These are the only fields that Claude reads to determine when the skill gets used, thus it is very important to be clear and comprehensive in describing what the skill is, and when it should be used. -- **Body** (Markdown): Instructions and guidance for using the skill. Only loaded AFTER the skill triggers (if at all). - -#### Bundled Resources (optional) - -##### Scripts (`scripts/`) - -Executable code (Python/Bash/etc.) for tasks that require deterministic reliability or are repeatedly rewritten. 
- -- **When to include**: When the same code is being rewritten repeatedly or deterministic reliability is needed -- **Example**: `scripts/rotate_pdf.py` for PDF rotation tasks -- **Benefits**: Token efficient, deterministic, may be executed without loading into context -- **Note**: Scripts may still need to be read by Claude for patching or environment-specific adjustments - -##### References (`references/`) - -Documentation and reference material intended to be loaded as needed into context to inform Claude's process and thinking. - -- **When to include**: For documentation that Claude should reference while working -- **Examples**: `references/finance.md` for financial schemas, `references/mnda.md` for company NDA template, `references/policies.md` for company policies, `references/api_docs.md` for API specifications -- **Use cases**: Database schemas, API documentation, domain knowledge, company policies, detailed workflow guides -- **Benefits**: Keeps SKILL.md lean, loaded only when Claude determines it's needed -- **Best practice**: If files are large (>10k words), include grep search patterns in SKILL.md -- **Avoid duplication**: Information should live in either SKILL.md or references files, not both. Prefer references files for detailed information unless it's truly core to the skill—this keeps SKILL.md lean while making information discoverable without hogging the context window. Keep only essential procedural instructions and workflow guidance in SKILL.md; move detailed reference material, schemas, and examples to references files. - -##### Assets (`assets/`) - -Files not intended to be loaded into context, but rather used within the output Claude produces. 
- -- **When to include**: When the skill needs files that will be used in the final output -- **Examples**: `assets/logo.png` for brand assets, `assets/slides.pptx` for PowerPoint templates, `assets/frontend-template/` for HTML/React boilerplate, `assets/font.ttf` for typography -- **Use cases**: Templates, images, icons, boilerplate code, fonts, sample documents that get copied or modified -- **Benefits**: Separates output resources from documentation, enables Claude to use files without loading them into context - -#### What to Not Include in a Skill - -A skill should only contain essential files that directly support its functionality. Do NOT create extraneous documentation or auxiliary files, including: - -- README.md -- INSTALLATION_GUIDE.md -- QUICK_REFERENCE.md -- CHANGELOG.md -- etc. - -The skill should only contain the information needed for an AI agent to do the job at hand. It should not contain auxilary context about the process that went into creating it, setup and testing procedures, user-facing documentation, etc. Creating additional documentation files just adds clutter and confusion. - -### Progressive Disclosure Design Principle - -Skills use a three-level loading system to manage context efficiently: - -1. **Metadata (name + description)** - Always in context (~100 words) -2. **SKILL.md body** - When skill triggers (<5k words) -3. **Bundled resources** - As needed by Claude (Unlimited because scripts can be executed without reading into context window) - -#### Progressive Disclosure Patterns - -Keep SKILL.md body to the essentials and under 500 lines to minimize context bloat. Split content into separate files when approaching this limit. When splitting out content into other files, it is very important to reference them from SKILL.md and describe clearly when to read them, to ensure the reader of the skill knows they exist and when to use them. 
- -**Key principle:** When a skill supports multiple variations, frameworks, or options, keep only the core workflow and selection guidance in SKILL.md. Move variant-specific details (patterns, examples, configuration) into separate reference files. - -**Pattern 1: High-level guide with references** - -```markdown -# PDF Processing - -## Quick start - -Extract text with pdfplumber: -[code example] - -## Advanced features - -- **Form filling**: See [FORMS.md](FORMS.md) for complete guide -- **API reference**: See [REFERENCE.md](REFERENCE.md) for all methods -- **Examples**: See [EXAMPLES.md](EXAMPLES.md) for common patterns -``` - -Claude loads FORMS.md, REFERENCE.md, or EXAMPLES.md only when needed. - -**Pattern 2: Domain-specific organization** - -For Skills with multiple domains, organize content by domain to avoid loading irrelevant context: - -``` -bigquery-skill/ -├── SKILL.md (overview and navigation) -└── reference/ - ├── finance.md (revenue, billing metrics) - ├── sales.md (opportunities, pipeline) - ├── product.md (API usage, features) - └── marketing.md (campaigns, attribution) -``` - -When a user asks about sales metrics, Claude only reads sales.md. - -Similarly, for skills supporting multiple frameworks or variants, organize by variant: - -``` -cloud-deploy/ -├── SKILL.md (workflow + provider selection) -└── references/ - ├── aws.md (AWS deployment patterns) - ├── gcp.md (GCP deployment patterns) - └── azure.md (Azure deployment patterns) -``` - -When the user chooses AWS, Claude only reads aws.md. - -**Pattern 3: Conditional details** - -Show basic content, link to advanced content: - -```markdown -# DOCX Processing - -## Creating documents - -Use docx-js for new documents. See [DOCX-JS.md](DOCX-JS.md). - -## Editing documents - -For simple edits, modify the XML directly. 
- -**For tracked changes**: See [REDLINING.md](REDLINING.md) -**For OOXML details**: See [OOXML.md](OOXML.md) -``` - -Claude reads REDLINING.md or OOXML.md only when the user needs those features. - -**Important guidelines:** - -- **Avoid deeply nested references** - Keep references one level deep from SKILL.md. All reference files should link directly from SKILL.md. -- **Structure longer reference files** - For files longer than 100 lines, include a table of contents at the top so Claude can see the full scope when previewing. - -## Skill Creation Process - -Skill creation involves these steps: - -1. Understand the skill with concrete examples -2. Plan reusable skill contents (scripts, references, assets) -3. Initialize the skill (run init_skill.py) -4. Edit the skill (implement resources and write SKILL.md) -5. Package the skill (run package_skill.py) -6. Iterate based on real usage - -Follow these steps in order, skipping only if there is a clear reason why they are not applicable. - -### Step 1: Understanding the Skill with Concrete Examples - -Skip this step only when the skill's usage patterns are already clearly understood. It remains valuable even when working with an existing skill. - -To create an effective skill, clearly understand concrete examples of how the skill will be used. This understanding can come from either direct user examples or generated examples that are validated with user feedback. - -For example, when building an image-editor skill, relevant questions include: - -- "What functionality should the image-editor skill support? Editing, rotating, anything else?" -- "Can you give some examples of how this skill would be used?" -- "I can imagine users asking for things like 'Remove the red-eye from this image' or 'Rotate this image'. Are there other ways you imagine this skill being used?" -- "What would a user say that should trigger this skill?" - -To avoid overwhelming users, avoid asking too many questions in a single message. 
Start with the most important questions and follow up as needed for better effectiveness. - -Conclude this step when there is a clear sense of the functionality the skill should support. - -### Step 2: Planning the Reusable Skill Contents - -To turn concrete examples into an effective skill, analyze each example by: - -1. Considering how to execute on the example from scratch -2. Identifying what scripts, references, and assets would be helpful when executing these workflows repeatedly - -Example: When building a `pdf-editor` skill to handle queries like "Help me rotate this PDF," the analysis shows: - -1. Rotating a PDF requires re-writing the same code each time -2. A `scripts/rotate_pdf.py` script would be helpful to store in the skill - -Example: When designing a `frontend-webapp-builder` skill for queries like "Build me a todo app" or "Build me a dashboard to track my steps," the analysis shows: - -1. Writing a frontend webapp requires the same boilerplate HTML/React each time -2. An `assets/hello-world/` template containing the boilerplate HTML/React project files would be helpful to store in the skill - -Example: When building a `big-query` skill to handle queries like "How many users have logged in today?" the analysis shows: - -1. Querying BigQuery requires re-discovering the table schemas and relationships each time -2. A `references/schema.md` file documenting the table schemas would be helpful to store in the skill - -To establish the skill's contents, analyze each concrete example to create a list of the reusable resources to include: scripts, references, and assets. - -### Step 3: Initializing the Skill - -At this point, it is time to actually create the skill. - -Skip this step only if the skill being developed already exists, and iteration or packaging is needed. In this case, continue to the next step. - -When creating a new skill from scratch, always run the `init_skill.py` script. 
The script conveniently generates a new template skill directory that automatically includes everything a skill requires, making the skill creation process much more efficient and reliable. - -Usage: - -```bash -scripts/init_skill.py --path -``` - -The script: - -- Creates the skill directory at the specified path -- Generates a SKILL.md template with proper frontmatter and TODO placeholders -- Creates example resource directories: `scripts/`, `references/`, and `assets/` -- Adds example files in each directory that can be customized or deleted - -After initialization, customize or remove the generated SKILL.md and example files as needed. - -### Step 4: Edit the Skill - -When editing the (newly-generated or existing) skill, remember that the skill is being created for another instance of Claude to use. Include information that would be beneficial and non-obvious to Claude. Consider what procedural knowledge, domain-specific details, or reusable assets would help another Claude instance execute these tasks more effectively. - -#### Learn Proven Design Patterns - -Consult these helpful guides based on your skill's needs: - -- **Multi-step processes**: See references/workflows.md for sequential workflows and conditional logic -- **Specific output formats or quality standards**: See references/output-patterns.md for template and example patterns - -These files contain established best practices for effective skill design. - -#### Start with Reusable Skill Contents - -To begin implementation, start with the reusable resources identified above: `scripts/`, `references/`, and `assets/` files. Note that this step may require user input. For example, when implementing a `brand-guidelines` skill, the user may need to provide brand assets or templates to store in `assets/`, or documentation to store in `references/`. - -Added scripts must be tested by actually running them to ensure there are no bugs and that the output matches what is expected. 
If there are many similar scripts, only a representative sample needs to be tested to ensure confidence that they all work while balancing time to completion. - -Any example files and directories not needed for the skill should be deleted. The initialization script creates example files in `scripts/`, `references/`, and `assets/` to demonstrate structure, but most skills won't need all of them. - -#### Update SKILL.md - -**Writing Guidelines:** Always use imperative/infinitive form. - -##### Frontmatter - -Write the YAML frontmatter with `name` and `description`: - -- `name`: The skill name -- `description`: This is the primary triggering mechanism for your skill, and helps Claude understand when to use the skill. - - Include both what the Skill does and specific triggers/contexts for when to use it. - - Include all "when to use" information here - Not in the body. The body is only loaded after triggering, so "When to Use This Skill" sections in the body are not helpful to Claude. - - Example description for a `docx` skill: "Comprehensive document creation, editing, and analysis with support for tracked changes, comments, formatting preservation, and text extraction. Use when Claude needs to work with professional documents (.docx files) for: (1) Creating new documents, (2) Modifying or editing content, (3) Working with tracked changes, (4) Adding comments, or any other document tasks" - -Do not include any other fields in YAML frontmatter. - -##### Body - -Write instructions for using the skill and its bundled resources. - -### Step 5: Packaging a Skill - -Once development of the skill is complete, it must be packaged into a distributable .skill file that gets shared with the user. The packaging process automatically validates the skill first to ensure it meets all requirements: - -```bash -scripts/package_skill.py -``` - -Optional output directory specification: - -```bash -scripts/package_skill.py ./dist -``` - -The packaging script will: - -1. 
**Validate** the skill automatically, checking: - - - YAML frontmatter format and required fields - - Skill naming conventions and directory structure - - Description completeness and quality - - File organization and resource references - -2. **Package** the skill if validation passes, creating a .skill file named after the skill (e.g., `my-skill.skill`) that includes all files and maintains the proper directory structure for distribution. The .skill file is a zip file with a .skill extension. - -If validation fails, the script will report the errors and exit without creating a package. Fix any validation errors and run the packaging command again. - -### Step 6: Iterate - -After testing the skill, users may request improvements. Often this happens right after using the skill, with fresh context of how the skill performed. - -**Iteration workflow:** - -1. Use the skill on real tasks -2. Notice struggles or inefficiencies -3. Identify how SKILL.md or bundled resources should be updated -4. Implement changes and test again diff --git a/.agents/skills/skill-creator/references/output-patterns.md b/.agents/skills/skill-creator/references/output-patterns.md deleted file mode 100644 index 022e85fe5e..0000000000 --- a/.agents/skills/skill-creator/references/output-patterns.md +++ /dev/null @@ -1,86 +0,0 @@ -# Output Patterns - -Use these patterns when skills need to produce consistent, high-quality output. - -## Template Pattern - -Provide templates for output format. Match the level of strictness to your needs. - -**For strict requirements (like API responses or data formats):** - -```markdown -## Report structure - -ALWAYS use this exact template structure: - -# [Analysis Title] - -## Executive summary -[One-paragraph overview of key findings] - -## Key findings -- Finding 1 with supporting data -- Finding 2 with supporting data -- Finding 3 with supporting data - -## Recommendations -1. Specific actionable recommendation -2. 
Specific actionable recommendation -``` - -**For flexible guidance (when adaptation is useful):** - -```markdown -## Report structure - -Here is a sensible default format, but use your best judgment: - -# [Analysis Title] - -## Executive summary -[Overview] - -## Key findings -[Adapt sections based on what you discover] - -## Recommendations -[Tailor to the specific context] - -Adjust sections as needed for the specific analysis type. -``` - -## Examples Pattern - -For skills where output quality depends on seeing examples, provide input/output pairs: - -```markdown -## Commit message format - -Generate commit messages following these examples: - -**Example 1:** -Input: Added user authentication with JWT tokens -Output: -``` - -feat(auth): implement JWT-based authentication - -Add login endpoint and token validation middleware - -``` - -**Example 2:** -Input: Fixed bug where dates displayed incorrectly in reports -Output: -``` - -fix(reports): correct date formatting in timezone conversion - -Use UTC timestamps consistently across report generation - -``` - -Follow this style: type(scope): brief description, then detailed explanation. -``` - -Examples help Claude understand the desired style and level of detail more clearly than descriptions alone. diff --git a/.agents/skills/skill-creator/references/workflows.md b/.agents/skills/skill-creator/references/workflows.md deleted file mode 100644 index 54b0174078..0000000000 --- a/.agents/skills/skill-creator/references/workflows.md +++ /dev/null @@ -1,28 +0,0 @@ -# Workflow Patterns - -## Sequential Workflows - -For complex tasks, break operations into clear, sequential steps. It is often helpful to give Claude an overview of the process towards the beginning of SKILL.md: - -```markdown -Filling a PDF form involves these steps: - -1. Analyze the form (run analyze_form.py) -2. Create field mapping (edit fields.json) -3. Validate mapping (run validate_fields.py) -4. Fill the form (run fill_form.py) -5. 
Verify output (run verify_output.py) -``` - -## Conditional Workflows - -For tasks with branching logic, guide Claude through decision points: - -```markdown -1. Determine the modification type: - **Creating new content?** → Follow "Creation workflow" below - **Editing existing content?** → Follow "Editing workflow" below - -2. Creation workflow: [steps] -3. Editing workflow: [steps] -``` diff --git a/.agents/skills/skill-creator/scripts/init_skill.py b/.agents/skills/skill-creator/scripts/init_skill.py deleted file mode 100755 index 249fffcbbd..0000000000 --- a/.agents/skills/skill-creator/scripts/init_skill.py +++ /dev/null @@ -1,300 +0,0 @@ -#!/usr/bin/env python3 -""" -Skill Initializer - Creates a new skill from template - -Usage: - init_skill.py --path - -Examples: - init_skill.py my-new-skill --path skills/public - init_skill.py my-api-helper --path skills/private - init_skill.py custom-skill --path /custom/location -""" - -import sys -from pathlib import Path - - -SKILL_TEMPLATE = """--- -name: {skill_name} -description: [TODO: Complete and informative explanation of what the skill does and when to use it. Include WHEN to use this skill - specific scenarios, file types, or tasks that trigger it.] ---- - -# {skill_title} - -## Overview - -[TODO: 1-2 sentences explaining what this skill enables] - -## Structuring This Skill - -[TODO: Choose the structure that best fits this skill's purpose. Common patterns: - -**1. Workflow-Based** (best for sequential processes) -- Works well when there are clear step-by-step procedures -- Example: DOCX skill with "Workflow Decision Tree" → "Reading" → "Creating" → "Editing" -- Structure: ## Overview → ## Workflow Decision Tree → ## Step 1 → ## Step 2... - -**2. 
Task-Based** (best for tool collections) -- Works well when the skill offers different operations/capabilities -- Example: PDF skill with "Quick Start" → "Merge PDFs" → "Split PDFs" → "Extract Text" -- Structure: ## Overview → ## Quick Start → ## Task Category 1 → ## Task Category 2... - -**3. Reference/Guidelines** (best for standards or specifications) -- Works well for brand guidelines, coding standards, or requirements -- Example: Brand styling with "Brand Guidelines" → "Colors" → "Typography" → "Features" -- Structure: ## Overview → ## Guidelines → ## Specifications → ## Usage... - -**4. Capabilities-Based** (best for integrated systems) -- Works well when the skill provides multiple interrelated features -- Example: Product Management with "Core Capabilities" → numbered capability list -- Structure: ## Overview → ## Core Capabilities → ### 1. Feature → ### 2. Feature... - -Patterns can be mixed and matched as needed. Most skills combine patterns (e.g., start with task-based, add workflow for complex operations). - -Delete this entire "Structuring This Skill" section when done - it's just guidance.] - -## [TODO: Replace with the first main section based on chosen structure] - -[TODO: Add content here. See examples in existing skills: -- Code samples for technical skills -- Decision trees for complex workflows -- Concrete examples with realistic user requests -- References to scripts/templates/references as needed] - -## Resources - -This skill includes example resource directories that demonstrate how to organize different types of bundled resources: - -### scripts/ -Executable code (Python/Bash/etc.) that can be run directly to perform specific operations. 
- -**Examples from other skills:** -- PDF skill: `fill_fillable_fields.py`, `extract_form_field_info.py` - utilities for PDF manipulation -- DOCX skill: `document.py`, `utilities.py` - Python modules for document processing - -**Appropriate for:** Python scripts, shell scripts, or any executable code that performs automation, data processing, or specific operations. - -**Note:** Scripts may be executed without loading into context, but can still be read by Claude for patching or environment adjustments. - -### references/ -Documentation and reference material intended to be loaded into context to inform Claude's process and thinking. - -**Examples from other skills:** -- Product management: `communication.md`, `context_building.md` - detailed workflow guides -- BigQuery: API reference documentation and query examples -- Finance: Schema documentation, company policies - -**Appropriate for:** In-depth documentation, API references, database schemas, comprehensive guides, or any detailed information that Claude should reference while working. - -### assets/ -Files not intended to be loaded into context, but rather used within the output Claude produces. - -**Examples from other skills:** -- Brand styling: PowerPoint template files (.pptx), logo files -- Frontend builder: HTML/React boilerplate project directories -- Typography: Font files (.ttf, .woff2) - -**Appropriate for:** Templates, boilerplate code, document templates, images, icons, fonts, or any files meant to be copied or used in the final output. - ---- - -**Any unneeded directories can be deleted.** Not every skill requires all three types of resources. -""" - -EXAMPLE_SCRIPT = '''#!/usr/bin/env python3 -""" -Example helper script for {skill_name} - -This is a placeholder script that can be executed directly. -Replace with actual implementation or delete if not needed. 
- -Example real scripts from other skills: -- pdf/scripts/fill_fillable_fields.py - Fills PDF form fields -- pdf/scripts/convert_pdf_to_images.py - Converts PDF pages to images -""" - -def main(): - print("This is an example script for {skill_name}") - # TODO: Add actual script logic here - # This could be data processing, file conversion, API calls, etc. - -if __name__ == "__main__": - main() -''' - -EXAMPLE_REFERENCE = """# Reference Documentation for {skill_title} - -This is a placeholder for detailed reference documentation. -Replace with actual reference content or delete if not needed. - -Example real reference docs from other skills: -- product-management/references/communication.md - Comprehensive guide for status updates -- product-management/references/context_building.md - Deep-dive on gathering context -- bigquery/references/ - API references and query examples - -## When Reference Docs Are Useful - -Reference docs are ideal for: -- Comprehensive API documentation -- Detailed workflow guides -- Complex multi-step processes -- Information too lengthy for main SKILL.md -- Content that's only needed for specific use cases - -## Structure Suggestions - -### API Reference Example -- Overview -- Authentication -- Endpoints with examples -- Error codes -- Rate limits - -### Workflow Guide Example -- Prerequisites -- Step-by-step instructions -- Common patterns -- Troubleshooting -- Best practices -""" - -EXAMPLE_ASSET = """# Example Asset File - -This placeholder represents where asset files would be stored. -Replace with actual asset files (templates, images, fonts, etc.) or delete if not needed. - -Asset files are NOT intended to be loaded into context, but rather used within -the output Claude produces. 
- -Example asset files from other skills: -- Brand guidelines: logo.png, slides_template.pptx -- Frontend builder: hello-world/ directory with HTML/React boilerplate -- Typography: custom-font.ttf, font-family.woff2 -- Data: sample_data.csv, test_dataset.json - -## Common Asset Types - -- Templates: .pptx, .docx, boilerplate directories -- Images: .png, .jpg, .svg, .gif -- Fonts: .ttf, .otf, .woff, .woff2 -- Boilerplate code: Project directories, starter files -- Icons: .ico, .svg -- Data files: .csv, .json, .xml, .yaml - -Note: This is a text placeholder. Actual assets can be any file type. -""" - - -def title_case_skill_name(skill_name): - """Convert hyphenated skill name to Title Case for display.""" - return " ".join(word.capitalize() for word in skill_name.split("-")) - - -def init_skill(skill_name, path): - """ - Initialize a new skill directory with template SKILL.md. - - Args: - skill_name: Name of the skill - path: Path where the skill directory should be created - - Returns: - Path to created skill directory, or None if error - """ - # Determine skill directory path - skill_dir = Path(path).resolve() / skill_name - - # Check if directory already exists - if skill_dir.exists(): - print(f"❌ Error: Skill directory already exists: {skill_dir}") - return None - - # Create skill directory - try: - skill_dir.mkdir(parents=True, exist_ok=False) - print(f"✅ Created skill directory: {skill_dir}") - except Exception as e: - print(f"❌ Error creating directory: {e}") - return None - - # Create SKILL.md from template - skill_title = title_case_skill_name(skill_name) - skill_content = SKILL_TEMPLATE.format(skill_name=skill_name, skill_title=skill_title) - - skill_md_path = skill_dir / "SKILL.md" - try: - skill_md_path.write_text(skill_content) - print("✅ Created SKILL.md") - except Exception as e: - print(f"❌ Error creating SKILL.md: {e}") - return None - - # Create resource directories with example files - try: - # Create scripts/ directory with example script - 
scripts_dir = skill_dir / "scripts" - scripts_dir.mkdir(exist_ok=True) - example_script = scripts_dir / "example.py" - example_script.write_text(EXAMPLE_SCRIPT.format(skill_name=skill_name)) - example_script.chmod(0o755) - print("✅ Created scripts/example.py") - - # Create references/ directory with example reference doc - references_dir = skill_dir / "references" - references_dir.mkdir(exist_ok=True) - example_reference = references_dir / "api_reference.md" - example_reference.write_text(EXAMPLE_REFERENCE.format(skill_title=skill_title)) - print("✅ Created references/api_reference.md") - - # Create assets/ directory with example asset placeholder - assets_dir = skill_dir / "assets" - assets_dir.mkdir(exist_ok=True) - example_asset = assets_dir / "example_asset.txt" - example_asset.write_text(EXAMPLE_ASSET) - print("✅ Created assets/example_asset.txt") - except Exception as e: - print(f"❌ Error creating resource directories: {e}") - return None - - # Print next steps - print(f"\n✅ Skill '{skill_name}' initialized successfully at {skill_dir}") - print("\nNext steps:") - print("1. Edit SKILL.md to complete the TODO items and update the description") - print("2. Customize or delete the example files in scripts/, references/, and assets/") - print("3. 
Run the validator when ready to check the skill structure") - - return skill_dir - - -def main(): - if len(sys.argv) < 4 or sys.argv[2] != "--path": - print("Usage: init_skill.py --path ") - print("\nSkill name requirements:") - print(" - Hyphen-case identifier (e.g., 'data-analyzer')") - print(" - Lowercase letters, digits, and hyphens only") - print(" - Max 40 characters") - print(" - Must match directory name exactly") - print("\nExamples:") - print(" init_skill.py my-new-skill --path skills/public") - print(" init_skill.py my-api-helper --path skills/private") - print(" init_skill.py custom-skill --path /custom/location") - sys.exit(1) - - skill_name = sys.argv[1] - path = sys.argv[3] - - print(f"🚀 Initializing skill: {skill_name}") - print(f" Location: {path}") - print() - - result = init_skill(skill_name, path) - - if result: - sys.exit(0) - else: - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/.agents/skills/skill-creator/scripts/package_skill.py b/.agents/skills/skill-creator/scripts/package_skill.py deleted file mode 100755 index 736b928be0..0000000000 --- a/.agents/skills/skill-creator/scripts/package_skill.py +++ /dev/null @@ -1,110 +0,0 @@ -#!/usr/bin/env python3 -""" -Skill Packager - Creates a distributable .skill file of a skill folder - -Usage: - python utils/package_skill.py [output-directory] - -Example: - python utils/package_skill.py skills/public/my-skill - python utils/package_skill.py skills/public/my-skill ./dist -""" - -import sys -import zipfile -from pathlib import Path -from quick_validate import validate_skill - - -def package_skill(skill_path, output_dir=None): - """ - Package a skill folder into a .skill file. 
- - Args: - skill_path: Path to the skill folder - output_dir: Optional output directory for the .skill file (defaults to current directory) - - Returns: - Path to the created .skill file, or None if error - """ - skill_path = Path(skill_path).resolve() - - # Validate skill folder exists - if not skill_path.exists(): - print(f"❌ Error: Skill folder not found: {skill_path}") - return None - - if not skill_path.is_dir(): - print(f"❌ Error: Path is not a directory: {skill_path}") - return None - - # Validate SKILL.md exists - skill_md = skill_path / "SKILL.md" - if not skill_md.exists(): - print(f"❌ Error: SKILL.md not found in {skill_path}") - return None - - # Run validation before packaging - print("🔍 Validating skill...") - valid, message = validate_skill(skill_path) - if not valid: - print(f"❌ Validation failed: {message}") - print(" Please fix the validation errors before packaging.") - return None - print(f"✅ {message}\n") - - # Determine output location - skill_name = skill_path.name - if output_dir: - output_path = Path(output_dir).resolve() - output_path.mkdir(parents=True, exist_ok=True) - else: - output_path = Path.cwd() - - skill_filename = output_path / f"{skill_name}.skill" - - # Create the .skill file (zip format) - try: - with zipfile.ZipFile(skill_filename, "w", zipfile.ZIP_DEFLATED) as zipf: - # Walk through the skill directory - for file_path in skill_path.rglob("*"): - if file_path.is_file(): - # Calculate the relative path within the zip - arcname = file_path.relative_to(skill_path.parent) - zipf.write(file_path, arcname) - print(f" Added: {arcname}") - - print(f"\n✅ Successfully packaged skill to: {skill_filename}") - return skill_filename - - except Exception as e: - print(f"❌ Error creating .skill file: {e}") - return None - - -def main(): - if len(sys.argv) < 2: - print("Usage: python utils/package_skill.py [output-directory]") - print("\nExample:") - print(" python utils/package_skill.py skills/public/my-skill") - print(" python 
utils/package_skill.py skills/public/my-skill ./dist") - sys.exit(1) - - skill_path = sys.argv[1] - output_dir = sys.argv[2] if len(sys.argv) > 2 else None - - print(f"📦 Packaging skill: {skill_path}") - if output_dir: - print(f" Output directory: {output_dir}") - print() - - result = package_skill(skill_path, output_dir) - - if result: - sys.exit(0) - else: - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/.agents/skills/skill-creator/scripts/quick_validate.py b/.agents/skills/skill-creator/scripts/quick_validate.py deleted file mode 100755 index 66eb0a71bf..0000000000 --- a/.agents/skills/skill-creator/scripts/quick_validate.py +++ /dev/null @@ -1,97 +0,0 @@ -#!/usr/bin/env python3 -""" -Quick validation script for skills - minimal version -""" - -import sys -import os -import re -import yaml -from pathlib import Path - - -def validate_skill(skill_path): - """Basic validation of a skill""" - skill_path = Path(skill_path) - - # Check SKILL.md exists - skill_md = skill_path / "SKILL.md" - if not skill_md.exists(): - return False, "SKILL.md not found" - - # Read and validate frontmatter - content = skill_md.read_text() - if not content.startswith("---"): - return False, "No YAML frontmatter found" - - # Extract frontmatter - match = re.match(r"^---\n(.*?)\n---", content, re.DOTALL) - if not match: - return False, "Invalid frontmatter format" - - frontmatter_text = match.group(1) - - # Parse YAML frontmatter - try: - frontmatter = yaml.safe_load(frontmatter_text) - if not isinstance(frontmatter, dict): - return False, "Frontmatter must be a YAML dictionary" - except yaml.YAMLError as e: - return False, f"Invalid YAML in frontmatter: {e}" - - # Define allowed properties - ALLOWED_PROPERTIES = {"name", "description", "license", "allowed-tools", "metadata"} - - # Check for unexpected properties (excluding nested keys under metadata) - unexpected_keys = set(frontmatter.keys()) - ALLOWED_PROPERTIES - if unexpected_keys: - return False, ( - f"Unexpected 
key(s) in SKILL.md frontmatter: {', '.join(sorted(unexpected_keys))}. " - f"Allowed properties are: {', '.join(sorted(ALLOWED_PROPERTIES))}" - ) - - # Check required fields - if "name" not in frontmatter: - return False, "Missing 'name' in frontmatter" - if "description" not in frontmatter: - return False, "Missing 'description' in frontmatter" - - # Extract name for validation - name = frontmatter.get("name", "") - if not isinstance(name, str): - return False, f"Name must be a string, got {type(name).__name__}" - name = name.strip() - if name: - # Check naming convention (hyphen-case: lowercase with hyphens) - if not re.match(r"^[a-z0-9-]+$", name): - return False, f"Name '{name}' should be hyphen-case (lowercase letters, digits, and hyphens only)" - if name.startswith("-") or name.endswith("-") or "--" in name: - return False, f"Name '{name}' cannot start/end with hyphen or contain consecutive hyphens" - # Check name length (max 64 characters per spec) - if len(name) > 64: - return False, f"Name is too long ({len(name)} characters). Maximum is 64 characters." - - # Extract and validate description - description = frontmatter.get("description", "") - if not isinstance(description, str): - return False, f"Description must be a string, got {type(description).__name__}" - description = description.strip() - if description: - # Check for angle brackets - if "<" in description or ">" in description: - return False, "Description cannot contain angle brackets (< or >)" - # Check description length (max 1024 characters per spec) - if len(description) > 1024: - return False, f"Description is too long ({len(description)} characters). Maximum is 1024 characters." - - return True, "Skill is valid!" 
- - -if __name__ == "__main__": - if len(sys.argv) != 2: - print("Usage: python quick_validate.py ") - sys.exit(1) - - valid, message = validate_skill(sys.argv[1]) - print(message) - sys.exit(0 if valid else 1) diff --git a/.agents/skills/vercel-react-best-practices/AGENTS.md b/.agents/skills/vercel-react-best-practices/AGENTS.md deleted file mode 100644 index f9b9e99c44..0000000000 --- a/.agents/skills/vercel-react-best-practices/AGENTS.md +++ /dev/null @@ -1,2410 +0,0 @@ -# React Best Practices - -**Version 1.0.0** -Vercel Engineering -January 2026 - -> **Note:** -> This document is mainly for agents and LLMs to follow when maintaining, -> generating, or refactoring React and Next.js codebases at Vercel. Humans -> may also find it useful, but guidance here is optimized for automation -> and consistency by AI-assisted workflows. - ---- - -## Abstract - -Comprehensive performance optimization guide for React and Next.js applications, designed for AI agents and LLMs. Contains 40+ rules across 8 categories, prioritized by impact from critical (eliminating waterfalls, reducing bundle size) to incremental (advanced patterns). Each rule includes detailed explanations, real-world examples comparing incorrect vs. correct implementations, and specific impact metrics to guide automated refactoring and code generation. - ---- - -## Table of Contents - -1. [Eliminating Waterfalls](#1-eliminating-waterfalls) — **CRITICAL** - - 1.1 [Defer Await Until Needed](#11-defer-await-until-needed) - - 1.2 [Dependency-Based Parallelization](#12-dependency-based-parallelization) - - 1.3 [Prevent Waterfall Chains in API Routes](#13-prevent-waterfall-chains-in-api-routes) - - 1.4 [Promise.all() for Independent Operations](#14-promiseall-for-independent-operations) - - 1.5 [Strategic Suspense Boundaries](#15-strategic-suspense-boundaries) -2. 
[Bundle Size Optimization](#2-bundle-size-optimization) — **CRITICAL** - - 2.1 [Avoid Barrel File Imports](#21-avoid-barrel-file-imports) - - 2.2 [Conditional Module Loading](#22-conditional-module-loading) - - 2.3 [Defer Non-Critical Third-Party Libraries](#23-defer-non-critical-third-party-libraries) - - 2.4 [Dynamic Imports for Heavy Components](#24-dynamic-imports-for-heavy-components) - - 2.5 [Preload Based on User Intent](#25-preload-based-on-user-intent) -3. [Server-Side Performance](#3-server-side-performance) — **HIGH** - - 3.1 [Cross-Request LRU Caching](#31-cross-request-lru-caching) - - 3.2 [Minimize Serialization at RSC Boundaries](#32-minimize-serialization-at-rsc-boundaries) - - 3.3 [Parallel Data Fetching with Component Composition](#33-parallel-data-fetching-with-component-composition) - - 3.4 [Per-Request Deduplication with React.cache()](#34-per-request-deduplication-with-reactcache) - - 3.5 [Use after() for Non-Blocking Operations](#35-use-after-for-non-blocking-operations) -4. [Client-Side Data Fetching](#4-client-side-data-fetching) — **MEDIUM-HIGH** - - 4.1 [Deduplicate Global Event Listeners](#41-deduplicate-global-event-listeners) - - 4.2 [Use Passive Event Listeners for Scrolling Performance](#42-use-passive-event-listeners-for-scrolling-performance) - - 4.3 [Use SWR for Automatic Deduplication](#43-use-swr-for-automatic-deduplication) - - 4.4 [Version and Minimize localStorage Data](#44-version-and-minimize-localstorage-data) -5. 
[Re-render Optimization](#5-re-render-optimization) — **MEDIUM** - - 5.1 [Defer State Reads to Usage Point](#51-defer-state-reads-to-usage-point) - - 5.2 [Extract to Memoized Components](#52-extract-to-memoized-components) - - 5.3 [Narrow Effect Dependencies](#53-narrow-effect-dependencies) - - 5.4 [Subscribe to Derived State](#54-subscribe-to-derived-state) - - 5.5 [Use Functional setState Updates](#55-use-functional-setstate-updates) - - 5.6 [Use Lazy State Initialization](#56-use-lazy-state-initialization) - - 5.7 [Use Transitions for Non-Urgent Updates](#57-use-transitions-for-non-urgent-updates) -6. [Rendering Performance](#6-rendering-performance) — **MEDIUM** - - 6.1 [Animate SVG Wrapper Instead of SVG Element](#61-animate-svg-wrapper-instead-of-svg-element) - - 6.2 [CSS content-visibility for Long Lists](#62-css-content-visibility-for-long-lists) - - 6.3 [Hoist Static JSX Elements](#63-hoist-static-jsx-elements) - - 6.4 [Optimize SVG Precision](#64-optimize-svg-precision) - - 6.5 [Prevent Hydration Mismatch Without Flickering](#65-prevent-hydration-mismatch-without-flickering) - - 6.6 [Use Activity Component for Show/Hide](#66-use-activity-component-for-showhide) - - 6.7 [Use Explicit Conditional Rendering](#67-use-explicit-conditional-rendering) -7. 
[JavaScript Performance](#7-javascript-performance) — **LOW-MEDIUM** - - 7.1 [Batch DOM CSS Changes](#71-batch-dom-css-changes) - - 7.2 [Build Index Maps for Repeated Lookups](#72-build-index-maps-for-repeated-lookups) - - 7.3 [Cache Property Access in Loops](#73-cache-property-access-in-loops) - - 7.4 [Cache Repeated Function Calls](#74-cache-repeated-function-calls) - - 7.5 [Cache Storage API Calls](#75-cache-storage-api-calls) - - 7.6 [Combine Multiple Array Iterations](#76-combine-multiple-array-iterations) - - 7.7 [Early Length Check for Array Comparisons](#77-early-length-check-for-array-comparisons) - - 7.8 [Early Return from Functions](#78-early-return-from-functions) - - 7.9 [Hoist RegExp Creation](#79-hoist-regexp-creation) - - 7.10 [Use Loop for Min/Max Instead of Sort](#710-use-loop-for-minmax-instead-of-sort) - - 7.11 [Use Set/Map for O(1) Lookups](#711-use-setmap-for-o1-lookups) - - 7.12 [Use toSorted() Instead of sort() for Immutability](#712-use-tosorted-instead-of-sort-for-immutability) -8. [Advanced Patterns](#8-advanced-patterns) — **LOW** - - 8.1 [Store Event Handlers in Refs](#81-store-event-handlers-in-refs) - - 8.2 [useLatest for Stable Callback Refs](#82-uselatest-for-stable-callback-refs) - ---- - -## 1. Eliminating Waterfalls - -**Impact: CRITICAL** - -Waterfalls are the #1 performance killer. Each sequential await adds full network latency. Eliminating them yields the largest gains. - -### 1.1 Defer Await Until Needed - -**Impact: HIGH (avoids blocking unused code paths)** - -Move `await` operations into the branches where they're actually used to avoid blocking code paths that don't need them. 
- -**Incorrect: blocks both branches** - -```typescript -async function handleRequest(userId: string, skipProcessing: boolean) { - const userData = await fetchUserData(userId) - - if (skipProcessing) { - // Returns immediately but still waited for userData - return { skipped: true } - } - - // Only this branch uses userData - return processUserData(userData) -} -``` - -**Correct: only blocks when needed** - -```typescript -async function handleRequest(userId: string, skipProcessing: boolean) { - if (skipProcessing) { - // Returns immediately without waiting - return { skipped: true } - } - - // Fetch only when needed - const userData = await fetchUserData(userId) - return processUserData(userData) -} -``` - -**Another example: early return optimization** - -```typescript -// Incorrect: always fetches permissions -async function updateResource(resourceId: string, userId: string) { - const permissions = await fetchPermissions(userId) - const resource = await getResource(resourceId) - - if (!resource) { - return { error: 'Not found' } - } - - if (!permissions.canEdit) { - return { error: 'Forbidden' } - } - - return await updateResourceData(resource, permissions) -} - -// Correct: fetches only when needed -async function updateResource(resourceId: string, userId: string) { - const resource = await getResource(resourceId) - - if (!resource) { - return { error: 'Not found' } - } - - const permissions = await fetchPermissions(userId) - - if (!permissions.canEdit) { - return { error: 'Forbidden' } - } - - return await updateResourceData(resource, permissions) -} -``` - -This optimization is especially valuable when the skipped branch is frequently taken, or when the deferred operation is expensive. - -### 1.2 Dependency-Based Parallelization - -**Impact: CRITICAL (2-10× improvement)** - -For operations with partial dependencies, use `better-all` to maximize parallelism. It automatically starts each task at the earliest possible moment. 
- -**Incorrect: profile waits for config unnecessarily** - -```typescript -const [user, config] = await Promise.all([ - fetchUser(), - fetchConfig() -]) -const profile = await fetchProfile(user.id) -``` - -**Correct: config and profile run in parallel** - -```typescript -import { all } from 'better-all' - -const { user, config, profile } = await all({ - async user() { return fetchUser() }, - async config() { return fetchConfig() }, - async profile() { - return fetchProfile((await this.$.user).id) - } -}) -``` - -Reference: [https://github.com/shuding/better-all](https://github.com/shuding/better-all) - -### 1.3 Prevent Waterfall Chains in API Routes - -**Impact: CRITICAL (2-10× improvement)** - -In API routes and Server Actions, start independent operations immediately, even if you don't await them yet. - -**Incorrect: config waits for auth, data waits for both** - -```typescript -export async function GET(request: Request) { - const session = await auth() - const config = await fetchConfig() - const data = await fetchData(session.user.id) - return Response.json({ data, config }) -} -``` - -**Correct: auth and config start immediately** - -```typescript -export async function GET(request: Request) { - const sessionPromise = auth() - const configPromise = fetchConfig() - const session = await sessionPromise - const [config, data] = await Promise.all([ - configPromise, - fetchData(session.user.id) - ]) - return Response.json({ data, config }) -} -``` - -For operations with more complex dependency chains, use `better-all` to automatically maximize parallelism (see Dependency-Based Parallelization). - -### 1.4 Promise.all() for Independent Operations - -**Impact: CRITICAL (2-10× improvement)** - -When async operations have no interdependencies, execute them concurrently using `Promise.all()`. 
- -**Incorrect: sequential execution, 3 round trips** - -```typescript -const user = await fetchUser() -const posts = await fetchPosts() -const comments = await fetchComments() -``` - -**Correct: parallel execution, 1 round trip** - -```typescript -const [user, posts, comments] = await Promise.all([ - fetchUser(), - fetchPosts(), - fetchComments() -]) -``` - -### 1.5 Strategic Suspense Boundaries - -**Impact: HIGH (faster initial paint)** - -Instead of awaiting data in async components before returning JSX, use Suspense boundaries to show the wrapper UI faster while data loads. - -**Incorrect: wrapper blocked by data fetching** - -```tsx -async function Page() { - const data = await fetchData() // Blocks entire page - - return ( -
-
Sidebar
-
Header
-
- -
-
Footer
-
- ) -} -``` - -The entire layout waits for data even though only the middle section needs it. - -**Correct: wrapper shows immediately, data streams in** - -```tsx -function Page() { - return ( -
-
Sidebar
-
Header
-
- }> - - -
-
Footer
-
- ) -} - -async function DataDisplay() { - const data = await fetchData() // Only blocks this component - return
{data.content}
-} -``` - -Sidebar, Header, and Footer render immediately. Only DataDisplay waits for data. - -**Alternative: share promise across components** - -```tsx -function Page() { - // Start fetch immediately, but don't await - const dataPromise = fetchData() - - return ( -
-
Sidebar
-
Header
- }> - - - -
Footer
-
- ) -} - -function DataDisplay({ dataPromise }: { dataPromise: Promise }) { - const data = use(dataPromise) // Unwraps the promise - return
{data.content}
-} - -function DataSummary({ dataPromise }: { dataPromise: Promise }) { - const data = use(dataPromise) // Reuses the same promise - return
{data.summary}
-} -``` - -Both components share the same promise, so only one fetch occurs. Layout renders immediately while both components wait together. - -**When NOT to use this pattern:** - -- Critical data needed for layout decisions (affects positioning) - -- SEO-critical content above the fold - -- Small, fast queries where suspense overhead isn't worth it - -- When you want to avoid layout shift (loading → content jump) - -**Trade-off:** Faster initial paint vs potential layout shift. Choose based on your UX priorities. - ---- - -## 2. Bundle Size Optimization - -**Impact: CRITICAL** - -Reducing initial bundle size improves Time to Interactive and Largest Contentful Paint. - -### 2.1 Avoid Barrel File Imports - -**Impact: CRITICAL (200-800ms import cost, slow builds)** - -Import directly from source files instead of barrel files to avoid loading thousands of unused modules. **Barrel files** are entry points that re-export multiple modules (e.g., `index.js` that does `export * from './module'`). - -Popular icon and component libraries can have **up to 10,000 re-exports** in their entry file. For many React packages, **it takes 200-800ms just to import them**, affecting both development speed and production cold starts. - -**Why tree-shaking doesn't help:** When a library is marked as external (not bundled), the bundler can't optimize it. If you bundle it to enable tree-shaking, builds become substantially slower analyzing the entire module graph. 
- -**Incorrect: imports entire library** - -```tsx -import { Check, X, Menu } from 'lucide-react' -// Loads 1,583 modules, takes ~2.8s extra in dev -// Runtime cost: 200-800ms on every cold start - -import { Button, TextField } from '@mui/material' -// Loads 2,225 modules, takes ~4.2s extra in dev -``` - -**Correct: imports only what you need** - -```tsx -import Check from 'lucide-react/dist/esm/icons/check' -import X from 'lucide-react/dist/esm/icons/x' -import Menu from 'lucide-react/dist/esm/icons/menu' -// Loads only 3 modules (~2KB vs ~1MB) - -import Button from '@mui/material/Button' -import TextField from '@mui/material/TextField' -// Loads only what you use -``` - -**Alternative: Next.js 13.5+** - -```js -// next.config.js - use optimizePackageImports -module.exports = { - experimental: { - optimizePackageImports: ['lucide-react', '@mui/material'] - } -} - -// Then you can keep the ergonomic barrel imports: -import { Check, X, Menu } from 'lucide-react' -// Automatically transformed to direct imports at build time -``` - -Direct imports provide 15-70% faster dev boot, 28% faster builds, 40% faster cold starts, and significantly faster HMR. - -Libraries commonly affected: `lucide-react`, `@mui/material`, `@mui/icons-material`, `@tabler/icons-react`, `react-icons`, `@headlessui/react`, `@radix-ui/react-*`, `lodash`, `ramda`, `date-fns`, `rxjs`, `react-use`. - -Reference: [https://vercel.com/blog/how-we-optimized-package-imports-in-next-js](https://vercel.com/blog/how-we-optimized-package-imports-in-next-js) - -### 2.2 Conditional Module Loading - -**Impact: HIGH (loads large data only when needed)** - -Load large data or modules only when a feature is activated. 
**Example: lazy-load animation frames**

```tsx
function AnimationPlayer({ enabled, setEnabled }: { enabled: boolean; setEnabled: React.Dispatch<React.SetStateAction<boolean>> }) {
  const [frames, setFrames] = useState<Frame[] | null>(null)

  useEffect(() => {
    if (enabled && !frames && typeof window !== 'undefined') {
      import('./animation-frames.js')
        .then(mod => setFrames(mod.frames))
        .catch(() => setEnabled(false))
    }
  }, [enabled, frames, setEnabled])

  if (!frames) return <Placeholder />
  return <Animation frames={frames} />
}
```

The `typeof window !== 'undefined'` check prevents bundling this module for SSR, optimizing server bundle size and build speed.

### 2.3 Defer Non-Critical Third-Party Libraries

**Impact: MEDIUM (loads after hydration)**

Analytics, logging, and error tracking don't block user interaction. Load them after hydration.

**Incorrect: blocks initial bundle**

```tsx
import { Analytics } from '@vercel/analytics/react'

export default function RootLayout({ children }) {
  return (
    <html>
      <body>
        {children}
        <Analytics />
      </body>
    </html>
  )
}
```

**Correct: loads after hydration**

```tsx
import dynamic from 'next/dynamic'

const Analytics = dynamic(
  () => import('@vercel/analytics/react').then(m => m.Analytics),
  { ssr: false }
)

export default function RootLayout({ children }) {
  return (
    <html>
      <body>
        {children}
        <Analytics />
      </body>
    </html>
  )
}
```

### 2.4 Dynamic Imports for Heavy Components

**Impact: CRITICAL (directly affects TTI and LCP)**

Use `next/dynamic` to lazy-load large components not needed on initial render.

**Incorrect: Monaco bundles with main chunk ~300KB**

```tsx
import { MonacoEditor } from './monaco-editor'

function CodePanel({ code }: { code: string }) {
  return <MonacoEditor value={code} />
}
```

**Correct: Monaco loads on demand**

```tsx
import dynamic from 'next/dynamic'

const MonacoEditor = dynamic(
  () => import('./monaco-editor').then(m => m.MonacoEditor),
  { ssr: false }
)

function CodePanel({ code }: { code: string }) {
  return <MonacoEditor value={code} />
}
```

### 2.5 Preload Based on User Intent

**Impact: MEDIUM (reduces perceived latency)**

Preload heavy bundles before they're needed to reduce perceived latency.

**Example: preload on hover/focus**

```tsx
function EditorButton({ onClick }: { onClick: () => void }) {
  const preload = () => {
    if (typeof window !== 'undefined') {
      void import('./monaco-editor')
    }
  }

  return (
    <button onClick={onClick} onMouseEnter={preload} onFocus={preload}>
      Open Editor
    </button>
  )
}
```

**Example: preload when feature flag is enabled**

```tsx
function FlagsProvider({ children, flags }: Props) {
  useEffect(() => {
    if (flags.editorEnabled && typeof window !== 'undefined') {
      void import('./monaco-editor').then(mod => mod.init())
    }
  }, [flags.editorEnabled])

  return (
    <FlagsContext.Provider value={flags}>
      {children}
    </FlagsContext.Provider>
  )
}
```

The `typeof window !== 'undefined'` check prevents bundling preloaded modules for SSR, optimizing server bundle size and build speed.

---

## 3. Server-Side Performance

**Impact: HIGH**

Optimizing server-side rendering and data fetching eliminates server-side waterfalls and reduces response times.

### 3.1 Cross-Request LRU Caching

**Impact: HIGH (caches across requests)**

`React.cache()` only works within one request. For data shared across sequential requests (user clicks button A then button B), use an LRU cache.
**Implementation:**

```typescript
import { LRUCache } from 'lru-cache'

const cache = new LRUCache<string, User>({
  max: 1000,
  ttl: 5 * 60 * 1000 // 5 minutes
})

export async function getUser(id: string) {
  const cached = cache.get(id)
  if (cached) return cached

  const user = await db.user.findUnique({ where: { id } })
  cache.set(id, user)
  return user
}

// Request 1: DB query, result cached
// Request 2: cache hit, no DB query
```

Use when sequential user actions hit multiple endpoints needing the same data within seconds.

**With Vercel's [Fluid Compute](https://vercel.com/docs/fluid-compute):** LRU caching is especially effective because multiple concurrent requests can share the same function instance and cache. This means the cache persists across requests without needing external storage like Redis.

**In traditional serverless:** Each invocation runs in isolation, so consider Redis for cross-process caching.

Reference: [https://github.com/isaacs/node-lru-cache](https://github.com/isaacs/node-lru-cache)

### 3.2 Minimize Serialization at RSC Boundaries

**Impact: HIGH (reduces data transfer size)**

The React Server/Client boundary serializes all object properties into strings and embeds them in the HTML response and subsequent RSC requests. This serialized data directly impacts page weight and load time, so **size matters a lot**. Only pass fields that the client actually uses.

**Incorrect: serializes all 50 fields**

```tsx
async function Page() {
  const user = await fetchUser() // 50 fields
  return <Profile user={user} />
}

'use client'
function Profile({ user }: { user: User }) {
  return <div>{user.name}</div> // uses 1 field
}
```

**Correct: serializes only 1 field**

```tsx
async function Page() {
  const user = await fetchUser()
  return <Profile name={user.name} />
}

'use client'
function Profile({ name }: { name: string }) {
  return <div>{name}</div>
}
```

### 3.3 Parallel Data Fetching with Component Composition

**Impact: CRITICAL (eliminates server-side waterfalls)**

React Server Components execute sequentially within a tree. Restructure with composition to parallelize data fetching.

**Incorrect: Sidebar waits for Page's fetch to complete**

```tsx
export default async function Page() {
  const header = await fetchHeader()
  return (
    <div>
      <div>{header}</div>
      <Sidebar />
    </div>
  )
}

async function Sidebar() {
  const items = await fetchSidebarItems()
  return <nav>{items.map(renderItem)}</nav>
}
```

**Correct: both fetch simultaneously**

```tsx
async function Header() {
  const data = await fetchHeader()
  return <div>{data}</div>
}

async function Sidebar() {
  const items = await fetchSidebarItems()
  return <nav>{items.map(renderItem)}</nav>
}

export default function Page() {
  return (
    <div>
      <Header />
      <Sidebar />
    </div>
  )
}
```

**Alternative with children prop:**

```tsx
async function Header() {
  const data = await fetchHeader()
  return <div>{data}</div>
}

async function Sidebar() {
  const items = await fetchSidebarItems()
  return <nav>{items.map(renderItem)}</nav>
}

function Layout({ children }: { children: ReactNode }) {
  return (
    <div>
      <Sidebar />
      {children}
    </div>
  )
}

export default function Page() {
  return (
    <Layout>
      <Header />
    </Layout>
  )
}
```

### 3.4 Per-Request Deduplication with React.cache()

**Impact: MEDIUM (deduplicates within request)**

Use `React.cache()` for server-side request deduplication. Authentication and database queries benefit most.

**Usage:**

```typescript
import { cache } from 'react'

export const getCurrentUser = cache(async () => {
  const session = await auth()
  if (!session?.user?.id) return null
  return await db.user.findUnique({
    where: { id: session.user.id }
  })
})
```

Within a single request, multiple calls to `getCurrentUser()` execute the query only once.

**Avoid inline objects as arguments:**

`React.cache()` uses shallow equality (`Object.is`) to determine cache hits. Inline objects create new references each call, preventing cache hits.

**Incorrect: always cache miss**

```typescript
const getUser = cache(async (params: { uid: number }) => {
  return await db.user.findUnique({ where: { id: params.uid } })
})

// Each call creates new object, never hits cache
getUser({ uid: 1 })
getUser({ uid: 1 }) // Cache miss, runs query again
```

**Correct: cache hit**

```typescript
const params = { uid: 1 }
getUser(params) // Query runs
getUser(params) // Cache hit (same reference)
```

If you must pass objects, pass the same reference, as shown above.

**Next.js-Specific Note:**

In Next.js, the `fetch` API is automatically extended with request memoization. Requests with the same URL and options are automatically deduplicated within a single request, so you don't need `React.cache()` for `fetch` calls. However, `React.cache()` is still essential for other async tasks:

- Database queries (Prisma, Drizzle, etc.)
- Heavy computations
- Authentication checks
- File system operations
- Any non-fetch async work

Use `React.cache()` to deduplicate these operations across your component tree.
- -Reference: [https://react.dev/reference/react/cache](https://react.dev/reference/react/cache) - -### 3.5 Use after() for Non-Blocking Operations - -**Impact: MEDIUM (faster response times)** - -Use Next.js's `after()` to schedule work that should execute after a response is sent. This prevents logging, analytics, and other side effects from blocking the response. - -**Incorrect: blocks response** - -```tsx -import { logUserAction } from '@/app/utils' - -export async function POST(request: Request) { - // Perform mutation - await updateDatabase(request) - - // Logging blocks the response - const userAgent = request.headers.get('user-agent') || 'unknown' - await logUserAction({ userAgent }) - - return new Response(JSON.stringify({ status: 'success' }), { - status: 200, - headers: { 'Content-Type': 'application/json' } - }) -} -``` - -**Correct: non-blocking** - -```tsx -import { after } from 'next/server' -import { headers, cookies } from 'next/headers' -import { logUserAction } from '@/app/utils' - -export async function POST(request: Request) { - // Perform mutation - await updateDatabase(request) - - // Log after response is sent - after(async () => { - const userAgent = (await headers()).get('user-agent') || 'unknown' - const sessionCookie = (await cookies()).get('session-id')?.value || 'anonymous' - - logUserAction({ sessionCookie, userAgent }) - }) - - return new Response(JSON.stringify({ status: 'success' }), { - status: 200, - headers: { 'Content-Type': 'application/json' } - }) -} -``` - -The response is sent immediately while logging happens in the background. 
- -**Common use cases:** - -- Analytics tracking - -- Audit logging - -- Sending notifications - -- Cache invalidation - -- Cleanup tasks - -**Important notes:** - -- `after()` runs even if the response fails or redirects - -- Works in Server Actions, Route Handlers, and Server Components - -Reference: [https://nextjs.org/docs/app/api-reference/functions/after](https://nextjs.org/docs/app/api-reference/functions/after) - ---- - -## 4. Client-Side Data Fetching - -**Impact: MEDIUM-HIGH** - -Automatic deduplication and efficient data fetching patterns reduce redundant network requests. - -### 4.1 Deduplicate Global Event Listeners - -**Impact: LOW (single listener for N components)** - -Use `useSWRSubscription()` to share global event listeners across component instances. - -**Incorrect: N instances = N listeners** - -```tsx -function useKeyboardShortcut(key: string, callback: () => void) { - useEffect(() => { - const handler = (e: KeyboardEvent) => { - if (e.metaKey && e.key === key) { - callback() - } - } - window.addEventListener('keydown', handler) - return () => window.removeEventListener('keydown', handler) - }, [key, callback]) -} -``` - -When using the `useKeyboardShortcut` hook multiple times, each instance will register a new listener. 
**Correct: N instances = 1 listener**

```tsx
import useSWRSubscription from 'swr/subscription'

// Module-level Map to track callbacks per key
const keyCallbacks = new Map<string, Set<() => void>>()

function useKeyboardShortcut(key: string, callback: () => void) {
  // Register this callback in the Map
  useEffect(() => {
    if (!keyCallbacks.has(key)) {
      keyCallbacks.set(key, new Set())
    }
    keyCallbacks.get(key)!.add(callback)

    return () => {
      const set = keyCallbacks.get(key)
      if (set) {
        set.delete(callback)
        if (set.size === 0) {
          keyCallbacks.delete(key)
        }
      }
    }
  }, [key, callback])

  useSWRSubscription('global-keydown', () => {
    const handler = (e: KeyboardEvent) => {
      if (e.metaKey && keyCallbacks.has(e.key)) {
        keyCallbacks.get(e.key)!.forEach(cb => cb())
      }
    }
    window.addEventListener('keydown', handler)
    return () => window.removeEventListener('keydown', handler)
  })
}

function Profile() {
  // Multiple shortcuts will share the same listener
  useKeyboardShortcut('p', () => { /* ... */ })
  useKeyboardShortcut('k', () => { /* ... */ })
  // ...
}
```

### 4.2 Use Passive Event Listeners for Scrolling Performance

**Impact: MEDIUM (eliminates scroll delay caused by event listeners)**

Add `{ passive: true }` to touch and wheel event listeners to enable immediate scrolling. Browsers normally wait for listeners to finish to check if `preventDefault()` is called, causing scroll delay.
- -**Incorrect:** - -```typescript -useEffect(() => { - const handleTouch = (e: TouchEvent) => console.log(e.touches[0].clientX) - const handleWheel = (e: WheelEvent) => console.log(e.deltaY) - - document.addEventListener('touchstart', handleTouch) - document.addEventListener('wheel', handleWheel) - - return () => { - document.removeEventListener('touchstart', handleTouch) - document.removeEventListener('wheel', handleWheel) - } -}, []) -``` - -**Correct:** - -```typescript -useEffect(() => { - const handleTouch = (e: TouchEvent) => console.log(e.touches[0].clientX) - const handleWheel = (e: WheelEvent) => console.log(e.deltaY) - - document.addEventListener('touchstart', handleTouch, { passive: true }) - document.addEventListener('wheel', handleWheel, { passive: true }) - - return () => { - document.removeEventListener('touchstart', handleTouch) - document.removeEventListener('wheel', handleWheel) - } -}, []) -``` - -**Use passive when:** tracking/analytics, logging, any listener that doesn't call `preventDefault()`. - -**Don't use passive when:** implementing custom swipe gestures, custom zoom controls, or any listener that needs `preventDefault()`. - -### 4.3 Use SWR for Automatic Deduplication - -**Impact: MEDIUM-HIGH (automatic deduplication)** - -SWR enables request deduplication, caching, and revalidation across component instances. 
- -**Incorrect: no deduplication, each instance fetches** - -```tsx -function UserList() { - const [users, setUsers] = useState([]) - useEffect(() => { - fetch('/api/users') - .then(r => r.json()) - .then(setUsers) - }, []) -} -``` - -**Correct: multiple instances share one request** - -```tsx -import useSWR from 'swr' - -function UserList() { - const { data: users } = useSWR('/api/users', fetcher) -} -``` - -**For immutable data:** - -```tsx -import { useImmutableSWR } from '@/lib/swr' - -function StaticContent() { - const { data } = useImmutableSWR('/api/config', fetcher) -} -``` - -**For mutations:** - -```tsx -import { useSWRMutation } from 'swr/mutation' - -function UpdateButton() { - const { trigger } = useSWRMutation('/api/user', updateUser) - return -} -``` - -Reference: [https://swr.vercel.app](https://swr.vercel.app) - -### 4.4 Version and Minimize localStorage Data - -**Impact: MEDIUM (prevents schema conflicts, reduces storage size)** - -Add version prefix to keys and store only needed fields. Prevents schema conflicts and accidental storage of sensitive data. - -**Incorrect:** - -```typescript -// No version, stores everything, no error handling -localStorage.setItem('userConfig', JSON.stringify(fullUserObject)) -const data = localStorage.getItem('userConfig') -``` - -**Correct:** - -```typescript -const VERSION = 'v2' - -function saveConfig(config: { theme: string; language: string }) { - try { - localStorage.setItem(`userConfig:${VERSION}`, JSON.stringify(config)) - } catch { - // Throws in incognito/private browsing, quota exceeded, or disabled - } -} - -function loadConfig() { - try { - const data = localStorage.getItem(`userConfig:${VERSION}`) - return data ? JSON.parse(data) : null - } catch { - return null - } -} - -// Migration from v1 to v2 -function migrate() { - try { - const v1 = localStorage.getItem('userConfig:v1') - if (v1) { - const old = JSON.parse(v1) - saveConfig({ theme: old.darkMode ? 
'dark' : 'light', language: old.lang }) - localStorage.removeItem('userConfig:v1') - } - } catch {} -} -``` - -**Store minimal fields from server responses:** - -```typescript -// User object has 20+ fields, only store what UI needs -function cachePrefs(user: FullUser) { - try { - localStorage.setItem('prefs:v1', JSON.stringify({ - theme: user.preferences.theme, - notifications: user.preferences.notifications - })) - } catch {} -} -``` - -**Always wrap in try-catch:** `getItem()` and `setItem()` throw in incognito/private browsing (Safari, Firefox), when quota exceeded, or when disabled. - -**Benefits:** Schema evolution via versioning, reduced storage size, prevents storing tokens/PII/internal flags. - ---- - -## 5. Re-render Optimization - -**Impact: MEDIUM** - -Reducing unnecessary re-renders minimizes wasted computation and improves UI responsiveness. - -### 5.1 Defer State Reads to Usage Point - -**Impact: MEDIUM (avoids unnecessary subscriptions)** - -Don't subscribe to dynamic state (searchParams, localStorage) if you only read it inside callbacks. - -**Incorrect: subscribes to all searchParams changes** - -```tsx -function ShareButton({ chatId }: { chatId: string }) { - const searchParams = useSearchParams() - - const handleShare = () => { - const ref = searchParams.get('ref') - shareChat(chatId, { ref }) - } - - return -} -``` - -**Correct: reads on demand, no subscription** - -```tsx -function ShareButton({ chatId }: { chatId: string }) { - const handleShare = () => { - const params = new URLSearchParams(window.location.search) - const ref = params.get('ref') - shareChat(chatId, { ref }) - } - - return -} -``` - -### 5.2 Extract to Memoized Components - -**Impact: MEDIUM (enables early returns)** - -Extract expensive work into memoized components to enable early returns before computation. 
**Incorrect: computes avatar even when loading**

```tsx
function Profile({ user, loading }: Props) {
  const avatar = useMemo(() => {
    const id = computeAvatarId(user)
    return <Avatar id={id} />
  }, [user])

  if (loading) return <Spinner />
  return <div>{avatar}</div>
}
```

**Correct: skips computation when loading**

```tsx
const UserAvatar = memo(function UserAvatar({ user }: { user: User }) {
  const id = useMemo(() => computeAvatarId(user), [user])
  return <Avatar id={id} />
})

function Profile({ user, loading }: Props) {
  if (loading) return <Spinner />
  return (
    <div>
      <UserAvatar user={user} />
    </div>
  )
}
```

**Note:** If your project has [React Compiler](https://react.dev/learn/react-compiler) enabled, manual memoization with `memo()` and `useMemo()` is not necessary. The compiler automatically optimizes re-renders.

### 5.3 Narrow Effect Dependencies

**Impact: LOW (minimizes effect re-runs)**

Specify primitive dependencies instead of objects to minimize effect re-runs.

**Incorrect: re-runs on any user field change**

```tsx
useEffect(() => {
  console.log(user.id)
}, [user])
```

**Correct: re-runs only when id changes**

```tsx
useEffect(() => {
  console.log(user.id)
}, [user.id])
```

**For derived state, compute outside effect:**

```tsx
// Incorrect: runs on width=767, 766, 765...
useEffect(() => {
  if (width < 768) {
    enableMobileMode()
  }
}, [width])

// Correct: runs only on boolean transition
const isMobile = width < 768
useEffect(() => {
  if (isMobile) {
    enableMobileMode()
  }
}, [isMobile])
```

### 5.4 Subscribe to Derived State

**Impact: MEDIUM (reduces re-render frequency)**

Subscribe to derived boolean state instead of continuous values to reduce re-render frequency.

**Incorrect: re-renders on every pixel change**

```tsx
function Sidebar() {
  const width = useWindowWidth() // updates continuously
  const isMobile = width < 768
  return