diff --git a/api/.env.example b/api/.env.example index c1089c88ca..5095bd3832 100644 --- a/api/.env.example +++ b/api/.env.example @@ -42,6 +42,11 @@ REDIS_SENTINEL_USERNAME= REDIS_SENTINEL_PASSWORD= REDIS_SENTINEL_SOCKET_TIMEOUT=0.1 +# Redis Cluster configuration +REDIS_USE_CLUSTERS=false +REDIS_CLUSTERS= +REDIS_CLUSTERS_PASSWORD= + # PostgreSQL database configuration DB_USERNAME=postgres DB_PASSWORD=difyai123456 @@ -234,6 +239,10 @@ ANALYTICDB_ACCOUNT=testaccount ANALYTICDB_PASSWORD=testpassword ANALYTICDB_NAMESPACE=dify ANALYTICDB_NAMESPACE_PASSWORD=difypassword +ANALYTICDB_HOST=gp-test.aliyuncs.com +ANALYTICDB_PORT=5432 +ANALYTICDB_MIN_CONNECTION=1 +ANALYTICDB_MAX_CONNECTION=5 # OpenSearch configuration OPENSEARCH_HOST=127.0.0.1 diff --git a/api/commands.py b/api/commands.py index 64238698f8..17fb94b93d 100644 --- a/api/commands.py +++ b/api/commands.py @@ -590,7 +590,7 @@ def upgrade_db(): click.echo(click.style("Database migration successful!", fg="green")) except Exception as e: - logging.exception(f"Database migration failed: {e}") + logging.exception("Failed to execute database migration") finally: lock.release() else: @@ -634,7 +634,7 @@ where sites.id is null limit 1000""" except Exception as e: failed_app_ids.append(app_id) click.echo(click.style("Failed to fix missing site for app {}".format(app_id), fg="red")) - logging.exception(f"Fix app related site missing issue failed, error: {e}") + logging.exception(f"Failed to fix missing site issue for app, app_id: {app_id}") continue if not processed_count: diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index 6641861329..7b7433e6b9 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -674,6 +674,11 @@ class DataSetConfig(BaseSettings): default=False, ) + PLAN_SANDBOX_CLEAN_MESSAGE_DAY_SETTING: PositiveInt = Field( + description="Interval in days for message cleanup operations - plan: sandbox", + default=30, + ) + class WorkspaceConfig(BaseSettings): """ diff --git a/api/configs/middleware/cache/redis_config.py b/api/configs/middleware/cache/redis_config.py index 26b9b1347c..2e98c31ec3 100644 --- a/api/configs/middleware/cache/redis_config.py +++ b/api/configs/middleware/cache/redis_config.py @@ -68,3 +68,18 @@ class RedisConfig(BaseSettings): description="Socket timeout in seconds for Redis Sentinel connections", default=0.1, ) + + REDIS_USE_CLUSTERS: bool = Field( + description="Enable Redis Cluster mode for high availability", + default=False, + ) + + REDIS_CLUSTERS: Optional[str] = Field( + description="Comma-separated list of Redis Cluster nodes (host:port)", + default=None, + ) + + REDIS_CLUSTERS_PASSWORD: Optional[str] = Field( + description="Password for Redis Cluster authentication (if required)", + default=None, + ) diff --git a/api/configs/middleware/vdb/analyticdb_config.py b/api/configs/middleware/vdb/analyticdb_config.py index 247a8ea555..53cfaae43e 100644 --- a/api/configs/middleware/vdb/analyticdb_config.py +++ b/api/configs/middleware/vdb/analyticdb_config.py @@ -1,6 +1,6 @@ from typing import Optional -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, PositiveInt class AnalyticdbConfig(BaseModel): @@ -40,3 +40,11 @@ class AnalyticdbConfig(BaseModel): description="The password for accessing the specified namespace within the AnalyticDB instance" " (if namespace feature is enabled).", ) + ANALYTICDB_HOST: Optional[str] = Field( + default=None, description="The host of the AnalyticDB instance you want to connect to."
+ ) + ANALYTICDB_PORT: PositiveInt = Field( + default=5432, description="The port of the AnalyticDB instance you want to connect to." + ) + ANALYTICDB_MIN_CONNECTION: PositiveInt = Field(default=1, description="Minimum number of connections for the AnalyticDB database.") + ANALYTICDB_MAX_CONNECTION: PositiveInt = Field(default=5, description="Maximum number of connections for the AnalyticDB database.") diff --git a/api/configs/packaging/__init__.py b/api/configs/packaging/__init__.py index 65065efbc0..1f2b8224e8 100644 --- a/api/configs/packaging/__init__.py +++ b/api/configs/packaging/__init__.py @@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings): CURRENT_VERSION: str = Field( description="Dify version", - default="0.11.1", + default="0.11.2", ) COMMIT_SHA: str = Field( diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py index 8cc3cf98c2..4ed74e2b08 100644 --- a/api/controllers/console/app/app.py +++ b/api/controllers/console/app/app.py @@ -9,6 +9,7 @@ from controllers.console.app.wraps import get_app_model from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_resource_check, + enterprise_license_required, setup_required, ) from core.model_runtime.utils.encoders import jsonable_encoder @@ -29,6 +30,7 @@ class AppListApi(Resource): @setup_required @login_required @account_initialization_required + @enterprise_license_required def get(self): """Get app list""" @@ -188,6 +190,7 @@ class AppApi(Resource): @setup_required @login_required @account_initialization_required + @enterprise_license_required @get_app_model @marshal_with(app_detail_fields_with_site) def get(self, app_model): diff --git a/api/controllers/console/app/audio.py b/api/controllers/console/app/audio.py index 112446613f..695b8890e3 100644 --- a/api/controllers/console/app/audio.py +++ b/api/controllers/console/app/audio.py @@ -70,7 +70,7 @@ class ChatMessageAudioApi(Resource): except ValueError as e: raise e except Exception as e: - logging.exception(f"internal server error, {str(e)}.") + logging.exception("Failed to handle post request to ChatMessageAudioApi") raise InternalServerError() @@ -128,7 +128,7 @@ class ChatMessageTextApi(Resource): except ValueError as e: raise e except Exception as e: - logging.exception(f"internal server error, {str(e)}.") + logging.exception("Failed to handle post request to ChatMessageTextApi") raise InternalServerError() @@ -170,7 +170,7 @@ class TextModesApi(Resource): except ValueError as e: raise e except Exception as e: - logging.exception(f"internal server error, {str(e)}.") + logging.exception("Failed to handle get request to TextModesApi") raise InternalServerError() diff --git a/api/controllers/console/auth/forgot_password.py b/api/controllers/console/auth/forgot_password.py index 0cc115d0ee..a0b9faa781 100644 --- a/api/controllers/console/auth/forgot_password.py +++ b/api/controllers/console/auth/forgot_password.py @@ -14,7 +14,7 @@ from controllers.console.auth.error import ( InvalidTokenError, PasswordMismatchError, ) -from controllers.console.error import EmailSendIpLimitError, NotAllowedRegister +from controllers.console.error import AccountNotFound, EmailSendIpLimitError from controllers.console.wraps import setup_required from events.tenant_event import tenant_was_created from extensions.ext_database import db @@ -51,7 +51,7 @@ class ForgotPasswordSendEmailApi(Resource): token = AccountService.send_reset_password_email(email=args["email"], language=language) return {"result": "fail", "data": token, "code": "account_not_found"} else: -
raise NotAllowedRegister() + raise AccountNotFound() else: token = AccountService.send_reset_password_email(account=account, email=args["email"], language=language) diff --git a/api/controllers/console/auth/login.py b/api/controllers/console/auth/login.py index e2e8f84920..f4463ce9cb 100644 --- a/api/controllers/console/auth/login.py +++ b/api/controllers/console/auth/login.py @@ -16,9 +16,9 @@ from controllers.console.auth.error import ( ) from controllers.console.error import ( AccountBannedError, + AccountNotFound, EmailSendIpLimitError, NotAllowedCreateWorkspace, - NotAllowedRegister, ) from controllers.console.wraps import setup_required from events.tenant_event import tenant_was_created @@ -76,7 +76,7 @@ class LoginApi(Resource): token = AccountService.send_reset_password_email(email=args["email"], language=language) return {"result": "fail", "data": token, "code": "account_not_found"} else: - raise NotAllowedRegister() + raise AccountNotFound() # SELF_HOSTED only have one workspace tenants = TenantService.get_join_tenants(account) if len(tenants) == 0: @@ -119,7 +119,7 @@ class ResetPasswordSendEmailApi(Resource): if FeatureService.get_system_features().is_allow_register: token = AccountService.send_reset_password_email(email=args["email"], language=language) else: - raise NotAllowedRegister() + raise AccountNotFound() else: token = AccountService.send_reset_password_email(account=account, language=language) @@ -148,7 +148,7 @@ class EmailCodeLoginSendEmailApi(Resource): if FeatureService.get_system_features().is_allow_register: token = AccountService.send_email_code_login_email(email=args["email"], language=language) else: - raise NotAllowedRegister() + raise AccountNotFound() else: token = AccountService.send_email_code_login_email(account=account, language=language) diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index 82163a32ee..95d4013e3a 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -10,7 +10,7 @@ from controllers.console import api from controllers.console.apikey import api_key_fields, api_key_list from controllers.console.app.error import ProviderNotInitializeError from controllers.console.datasets.error import DatasetInUseError, DatasetNameDuplicateError, IndexingEstimateError -from controllers.console.wraps import account_initialization_required, setup_required +from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError from core.indexing_runner import IndexingRunner from core.model_runtime.entities.model_entities import ModelType @@ -44,6 +44,7 @@ class DatasetListApi(Resource): @setup_required @login_required @account_initialization_required + @enterprise_license_required def get(self): page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index 3d010a861a..5393ef8bc1 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -959,7 +959,7 @@ class DocumentRetryApi(DocumentResource): raise DocumentAlreadyFinishedError() retry_documents.append(document) except Exception as e: - logging.exception(f"Document {document_id} retry failed: {str(e)}") + logging.exception(f"Failed to 
retry document, document id: {document_id}") continue # retry document DocumentService.retry_document(dataset_id, retry_documents) diff --git a/api/controllers/console/error.py b/api/controllers/console/error.py index e0630ca66c..1b4e6deae6 100644 --- a/api/controllers/console/error.py +++ b/api/controllers/console/error.py @@ -52,8 +52,8 @@ class AccountBannedError(BaseHTTPException): code = 400 -class NotAllowedRegister(BaseHTTPException): - error_code = "unauthorized" +class AccountNotFound(BaseHTTPException): + error_code = "account_not_found" description = "Account not found." code = 400 @@ -86,3 +86,9 @@ class NoFileUploadedError(BaseHTTPException): error_code = "no_file_uploaded" description = "Please upload your file." code = 400 + + +class UnauthorizedAndForceLogout(BaseHTTPException): + error_code = "unauthorized_and_force_logout" + description = "Unauthorized and force logout." + code = 401 diff --git a/api/controllers/console/remote_files.py b/api/controllers/console/remote_files.py index 9b899bef64..fac1341b39 100644 --- a/api/controllers/console/remote_files.py +++ b/api/controllers/console/remote_files.py @@ -45,7 +45,7 @@ class RemoteFileUploadApi(Resource): resp = ssrf_proxy.head(url=url) if resp.status_code != httpx.codes.OK: - resp = ssrf_proxy.get(url=url, timeout=3) + resp = ssrf_proxy.get(url=url, timeout=3, follow_redirects=True) resp.raise_for_status() file_info = helpers.guess_file_info_from_response(resp) diff --git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py index aabc417759..750f65168f 100644 --- a/api/controllers/console/workspace/account.py +++ b/api/controllers/console/workspace/account.py @@ -14,7 +14,7 @@ from controllers.console.workspace.error import ( InvalidInvitationCodeError, RepeatPasswordNotMatchError, ) -from controllers.console.wraps import account_initialization_required, setup_required +from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required from extensions.ext_database import db from fields.member_fields import account_fields from libs.helper import TimestampField, timezone @@ -79,6 +79,7 @@ class AccountProfileApi(Resource): @login_required @account_initialization_required @marshal_with(account_fields) + @enterprise_license_required def get(self): return current_user diff --git a/api/controllers/console/workspace/members.py b/api/controllers/console/workspace/members.py index 8f694c65e0..38ed2316a5 100644 --- a/api/controllers/console/workspace/members.py +++ b/api/controllers/console/workspace/members.py @@ -1,3 +1,5 @@ +from urllib import parse + from flask_login import current_user from flask_restful import Resource, abort, marshal_with, reqparse @@ -57,11 +59,12 @@ class MemberInviteEmailApi(Resource): token = RegisterService.invite_new_member( inviter.current_tenant, invitee_email, interface_language, role=invitee_role, inviter=inviter ) + encoded_invitee_email = parse.quote(invitee_email) invitation_results.append( { "status": "success", "email": invitee_email, - "url": f"{console_web_url}/activate?email={invitee_email}&token={token}", + "url": f"{console_web_url}/activate?email={encoded_invitee_email}&token={token}", } ) except AccountAlreadyInTenantError: diff --git a/api/controllers/console/workspace/models.py b/api/controllers/console/workspace/models.py index daa3455e2f..a03e875671 100644 --- a/api/controllers/console/workspace/models.py +++ b/api/controllers/console/workspace/models.py @@ -72,7 +72,10 @@ class 
DefaultModelApi(Resource): model=model_setting["model"], ) except Exception as ex: - logging.exception(f"{model_setting['model_type']} save error: {ex}") + logging.exception( + f"Failed to update default model, model type: {model_setting['model_type']}," + f" model: {model_setting.get('model')}" + ) raise ex return {"result": "success"} @@ -156,7 +159,10 @@ class ModelProviderModelApi(Resource): credentials=args["credentials"], ) except CredentialsValidateFailedError as ex: - logging.exception(f"save model credentials error: {ex}") + logging.exception( + f"Failed to save model credentials, tenant_id: {tenant_id}," + f" model: {args.get('model')}, model_type: {args.get('model_type')}" + ) raise ValueError(str(ex)) return {"result": "success"}, 200 diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py index ad8dee09f9..e5fcd19754 100644 --- a/api/controllers/console/workspace/tool_providers.py +++ b/api/controllers/console/workspace/tool_providers.py @@ -7,7 +7,7 @@ from werkzeug.exceptions import Forbidden from configs import dify_config from controllers.console import api -from controllers.console.wraps import account_initialization_required, setup_required +from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required from core.model_runtime.utils.encoders import jsonable_encoder from libs.helper import alphanumeric, uuid_value from libs.login import login_required @@ -608,6 +608,7 @@ class ToolLabelsApi(Resource): @setup_required @login_required @account_initialization_required + @enterprise_license_required def get(self): return jsonable_encoder(ToolLabelsService.list_tool_labels()) diff --git a/api/controllers/console/wraps.py b/api/controllers/console/wraps.py index 291e2500aa..0fbf450046 100644 --- a/api/controllers/console/wraps.py +++ b/api/controllers/console/wraps.py @@ -9,10 +9,10 @@ from configs import dify_config from controllers.console.workspace.error import AccountNotInitializedError from extensions.ext_database import db from models.model import DifySetup -from services.feature_service import FeatureService +from services.feature_service import FeatureService, LicenseStatus from services.operation_service import OperationService -from .error import NotInitValidateError, NotSetupError +from .error import NotInitValidateError, NotSetupError, UnauthorizedAndForceLogout def account_initialization_required(view): @@ -147,3 +147,15 @@ def setup_required(view): return view(*args, **kwargs) return decorated + + +def enterprise_license_required(view): + @wraps(view) + def decorated(*args, **kwargs): + settings = FeatureService.get_system_features() + if settings.license.status in [LicenseStatus.INACTIVE, LicenseStatus.EXPIRED, LicenseStatus.LOST]: + raise UnauthorizedAndForceLogout("Your license is invalid.
Please contact your administrator.") + + return view(*args, **kwargs) + + return decorated diff --git a/api/controllers/web/audio.py b/api/controllers/web/audio.py index 23550efe2e..e8521307ad 100644 --- a/api/controllers/web/audio.py +++ b/api/controllers/web/audio.py @@ -59,7 +59,7 @@ class AudioApi(WebApiResource): except ValueError as e: raise e except Exception as e: - logging.exception(f"internal server error: {str(e)}") + logging.exception("Failed to handle post request to AudioApi") raise InternalServerError() @@ -117,7 +117,7 @@ class TextApi(WebApiResource): except ValueError as e: raise e except Exception as e: - logging.exception(f"internal server error: {str(e)}") + logging.exception("Failed to handle post request to TextApi") raise InternalServerError() diff --git a/api/core/app/app_config/features/file_upload/manager.py b/api/core/app/app_config/features/file_upload/manager.py index 2043ea0e41..0dc4efc47a 100644 --- a/api/core/app/app_config/features/file_upload/manager.py +++ b/api/core/app/app_config/features/file_upload/manager.py @@ -16,9 +16,7 @@ class FileUploadConfigManager: file_upload_dict = config.get("file_upload") if file_upload_dict: if file_upload_dict.get("enabled"): - transform_methods = file_upload_dict.get("allowed_file_upload_methods") or file_upload_dict.get( - "allowed_upload_methods", [] - ) + transform_methods = file_upload_dict.get("allowed_file_upload_methods", []) data = { "image_config": { "number_limits": file_upload_dict["number_limits"], diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py index 749175534b..4cb5e713e6 100644 --- a/api/core/app/apps/advanced_chat/app_generator.py +++ b/api/core/app/apps/advanced_chat/app_generator.py @@ -373,5 +373,5 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): if e.args[0] == "I/O operation on closed file.": # ignore this error raise GenerateTaskStoppedError() else: - logger.exception(e) + logger.exception(f"Failed to process generate task pipeline, conversation_id: {conversation.id}") raise e diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py index 1d4c0ea0fa..e1798957b9 100644 --- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py +++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py @@ -242,7 +242,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc start_listener_time = time.time() yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id) except Exception as e: - logger.exception(e) + logger.exception(f"Failed to listen to audio message, task_id: {task_id}") break if tts_publisher: yield MessageAudioEndStreamResponse(audio="", task_id=task_id) diff --git a/api/core/app/apps/base_app_generator.py b/api/core/app/apps/base_app_generator.py index 667c2a9c6b..527aeed6a9 100644 --- a/api/core/app/apps/base_app_generator.py +++ b/api/core/app/apps/base_app_generator.py @@ -34,8 +34,8 @@ class BaseAppGenerator: tenant_id=app_config.tenant_id, config=FileUploadConfig( allowed_file_types=entity_dictionary[k].allowed_file_types, - allowed_extensions=entity_dictionary[k].allowed_file_extensions, - allowed_upload_methods=entity_dictionary[k].allowed_file_upload_methods, + allowed_file_extensions=entity_dictionary[k].allowed_file_extensions, + allowed_file_upload_methods=entity_dictionary[k].allowed_file_upload_methods, ), ) for k, v in user_inputs.items() @@ -48,8 +48,8 @@
tenant_id=app_config.tenant_id, config=FileUploadConfig( allowed_file_types=entity_dictionary[k].allowed_file_types, - allowed_extensions=entity_dictionary[k].allowed_file_extensions, - allowed_upload_methods=entity_dictionary[k].allowed_file_upload_methods, + allowed_file_extensions=entity_dictionary[k].allowed_file_extensions, + allowed_file_upload_methods=entity_dictionary[k].allowed_file_upload_methods, ), ) for k, v in user_inputs.items() @@ -92,6 +92,9 @@ class BaseAppGenerator: ) if variable_entity.type == VariableEntityType.NUMBER and isinstance(value, str): + # handle empty string case + if not value.strip(): + return None # may raise ValueError if user_input_value is not a valid number try: if "." in value: diff --git a/api/core/app/apps/message_based_app_generator.py b/api/core/app/apps/message_based_app_generator.py index bae64368e3..da206f01e7 100644 --- a/api/core/app/apps/message_based_app_generator.py +++ b/api/core/app/apps/message_based_app_generator.py @@ -80,7 +80,7 @@ class MessageBasedAppGenerator(BaseAppGenerator): if e.args[0] == "I/O operation on closed file.": # ignore this error raise GenerateTaskStoppedError() else: - logger.exception(e) + logger.exception(f"Failed to handle response, conversation_id: {conversation.id}") raise e def _get_conversation_by_user( diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py index adeddacd0f..792cec7be6 100644 --- a/api/core/app/apps/workflow/app_generator.py +++ b/api/core/app/apps/workflow/app_generator.py @@ -310,5 +310,7 @@ class WorkflowAppGenerator(BaseAppGenerator): if e.args[0] == "I/O operation on closed file.": # ignore this error raise GenerateTaskStoppedError() else: - logger.exception(e) + logger.exception( + f"Failed to process generate task pipeline, task_id: {application_generate_entity.task_id}" + ) raise e diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py index aaa4824fe8..9e4921d6a2 100644 --- a/api/core/app/apps/workflow/generate_task_pipeline.py +++ b/api/core/app/apps/workflow/generate_task_pipeline.py @@ -216,7 +216,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa else: yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id) except Exception as e: - logger.exception(e) + logger.exception(f"Failed to get audio trunk, task_id: {task_id}") break if tts_publisher: yield MessageAudioEndStreamResponse(audio="", task_id=task_id) diff --git a/api/core/app/task_pipeline/message_cycle_manage.py b/api/core/app/task_pipeline/message_cycle_manage.py index 236eebf0b8..e818a090ed 100644 --- a/api/core/app/task_pipeline/message_cycle_manage.py +++ b/api/core/app/task_pipeline/message_cycle_manage.py @@ -86,7 +86,7 @@ class MessageCycleManage: conversation.name = name except Exception as e: if dify_config.DEBUG: - logging.exception(f"generate conversation name failed: {e}") + logging.exception(f"Failed to generate conversation name, conversation_id: {conversation_id}") pass db.session.merge(conversation) diff --git a/api/core/file/models.py b/api/core/file/models.py index 0142893787..3e7e189c62 100644 --- a/api/core/file/models.py +++ b/api/core/file/models.py @@ -28,8 +28,8 @@ class FileUploadConfig(BaseModel): image_config: Optional[ImageConfig] = None allowed_file_types: Sequence[FileType] = Field(default_factory=list) - allowed_extensions: Sequence[str] = Field(default_factory=list) - allowed_upload_methods: Sequence[FileTransferMethod] =
Field(default_factory=list) + allowed_file_extensions: Sequence[str] = Field(default_factory=list) + allowed_file_upload_methods: Sequence[FileTransferMethod] = Field(default_factory=list) number_limits: int = 0 diff --git a/api/core/helper/moderation.py b/api/core/helper/moderation.py index 434f4205e8..899d2b6994 100644 --- a/api/core/helper/moderation.py +++ b/api/core/helper/moderation.py @@ -55,7 +55,7 @@ def check_moderation(tenant_id: str, model_config: ModelConfigWithCredentialsEnt if moderation_result is True: return True except Exception as ex: - logger.exception(ex) + logger.exception(f"Failed to check moderation, provider_name: {provider_name}") raise InvokeBadRequestError("Rate limit exceeded, please try again later.") return False diff --git a/api/core/helper/module_import_helper.py b/api/core/helper/module_import_helper.py index e6e1491548..1e2fefce88 100644 --- a/api/core/helper/module_import_helper.py +++ b/api/core/helper/module_import_helper.py @@ -29,7 +29,7 @@ def import_module_from_source(*, module_name: str, py_file_path: AnyStr, use_laz spec.loader.exec_module(module) return module except Exception as e: - logging.exception(f"Failed to load module {module_name} from {py_file_path}: {str(e)}") + logging.exception(f"Failed to load module {module_name} from script file '{py_file_path}'") raise e diff --git a/api/core/helper/ssrf_proxy.py b/api/core/helper/ssrf_proxy.py index 64d73c7cd9..2e457dd530 100644 --- a/api/core/helper/ssrf_proxy.py +++ b/api/core/helper/ssrf_proxy.py @@ -39,6 +39,7 @@ def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs): ) retries = 0 + stream = kwargs.pop("stream", False) while retries <= max_retries: try: if dify_config.SSRF_PROXY_ALL_URL: @@ -52,6 +53,8 @@ response = client.request(method=method, url=url, **kwargs) if response.status_code not in STATUS_FORCELIST: + if stream: + return response.iter_bytes() return response else: logging.warning( diff --git a/api/core/indexing_runner.py b/api/core/indexing_runner.py index e2a94073cf..7db8f54f70 100644 --- a/api/core/indexing_runner.py +++ b/api/core/indexing_runner.py @@ -29,6 +29,7 @@ from core.rag.splitter.fixed_text_splitter import ( FixedRecursiveCharacterTextSplitter, ) from core.rag.splitter.text_splitter import TextSplitter +from core.tools.utils.text_processing_utils import remove_leading_symbols from extensions.ext_database import db from extensions.ext_redis import redis_client from extensions.ext_storage import storage @@ -500,11 +501,7 @@ class IndexingRunner: document_node.metadata["doc_hash"] = hash # delete Splitter character page_content = document_node.page_content - if page_content.startswith(".") or page_content.startswith("。"): - page_content = page_content[1:] - else: - page_content = page_content - document_node.page_content = page_content + document_node.page_content = remove_leading_symbols(page_content) if document_node.page_content: split_documents.append(document_node) @@ -554,7 +551,7 @@ class IndexingRunner: qa_documents.append(qa_document) format_documents.extend(qa_documents) except Exception as e: - logging.exception(e) + logging.exception("Failed to format qa document") all_qa_documents.extend(format_documents) diff --git a/api/core/llm_generator/llm_generator.py b/api/core/llm_generator/llm_generator.py index 9cf9ed75c0..3a92c8d9d2 100644 --- a/api/core/llm_generator/llm_generator.py +++ b/api/core/llm_generator/llm_generator.py @@ -102,7 +102,7 @@ class
LLMGenerator: except InvokeError: questions = [] except Exception as e: - logging.exception(e) + logging.exception("Failed to generate suggested questions after answer") questions = [] return questions @@ -148,7 +148,7 @@ class LLMGenerator: error = str(e) error_step = "generate rule config" except Exception as e: - logging.exception(e) + logging.exception(f"Failed to generate rule config, model: {model_config.get('name')}") rule_config["error"] = str(e) rule_config["error"] = f"Failed to {error_step}. Error: {error}" if error else "" @@ -234,7 +234,7 @@ class LLMGenerator: error_step = "generate conversation opener" except Exception as e: - logging.exception(e) + logging.exception(f"Failed to generate conversation opener, model: {model_config.get('name')}") rule_config["error"] = str(e) rule_config["error"] = f"Failed to {error_step}. Error: {error}" if error else "" @@ -286,7 +286,9 @@ class LLMGenerator: error = str(e) return {"code": "", "language": code_language, "error": f"Failed to generate code. Error: {error}"} except Exception as e: - logging.exception(e) + logging.exception( + f"Failed to invoke LLM model, model: {model_config.get('name')}, language: {code_language}" + ) return {"code": "", "language": code_language, "error": f"An unexpected error occurred: {str(e)}"} @classmethod diff --git a/api/core/model_runtime/model_providers/gitee_ai/_assets/Gitee-AI-Logo-full.svg b/api/core/model_runtime/model_providers/gitee_ai/_assets/Gitee-AI-Logo-full.svg deleted file mode 100644 index f9738b585b..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/_assets/Gitee-AI-Logo-full.svg +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - diff --git a/api/core/model_runtime/model_providers/gitee_ai/_assets/Gitee-AI-Logo.svg b/api/core/model_runtime/model_providers/gitee_ai/_assets/Gitee-AI-Logo.svg deleted file mode 100644 index 1f51187f19..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/_assets/Gitee-AI-Logo.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/api/core/model_runtime/model_providers/gitee_ai/_common.py b/api/core/model_runtime/model_providers/gitee_ai/_common.py deleted file mode 100644 index 0750f3b75d..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/_common.py +++ /dev/null @@ -1,47 +0,0 @@ -from dashscope.common.error import ( - AuthenticationError, - InvalidParameter, - RequestFailure, - ServiceUnavailableError, - UnsupportedHTTPMethod, - UnsupportedModel, -) - -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) - - -class _CommonGiteeAI: - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - The key is the error type thrown to the caller - The value is the error type thrown by the model, - which needs to be converted into a unified error type for the caller.
- - :return: Invoke error mapping - """ - return { - InvokeConnectionError: [ - RequestFailure, - ], - InvokeServerUnavailableError: [ - ServiceUnavailableError, - ], - InvokeRateLimitError: [], - InvokeAuthorizationError: [ - AuthenticationError, - ], - InvokeBadRequestError: [ - InvalidParameter, - UnsupportedModel, - UnsupportedHTTPMethod, - ], - } diff --git a/api/core/model_runtime/model_providers/gitee_ai/gitee_ai.py b/api/core/model_runtime/model_providers/gitee_ai/gitee_ai.py deleted file mode 100644 index 14aa811905..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/gitee_ai.py +++ /dev/null @@ -1,36 +0,0 @@ -import logging - -import requests - -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class GiteeAIProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - api_key = credentials.get("api_key") - if not api_key: - raise CredentialsValidateFailedError("Credentials validation failed: api_key not given") - - # send a get request to validate the credentials - headers = {"Authorization": f"Bearer {api_key}"} - response = requests.get("https://ai.gitee.com/api/base/account/me", headers=headers, timeout=(10, 300)) - - if response.status_code != 200: - raise CredentialsValidateFailedError( - f"Credentials validation failed with status code {response.status_code}" - ) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/gitee_ai/gitee_ai.yaml b/api/core/model_runtime/model_providers/gitee_ai/gitee_ai.yaml deleted file mode 100644 index 7f7d0f2e53..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/gitee_ai.yaml +++ /dev/null @@ -1,35 +0,0 @@ -provider: gitee_ai -label: - en_US: Gitee AI - zh_Hans: Gitee AI -description: - en_US: 快速体验大模型,领先探索 AI 开源世界 - zh_Hans: 快速体验大模型,领先探索 AI 开源世界 -icon_small: - en_US: Gitee-AI-Logo.svg -icon_large: - en_US: Gitee-AI-Logo-full.svg -help: - title: - en_US: Get your token from Gitee AI - zh_Hans: 从 Gitee AI 获取 token - url: - en_US: https://ai.gitee.com/dashboard/settings/tokens -supported_model_types: - - llm - - text-embedding - - rerank - - speech2text - - tts -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/Qwen2-72B-Instruct.yaml b/api/core/model_runtime/model_providers/gitee_ai/llm/Qwen2-72B-Instruct.yaml deleted file mode 100644 index 0348438a75..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/llm/Qwen2-72B-Instruct.yaml +++ /dev/null @@ -1,105 +0,0 @@ -model: Qwen2-72B-Instruct -label: - zh_Hans: Qwen2-72B-Instruct - en_US: Qwen2-72B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 6400 -parameter_rules: - - name: stream - use_template: boolean - label: - en_US: "Stream" - zh_Hans: "流式" - type: boolean - default: true - 
required: true - help: - en_US: "Whether to return the results in batches through streaming. If set to true, the generated text will be pushed to the user in real time during the generation process." - zh_Hans: "是否通过流式分批返回结果。如果设置为 true,生成过程中实时地向用户推送每一部分生成的文本。" - - - name: max_tokens - use_template: max_tokens - label: - en_US: "Max Tokens" - zh_Hans: "最大Token数" - type: int - default: 512 - min: 1 - required: true - help: - en_US: "The maximum number of tokens that can be generated by the model varies depending on the model." - zh_Hans: "模型可生成的最大 token 个数,不同模型上限不同。" - - - name: temperature - use_template: temperature - label: - en_US: "Temperature" - zh_Hans: "采样温度" - type: float - default: 0.7 - min: 0.0 - max: 1.0 - precision: 1 - required: true - help: - en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." - zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: top_p - use_template: top_p - label: - en_US: "Top P" - zh_Hans: "Top P" - type: float - default: 0.7 - min: 0.0 - max: 1.0 - precision: 1 - required: true - help: - en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." - zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: top_k - use_template: top_k - label: - en_US: "Top K" - zh_Hans: "Top K" - type: int - default: 50 - min: 0 - max: 100 - required: true - help: - en_US: "The value range is [0,100], which limits the model to only select from the top k words with the highest probability when choosing the next word at each step. The larger the value, the more diverse text generation will be." - zh_Hans: "取值范围为 [0,100],限制模型在每一步选择下一个词时,只从概率最高的前 k 个词中选取。数值越大,文本生成越多样。" - - - name: frequency_penalty - use_template: frequency_penalty - label: - en_US: "Frequency Penalty" - zh_Hans: "频率惩罚" - type: float - default: 0 - min: -1.0 - max: 1.0 - precision: 1 - required: false - help: - en_US: "Used to adjust the frequency of repeated content in automatically generated text. Positive numbers reduce repetition, while negative numbers increase repetition. After setting this parameter, if a word has already appeared in the text, the model will decrease the probability of choosing that word for subsequent generation." - zh_Hans: "用于调整自动生成文本中重复内容的频率。正数减少重复,负数增加重复。设置此参数后,如果一个词在文本中已经出现过,模型在后续生成中选择该词的概率会降低。" - - - name: user - use_template: text - label: - en_US: "User" - zh_Hans: "用户" - type: string - required: false - help: - en_US: "Used to track and differentiate conversation requests from different users." 
- zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/Qwen2-7B-Instruct.yaml b/api/core/model_runtime/model_providers/gitee_ai/llm/Qwen2-7B-Instruct.yaml deleted file mode 100644 index ba1ad788f5..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/llm/Qwen2-7B-Instruct.yaml +++ /dev/null @@ -1,105 +0,0 @@ -model: Qwen2-7B-Instruct -label: - zh_Hans: Qwen2-7B-Instruct - en_US: Qwen2-7B-Instruct -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: stream - use_template: boolean - label: - en_US: "Stream" - zh_Hans: "流式" - type: boolean - default: true - required: true - help: - en_US: "Whether to return the results in batches through streaming. If set to true, the generated text will be pushed to the user in real time during the generation process." - zh_Hans: "是否通过流式分批返回结果。如果设置为 true,生成过程中实时地向用户推送每一部分生成的文本。" - - - name: max_tokens - use_template: max_tokens - label: - en_US: "Max Tokens" - zh_Hans: "最大Token数" - type: int - default: 512 - min: 1 - required: true - help: - en_US: "The maximum number of tokens that can be generated by the model varies depending on the model." - zh_Hans: "模型可生成的最大 token 个数,不同模型上限不同。" - - - name: temperature - use_template: temperature - label: - en_US: "Temperature" - zh_Hans: "采样温度" - type: float - default: 0.7 - min: 0.0 - max: 1.0 - precision: 1 - required: true - help: - en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." - zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: top_p - use_template: top_p - label: - en_US: "Top P" - zh_Hans: "Top P" - type: float - default: 0.7 - min: 0.0 - max: 1.0 - precision: 1 - required: true - help: - en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." - zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: top_k - use_template: top_k - label: - en_US: "Top K" - zh_Hans: "Top K" - type: int - default: 50 - min: 0 - max: 100 - required: true - help: - en_US: "The value range is [0,100], which limits the model to only select from the top k words with the highest probability when choosing the next word at each step. The larger the value, the more diverse text generation will be." - zh_Hans: "取值范围为 [0,100],限制模型在每一步选择下一个词时,只从概率最高的前 k 个词中选取。数值越大,文本生成越多样。" - - - name: frequency_penalty - use_template: frequency_penalty - label: - en_US: "Frequency Penalty" - zh_Hans: "频率惩罚" - type: float - default: 0 - min: -1.0 - max: 1.0 - precision: 1 - required: false - help: - en_US: "Used to adjust the frequency of repeated content in automatically generated text. Positive numbers reduce repetition, while negative numbers increase repetition. 
After setting this parameter, if a word has already appeared in the text, the model will decrease the probability of choosing that word for subsequent generation." - zh_Hans: "用于调整自动生成文本中重复内容的频率。正数减少重复,负数增加重复。设置此参数后,如果一个词在文本中已经出现过,模型在后续生成中选择该词的概率会降低。" - - - name: user - use_template: text - label: - en_US: "User" - zh_Hans: "用户" - type: string - required: false - help: - en_US: "Used to track and differentiate conversation requests from different users." - zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/Qwen2.5-72B-Instruct.yaml b/api/core/model_runtime/model_providers/gitee_ai/llm/Qwen2.5-72B-Instruct.yaml deleted file mode 100644 index cb300e5ddf..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/llm/Qwen2.5-72B-Instruct.yaml +++ /dev/null @@ -1,95 +0,0 @@ -model: Qwen2.5-72B-Instruct -label: - zh_Hans: Qwen2.5-72B-Instruct - en_US: Qwen2.5-72B-Instruct -model_type: llm -features: - - agent-thought - - tool-call - - stream-tool-call -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: max_tokens - use_template: max_tokens - label: - en_US: "Max Tokens" - zh_Hans: "最大Token数" - type: int - default: 512 - min: 1 - required: true - help: - en_US: "The maximum number of tokens that can be generated by the model varies depending on the model." - zh_Hans: "模型可生成的最大 token 个数,不同模型上限不同。" - - - name: temperature - use_template: temperature - label: - en_US: "Temperature" - zh_Hans: "采样温度" - type: float - default: 0.7 - min: 0.0 - max: 1.0 - precision: 1 - required: true - help: - en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." - zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: top_p - use_template: top_p - label: - en_US: "Top P" - zh_Hans: "Top P" - type: float - default: 0.7 - min: 0.0 - max: 1.0 - precision: 1 - required: true - help: - en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." - zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: top_k - use_template: top_k - label: - en_US: "Top K" - zh_Hans: "Top K" - type: int - default: 50 - min: 0 - max: 100 - required: true - help: - en_US: "The value range is [0,100], which limits the model to only select from the top k words with the highest probability when choosing the next word at each step. The larger the value, the more diverse text generation will be." - zh_Hans: "取值范围为 [0,100],限制模型在每一步选择下一个词时,只从概率最高的前 k 个词中选取。数值越大,文本生成越多样。" - - - name: frequency_penalty - use_template: frequency_penalty - label: - en_US: "Frequency Penalty" - zh_Hans: "频率惩罚" - type: float - default: 0 - min: -1.0 - max: 1.0 - precision: 1 - required: false - help: - en_US: "Used to adjust the frequency of repeated content in automatically generated text. 
Positive numbers reduce repetition, while negative numbers increase repetition. After setting this parameter, if a word has already appeared in the text, the model will decrease the probability of choosing that word for subsequent generation." - zh_Hans: "用于调整自动生成文本中重复内容的频率。正数减少重复,负数增加重复。设置此参数后,如果一个词在文本中已经出现过,模型在后续生成中选择该词的概率会降低。" - - - name: user - use_template: text - label: - en_US: "User" - zh_Hans: "用户" - type: string - required: false - help: - en_US: "Used to track and differentiate conversation requests from different users." - zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/Yi-1.5-34B-Chat.yaml b/api/core/model_runtime/model_providers/gitee_ai/llm/Yi-1.5-34B-Chat.yaml deleted file mode 100644 index f7260c987b..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/llm/Yi-1.5-34B-Chat.yaml +++ /dev/null @@ -1,105 +0,0 @@ -model: Yi-1.5-34B-Chat -label: - zh_Hans: Yi-1.5-34B-Chat - en_US: Yi-1.5-34B-Chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 4096 -parameter_rules: - - name: stream - use_template: boolean - label: - en_US: "Stream" - zh_Hans: "流式" - type: boolean - default: true - required: true - help: - en_US: "Whether to return the results in batches through streaming. If set to true, the generated text will be pushed to the user in real time during the generation process." - zh_Hans: "是否通过流式分批返回结果。如果设置为 true,生成过程中实时地向用户推送每一部分生成的文本。" - - - name: max_tokens - use_template: max_tokens - label: - en_US: "Max Tokens" - zh_Hans: "最大Token数" - type: int - default: 512 - min: 1 - required: true - help: - en_US: "The maximum number of tokens that can be generated by the model varies depending on the model." - zh_Hans: "模型可生成的最大 token 个数,不同模型上限不同。" - - - name: temperature - use_template: temperature - label: - en_US: "Temperature" - zh_Hans: "采样温度" - type: float - default: 0.7 - min: 0.0 - max: 1.0 - precision: 1 - required: true - help: - en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." - zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: top_p - use_template: top_p - label: - en_US: "Top P" - zh_Hans: "Top P" - type: float - default: 0.7 - min: 0.0 - max: 1.0 - precision: 1 - required: true - help: - en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." - zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: top_k - use_template: top_k - label: - en_US: "Top K" - zh_Hans: "Top K" - type: int - default: 50 - min: 0 - max: 100 - required: true - help: - en_US: "The value range is [0,100], which limits the model to only select from the top k words with the highest probability when choosing the next word at each step. The larger the value, the more diverse text generation will be." 
- zh_Hans: "取值范围为 [0,100],限制模型在每一步选择下一个词时,只从概率最高的前 k 个词中选取。数值越大,文本生成越多样。" - - - name: frequency_penalty - use_template: frequency_penalty - label: - en_US: "Frequency Penalty" - zh_Hans: "频率惩罚" - type: float - default: 0 - min: -1.0 - max: 1.0 - precision: 1 - required: false - help: - en_US: "Used to adjust the frequency of repeated content in automatically generated text. Positive numbers reduce repetition, while negative numbers increase repetition. After setting this parameter, if a word has already appeared in the text, the model will decrease the probability of choosing that word for subsequent generation." - zh_Hans: "用于调整自动生成文本中重复内容的频率。正数减少重复,负数增加重复。设置此参数后,如果一个词在文本中已经出现过,模型在后续生成中选择该词的概率会降低。" - - - name: user - use_template: text - label: - en_US: "User" - zh_Hans: "用户" - type: string - required: false - help: - en_US: "Used to track and differentiate conversation requests from different users." - zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/_position.yaml b/api/core/model_runtime/model_providers/gitee_ai/llm/_position.yaml deleted file mode 100644 index 13c31ad02b..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/llm/_position.yaml +++ /dev/null @@ -1,8 +0,0 @@ -- Qwen2.5-72B-Instruct -- Qwen2-7B-Instruct -- Qwen2-72B-Instruct -- Yi-1.5-34B-Chat -- glm-4-9b-chat -- deepseek-coder-33B-instruct-chat -- deepseek-coder-33B-instruct-completions -- codegeex4-all-9b diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/codegeex4-all-9b.yaml b/api/core/model_runtime/model_providers/gitee_ai/llm/codegeex4-all-9b.yaml deleted file mode 100644 index 8632cd92ab..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/llm/codegeex4-all-9b.yaml +++ /dev/null @@ -1,105 +0,0 @@ -model: codegeex4-all-9b -label: - zh_Hans: codegeex4-all-9b - en_US: codegeex4-all-9b -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 40960 -parameter_rules: - - name: stream - use_template: boolean - label: - en_US: "Stream" - zh_Hans: "流式" - type: boolean - default: true - required: true - help: - en_US: "Whether to return the results in batches through streaming. If set to true, the generated text will be pushed to the user in real time during the generation process." - zh_Hans: "是否通过流式分批返回结果。如果设置为 true,生成过程中实时地向用户推送每一部分生成的文本。" - - - name: max_tokens - use_template: max_tokens - label: - en_US: "Max Tokens" - zh_Hans: "最大Token数" - type: int - default: 512 - min: 1 - required: true - help: - en_US: "The maximum number of tokens that can be generated by the model varies depending on the model." - zh_Hans: "模型可生成的最大 token 个数,不同模型上限不同。" - - - name: temperature - use_template: temperature - label: - en_US: "Temperature" - zh_Hans: "采样温度" - type: float - default: 0.7 - min: 0.0 - max: 1.0 - precision: 1 - required: true - help: - en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." 
- zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: top_p - use_template: top_p - label: - en_US: "Top P" - zh_Hans: "Top P" - type: float - default: 0.7 - min: 0.0 - max: 1.0 - precision: 1 - required: true - help: - en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." - zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: top_k - use_template: top_k - label: - en_US: "Top K" - zh_Hans: "Top K" - type: int - default: 50 - min: 0 - max: 100 - required: true - help: - en_US: "The value range is [0,100], which limits the model to only select from the top k words with the highest probability when choosing the next word at each step. The larger the value, the more diverse text generation will be." - zh_Hans: "取值范围为 [0,100],限制模型在每一步选择下一个词时,只从概率最高的前 k 个词中选取。数值越大,文本生成越多样。" - - - name: frequency_penalty - use_template: frequency_penalty - label: - en_US: "Frequency Penalty" - zh_Hans: "频率惩罚" - type: float - default: 0 - min: -1.0 - max: 1.0 - precision: 1 - required: false - help: - en_US: "Used to adjust the frequency of repeated content in automatically generated text. Positive numbers reduce repetition, while negative numbers increase repetition. After setting this parameter, if a word has already appeared in the text, the model will decrease the probability of choosing that word for subsequent generation." - zh_Hans: "用于调整自动生成文本中重复内容的频率。正数减少重复,负数增加重复。设置此参数后,如果一个词在文本中已经出现过,模型在后续生成中选择该词的概率会降低。" - - - name: user - use_template: text - label: - en_US: "User" - zh_Hans: "用户" - type: string - required: false - help: - en_US: "Used to track and differentiate conversation requests from different users." - zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/deepseek-coder-33B-instruct-chat.yaml b/api/core/model_runtime/model_providers/gitee_ai/llm/deepseek-coder-33B-instruct-chat.yaml deleted file mode 100644 index 2ac00761d5..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/llm/deepseek-coder-33B-instruct-chat.yaml +++ /dev/null @@ -1,105 +0,0 @@ -model: deepseek-coder-33B-instruct-chat -label: - zh_Hans: deepseek-coder-33B-instruct-chat - en_US: deepseek-coder-33B-instruct-chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 9000 -parameter_rules: - - name: stream - use_template: boolean - label: - en_US: "Stream" - zh_Hans: "流式" - type: boolean - default: true - required: true - help: - en_US: "Whether to return the results in batches through streaming. If set to true, the generated text will be pushed to the user in real time during the generation process." - zh_Hans: "是否通过流式分批返回结果。如果设置为 true,生成过程中实时地向用户推送每一部分生成的文本。" - - - name: max_tokens - use_template: max_tokens - label: - en_US: "Max Tokens" - zh_Hans: "最大Token数" - type: int - default: 512 - min: 1 - required: true - help: - en_US: "The maximum number of tokens that can be generated by the model varies depending on the model." 
- zh_Hans: "模型可生成的最大 token 个数,不同模型上限不同。" - - - name: temperature - use_template: temperature - label: - en_US: "Temperature" - zh_Hans: "采样温度" - type: float - default: 0.7 - min: 0.0 - max: 1.0 - precision: 1 - required: true - help: - en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." - zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: top_p - use_template: top_p - label: - en_US: "Top P" - zh_Hans: "Top P" - type: float - default: 0.7 - min: 0.0 - max: 1.0 - precision: 1 - required: true - help: - en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." - zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: top_k - use_template: top_k - label: - en_US: "Top K" - zh_Hans: "Top K" - type: int - default: 50 - min: 0 - max: 100 - required: true - help: - en_US: "The value range is [0,100], which limits the model to only select from the top k words with the highest probability when choosing the next word at each step. The larger the value, the more diverse text generation will be." - zh_Hans: "取值范围为 [0,100],限制模型在每一步选择下一个词时,只从概率最高的前 k 个词中选取。数值越大,文本生成越多样。" - - - name: frequency_penalty - use_template: frequency_penalty - label: - en_US: "Frequency Penalty" - zh_Hans: "频率惩罚" - type: float - default: 0 - min: -1.0 - max: 1.0 - precision: 1 - required: false - help: - en_US: "Used to adjust the frequency of repeated content in automatically generated text. Positive numbers reduce repetition, while negative numbers increase repetition. After setting this parameter, if a word has already appeared in the text, the model will decrease the probability of choosing that word for subsequent generation." - zh_Hans: "用于调整自动生成文本中重复内容的频率。正数减少重复,负数增加重复。设置此参数后,如果一个词在文本中已经出现过,模型在后续生成中选择该词的概率会降低。" - - - name: user - use_template: text - label: - en_US: "User" - zh_Hans: "用户" - type: string - required: false - help: - en_US: "Used to track and differentiate conversation requests from different users." 
- zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/deepseek-coder-33B-instruct-completions.yaml b/api/core/model_runtime/model_providers/gitee_ai/llm/deepseek-coder-33B-instruct-completions.yaml deleted file mode 100644 index 7c364d89f7..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/llm/deepseek-coder-33B-instruct-completions.yaml +++ /dev/null @@ -1,91 +0,0 @@ -model: deepseek-coder-33B-instruct-completions -label: - zh_Hans: deepseek-coder-33B-instruct-completions - en_US: deepseek-coder-33B-instruct-completions -model_type: llm -features: - - agent-thought -model_properties: - mode: completion - context_size: 9000 -parameter_rules: - - name: stream - use_template: boolean - label: - en_US: "Stream" - zh_Hans: "流式" - type: boolean - default: true - required: true - help: - en_US: "Whether to return the results in batches through streaming. If set to true, the generated text will be pushed to the user in real time during the generation process." - zh_Hans: "是否通过流式分批返回结果。如果设置为 true,生成过程中实时地向用户推送每一部分生成的文本。" - - - name: max_tokens - use_template: max_tokens - label: - en_US: "Max Tokens" - zh_Hans: "最大Token数" - type: int - default: 512 - min: 1 - required: true - help: - en_US: "The maximum number of tokens that can be generated by the model varies depending on the model." - zh_Hans: "模型可生成的最大 token 个数,不同模型上限不同。" - - - name: temperature - use_template: temperature - label: - en_US: "Temperature" - zh_Hans: "采样温度" - type: float - default: 0.7 - min: 0.0 - max: 1.0 - precision: 1 - required: true - help: - en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." - zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: top_p - use_template: top_p - label: - en_US: "Top P" - zh_Hans: "Top P" - type: float - default: 0.7 - min: 0.0 - max: 1.0 - precision: 1 - required: true - help: - en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." - zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: frequency_penalty - use_template: frequency_penalty - label: - en_US: "Frequency Penalty" - zh_Hans: "频率惩罚" - type: float - default: 0 - min: -1.0 - max: 1.0 - precision: 1 - required: false - help: - en_US: "Used to adjust the frequency of repeated content in automatically generated text. Positive numbers reduce repetition, while negative numbers increase repetition. After setting this parameter, if a word has already appeared in the text, the model will decrease the probability of choosing that word for subsequent generation." 
- zh_Hans: "用于调整自动生成文本中重复内容的频率。正数减少重复,负数增加重复。设置此参数后,如果一个词在文本中已经出现过,模型在后续生成中选择该词的概率会降低。" - - - name: user - use_template: text - label: - en_US: "User" - zh_Hans: "用户" - type: string - required: false - help: - en_US: "Used to track and differentiate conversation requests from different users." - zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/glm-4-9b-chat.yaml b/api/core/model_runtime/model_providers/gitee_ai/llm/glm-4-9b-chat.yaml deleted file mode 100644 index 2afe1cf959..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/llm/glm-4-9b-chat.yaml +++ /dev/null @@ -1,105 +0,0 @@ -model: glm-4-9b-chat -label: - zh_Hans: glm-4-9b-chat - en_US: glm-4-9b-chat -model_type: llm -features: - - agent-thought -model_properties: - mode: chat - context_size: 32768 -parameter_rules: - - name: stream - use_template: boolean - label: - en_US: "Stream" - zh_Hans: "流式" - type: boolean - default: true - required: true - help: - en_US: "Whether to return the results in batches through streaming. If set to true, the generated text will be pushed to the user in real time during the generation process." - zh_Hans: "是否通过流式分批返回结果。如果设置为 true,生成过程中实时地向用户推送每一部分生成的文本。" - - - name: max_tokens - use_template: max_tokens - label: - en_US: "Max Tokens" - zh_Hans: "最大Token数" - type: int - default: 512 - min: 1 - required: true - help: - en_US: "The maximum number of tokens that can be generated by the model varies depending on the model." - zh_Hans: "模型可生成的最大 token 个数,不同模型上限不同。" - - - name: temperature - use_template: temperature - label: - en_US: "Temperature" - zh_Hans: "采样温度" - type: float - default: 0.7 - min: 0.0 - max: 1.0 - precision: 1 - required: true - help: - en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." - zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: top_p - use_template: top_p - label: - en_US: "Top P" - zh_Hans: "Top P" - type: float - default: 0.7 - min: 0.0 - max: 1.0 - precision: 1 - required: true - help: - en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." - zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: top_k - use_template: top_k - label: - en_US: "Top K" - zh_Hans: "Top K" - type: int - default: 50 - min: 0 - max: 100 - required: true - help: - en_US: "The value range is [0,100], which limits the model to only select from the top k words with the highest probability when choosing the next word at each step. The larger the value, the more diverse text generation will be." 
- zh_Hans: "取值范围为 [0,100],限制模型在每一步选择下一个词时,只从概率最高的前 k 个词中选取。数值越大,文本生成越多样。" - - - name: frequency_penalty - use_template: frequency_penalty - label: - en_US: "Frequency Penalty" - zh_Hans: "频率惩罚" - type: float - default: 0 - min: -1.0 - max: 1.0 - precision: 1 - required: false - help: - en_US: "Used to adjust the frequency of repeated content in automatically generated text. Positive numbers reduce repetition, while negative numbers increase repetition. After setting this parameter, if a word has already appeared in the text, the model will decrease the probability of choosing that word for subsequent generation." - zh_Hans: "用于调整自动生成文本中重复内容的频率。正数减少重复,负数增加重复。设置此参数后,如果一个词在文本中已经出现过,模型在后续生成中选择该词的概率会降低。" - - - name: user - use_template: text - label: - en_US: "User" - zh_Hans: "用户" - type: string - required: false - help: - en_US: "Used to track and differentiate conversation requests from different users." - zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/gitee_ai/llm/llm.py b/api/core/model_runtime/model_providers/gitee_ai/llm/llm.py deleted file mode 100644 index 1b85b7fced..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/llm/llm.py +++ /dev/null @@ -1,51 +0,0 @@ -from collections.abc import Generator -from typing import Optional, Union - -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult -from core.model_runtime.entities.message_entities import ( - PromptMessage, - PromptMessageTool, -) -from core.model_runtime.entities.model_entities import ModelFeature -from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel - - -class GiteeAILargeLanguageModel(OAIAPICompatLargeLanguageModel): - MODEL_TO_IDENTITY: dict[str, str] = { - "Yi-1.5-34B-Chat": "Yi-34B-Chat", - "deepseek-coder-33B-instruct-completions": "deepseek-coder-33B-instruct", - "deepseek-coder-33B-instruct-chat": "deepseek-coder-33B-instruct", - } - - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - self._add_custom_parameters(credentials, model, model_parameters) - return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user) - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials, model, None) - super().validate_credentials(model, credentials) - - def _add_custom_parameters(self, credentials: dict, model: str, model_parameters: dict) -> None: - if model is None: - model = "bge-large-zh-v1.5" - - model_identity = GiteeAILargeLanguageModel.MODEL_TO_IDENTITY.get(model, model) - credentials["endpoint_url"] = f"https://ai.gitee.com/api/serverless/{model_identity}/" - if model.endswith("completions"): - credentials["mode"] = LLMMode.COMPLETION.value - else: - credentials["mode"] = LLMMode.CHAT.value - - schema = self.get_model_schema(model, credentials) - if ModelFeature.TOOL_CALL in schema.features or ModelFeature.MULTI_TOOL_CALL in schema.features: - credentials["function_calling_type"] = "tool_call" diff --git a/api/core/model_runtime/model_providers/gitee_ai/rerank/__init__.py b/api/core/model_runtime/model_providers/gitee_ai/rerank/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git 
a/api/core/model_runtime/model_providers/gitee_ai/rerank/_position.yaml b/api/core/model_runtime/model_providers/gitee_ai/rerank/_position.yaml deleted file mode 100644 index 83162fd338..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/rerank/_position.yaml +++ /dev/null @@ -1 +0,0 @@ -- bge-reranker-v2-m3 diff --git a/api/core/model_runtime/model_providers/gitee_ai/rerank/bge-reranker-v2-m3.yaml b/api/core/model_runtime/model_providers/gitee_ai/rerank/bge-reranker-v2-m3.yaml deleted file mode 100644 index f0681641e1..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/rerank/bge-reranker-v2-m3.yaml +++ /dev/null @@ -1,4 +0,0 @@ -model: bge-reranker-v2-m3 -model_type: rerank -model_properties: - context_size: 1024 diff --git a/api/core/model_runtime/model_providers/gitee_ai/rerank/rerank.py b/api/core/model_runtime/model_providers/gitee_ai/rerank/rerank.py deleted file mode 100644 index 231345c2f4..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/rerank/rerank.py +++ /dev/null @@ -1,128 +0,0 @@ -from typing import Optional - -import httpx - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelPropertyKey, ModelType -from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.rerank_model import RerankModel - - -class GiteeAIRerankModel(RerankModel): - """ - Model class for rerank model. 
- """ - - def _invoke( - self, - model: str, - credentials: dict, - query: str, - docs: list[str], - score_threshold: Optional[float] = None, - top_n: Optional[int] = None, - user: Optional[str] = None, - ) -> RerankResult: - """ - Invoke rerank model - - :param model: model name - :param credentials: model credentials - :param query: search query - :param docs: docs for reranking - :param score_threshold: score threshold - :param top_n: top n documents to return - :param user: unique user id - :return: rerank result - """ - if len(docs) == 0: - return RerankResult(model=model, docs=[]) - - base_url = credentials.get("base_url", "https://ai.gitee.com/api/serverless") - base_url = base_url.removesuffix("/") - - try: - body = {"model": model, "query": query, "documents": docs} - if top_n is not None: - body["top_n"] = top_n - response = httpx.post( - f"{base_url}/{model}/rerank", - json=body, - headers={"Authorization": f"Bearer {credentials.get('api_key')}"}, - ) - - response.raise_for_status() - results = response.json() - - rerank_documents = [] - for result in results["results"]: - rerank_document = RerankDocument( - index=result["index"], - text=result["document"]["text"], - score=result["relevance_score"], - ) - if score_threshold is None or result["relevance_score"] >= score_threshold: - rerank_documents.append(rerank_document) - return RerankResult(model=model, docs=rerank_documents) - except httpx.HTTPStatusError as e: - raise InvokeServerUnavailableError(str(e)) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke( - model=model, - credentials=credentials, - query="What is the capital of the United States?", - docs=[ - "Carson City is the capital city of the American state of Nevada. At the 2010 United States " - "Census, Carson City had a population of 55,274.", - "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " - "are a political division controlled by the United States. 
Its capital is Saipan.", - ], - score_threshold=0.01, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - """ - return { - InvokeConnectionError: [httpx.ConnectError], - InvokeServerUnavailableError: [httpx.RemoteProtocolError], - InvokeRateLimitError: [], - InvokeAuthorizationError: [httpx.HTTPStatusError], - InvokeBadRequestError: [httpx.RequestError], - } - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - """ - generate custom model entities from credentials - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - model_type=ModelType.RERANK, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size"))}, - ) - - return entity diff --git a/api/core/model_runtime/model_providers/gitee_ai/speech2text/__init__.py b/api/core/model_runtime/model_providers/gitee_ai/speech2text/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/api/core/model_runtime/model_providers/gitee_ai/speech2text/_position.yaml b/api/core/model_runtime/model_providers/gitee_ai/speech2text/_position.yaml deleted file mode 100644 index 8e9b47598b..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/speech2text/_position.yaml +++ /dev/null @@ -1,2 +0,0 @@ -- whisper-base -- whisper-large diff --git a/api/core/model_runtime/model_providers/gitee_ai/speech2text/speech2text.py b/api/core/model_runtime/model_providers/gitee_ai/speech2text/speech2text.py deleted file mode 100644 index 5597f5b43e..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/speech2text/speech2text.py +++ /dev/null @@ -1,53 +0,0 @@ -import os -from typing import IO, Optional - -import requests - -from core.model_runtime.errors.invoke import InvokeBadRequestError -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.speech2text_model import Speech2TextModel -from core.model_runtime.model_providers.gitee_ai._common import _CommonGiteeAI - - -class GiteeAISpeech2TextModel(_CommonGiteeAI, Speech2TextModel): - """ - Model class for OpenAI Compatible Speech to text model. 
- """ - - def _invoke(self, model: str, credentials: dict, file: IO[bytes], user: Optional[str] = None) -> str: - """ - Invoke speech2text model - - :param model: model name - :param credentials: model credentials - :param file: audio file - :param user: unique user id - :return: text for given audio file - """ - # doc: https://ai.gitee.com/docs/openapi/serverless#tag/serverless/POST/{service}/speech-to-text - - endpoint_url = f"https://ai.gitee.com/api/serverless/{model}/speech-to-text" - files = [("file", file)] - _, file_ext = os.path.splitext(file.name) - headers = {"Content-Type": f"audio/{file_ext}", "Authorization": f"Bearer {credentials.get('api_key')}"} - response = requests.post(endpoint_url, headers=headers, files=files) - if response.status_code != 200: - raise InvokeBadRequestError(response.text) - response_data = response.json() - return response_data["text"] - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - audio_file_path = self._get_demo_file_path() - - with open(audio_file_path, "rb") as audio_file: - self._invoke(model, credentials, audio_file) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) diff --git a/api/core/model_runtime/model_providers/gitee_ai/speech2text/whisper-base.yaml b/api/core/model_runtime/model_providers/gitee_ai/speech2text/whisper-base.yaml deleted file mode 100644 index a50bf5fc2d..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/speech2text/whisper-base.yaml +++ /dev/null @@ -1,5 +0,0 @@ -model: whisper-base -model_type: speech2text -model_properties: - file_upload_limit: 1 - supported_file_extensions: flac,mp3,mp4,mpeg,mpga,m4a,ogg,wav,webm diff --git a/api/core/model_runtime/model_providers/gitee_ai/speech2text/whisper-large.yaml b/api/core/model_runtime/model_providers/gitee_ai/speech2text/whisper-large.yaml deleted file mode 100644 index 1be7b1a391..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/speech2text/whisper-large.yaml +++ /dev/null @@ -1,5 +0,0 @@ -model: whisper-large -model_type: speech2text -model_properties: - file_upload_limit: 1 - supported_file_extensions: flac,mp3,mp4,mpeg,mpga,m4a,ogg,wav,webm diff --git a/api/core/model_runtime/model_providers/gitee_ai/text_embedding/_position.yaml b/api/core/model_runtime/model_providers/gitee_ai/text_embedding/_position.yaml deleted file mode 100644 index e8abe6440d..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/text_embedding/_position.yaml +++ /dev/null @@ -1,3 +0,0 @@ -- bge-large-zh-v1.5 -- bge-small-zh-v1.5 -- bge-m3 diff --git a/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-large-zh-v1.5.yaml b/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-large-zh-v1.5.yaml deleted file mode 100644 index 9e3ca76e88..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-large-zh-v1.5.yaml +++ /dev/null @@ -1,8 +0,0 @@ -model: bge-large-zh-v1.5 -label: - zh_Hans: bge-large-zh-v1.5 - en_US: bge-large-zh-v1.5 -model_type: text-embedding -model_properties: - context_size: 200000 - max_chunks: 20 diff --git a/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-m3.yaml b/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-m3.yaml deleted file mode 100644 index a7a99a98a3..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-m3.yaml +++ /dev/null @@ 
-1,8 +0,0 @@ -model: bge-m3 -label: - zh_Hans: bge-m3 - en_US: bge-m3 -model_type: text-embedding -model_properties: - context_size: 200000 - max_chunks: 20 diff --git a/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-small-zh-v1.5.yaml b/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-small-zh-v1.5.yaml deleted file mode 100644 index bd760408fa..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/text_embedding/bge-small-zh-v1.5.yaml +++ /dev/null @@ -1,8 +0,0 @@ -model: bge-small-zh-v1.5 -label: - zh_Hans: bge-small-zh-v1.5 - en_US: bge-small-zh-v1.5 -model_type: text-embedding -model_properties: - context_size: 200000 - max_chunks: 20 diff --git a/api/core/model_runtime/model_providers/gitee_ai/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/gitee_ai/text_embedding/text_embedding.py deleted file mode 100644 index b833c5652c..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/text_embedding/text_embedding.py +++ /dev/null @@ -1,31 +0,0 @@ -from typing import Optional - -from core.entities.embedding_type import EmbeddingInputType -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult -from core.model_runtime.model_providers.openai_api_compatible.text_embedding.text_embedding import ( - OAICompatEmbeddingModel, -) - - -class GiteeAIEmbeddingModel(OAICompatEmbeddingModel): - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - self._add_custom_parameters(credentials, model) - return super()._invoke(model, credentials, texts, user, input_type) - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials, None) - super().validate_credentials(model, credentials) - - @staticmethod - def _add_custom_parameters(credentials: dict, model: str) -> None: - if model is None: - model = "bge-m3" - - credentials["endpoint_url"] = f"https://ai.gitee.com/api/serverless/{model}/v1/" diff --git a/api/core/model_runtime/model_providers/gitee_ai/tts/ChatTTS.yaml b/api/core/model_runtime/model_providers/gitee_ai/tts/ChatTTS.yaml deleted file mode 100644 index 940391dfab..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/tts/ChatTTS.yaml +++ /dev/null @@ -1,11 +0,0 @@ -model: ChatTTS -model_type: tts -model_properties: - default_voice: 'default' - voices: - - mode: 'default' - name: 'Default' - language: [ 'zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID' ] - word_limit: 3500 - audio_type: 'mp3' - max_workers: 5 diff --git a/api/core/model_runtime/model_providers/gitee_ai/tts/FunAudioLLM-CosyVoice-300M.yaml b/api/core/model_runtime/model_providers/gitee_ai/tts/FunAudioLLM-CosyVoice-300M.yaml deleted file mode 100644 index 8fc5734801..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/tts/FunAudioLLM-CosyVoice-300M.yaml +++ /dev/null @@ -1,11 +0,0 @@ -model: FunAudioLLM-CosyVoice-300M -model_type: tts -model_properties: - default_voice: 'default' - voices: - - mode: 'default' - name: 'Default' - language: [ 'zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID' ] - word_limit: 3500 - audio_type: 'mp3' - max_workers: 5 diff --git a/api/core/model_runtime/model_providers/gitee_ai/tts/__init__.py b/api/core/model_runtime/model_providers/gitee_ai/tts/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git 
a/api/core/model_runtime/model_providers/gitee_ai/tts/_position.yaml b/api/core/model_runtime/model_providers/gitee_ai/tts/_position.yaml deleted file mode 100644 index 13c6ec8454..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/tts/_position.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- speecht5_tts -- ChatTTS -- fish-speech-1.2-sft -- FunAudioLLM-CosyVoice-300M diff --git a/api/core/model_runtime/model_providers/gitee_ai/tts/fish-speech-1.2-sft.yaml b/api/core/model_runtime/model_providers/gitee_ai/tts/fish-speech-1.2-sft.yaml deleted file mode 100644 index 93cc28bc9d..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/tts/fish-speech-1.2-sft.yaml +++ /dev/null @@ -1,11 +0,0 @@ -model: fish-speech-1.2-sft -model_type: tts -model_properties: - default_voice: 'default' - voices: - - mode: 'default' - name: 'Default' - language: [ 'zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID' ] - word_limit: 3500 - audio_type: 'mp3' - max_workers: 5 diff --git a/api/core/model_runtime/model_providers/gitee_ai/tts/speecht5_tts.yaml b/api/core/model_runtime/model_providers/gitee_ai/tts/speecht5_tts.yaml deleted file mode 100644 index f9c843bd41..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/tts/speecht5_tts.yaml +++ /dev/null @@ -1,11 +0,0 @@ -model: speecht5_tts -model_type: tts -model_properties: - default_voice: 'default' - voices: - - mode: 'default' - name: 'Default' - language: [ 'zh-Hans', 'en-US', 'de-DE', 'fr-FR', 'es-ES', 'it-IT', 'th-TH', 'id-ID' ] - word_limit: 3500 - audio_type: 'mp3' - max_workers: 5 diff --git a/api/core/model_runtime/model_providers/gitee_ai/tts/tts.py b/api/core/model_runtime/model_providers/gitee_ai/tts/tts.py deleted file mode 100644 index ed2bd5b13d..0000000000 --- a/api/core/model_runtime/model_providers/gitee_ai/tts/tts.py +++ /dev/null @@ -1,79 +0,0 @@ -from typing import Optional - -import requests - -from core.model_runtime.errors.invoke import InvokeBadRequestError -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.tts_model import TTSModel -from core.model_runtime.model_providers.gitee_ai._common import _CommonGiteeAI - - -class GiteeAIText2SpeechModel(_CommonGiteeAI, TTSModel): - """ - Model class for OpenAI Speech to text model. 
- """ - - def _invoke( - self, model: str, tenant_id: str, credentials: dict, content_text: str, voice: str, user: Optional[str] = None - ) -> any: - """ - _invoke text2speech model - - :param model: model name - :param tenant_id: user tenant id - :param credentials: model credentials - :param content_text: text content to be translated - :param voice: model timbre - :param user: unique user id - :return: text translated to audio file - """ - return self._tts_invoke_streaming(model=model, credentials=credentials, content_text=content_text, voice=voice) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - validate credentials text2speech model - - :param model: model name - :param credentials: model credentials - :return: text translated to audio file - """ - try: - self._tts_invoke_streaming( - model=model, - credentials=credentials, - content_text="Hello Dify!", - voice=self._get_model_default_voice(model, credentials), - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - def _tts_invoke_streaming(self, model: str, credentials: dict, content_text: str, voice: str) -> any: - """ - _tts_invoke_streaming text2speech model - :param model: model name - :param credentials: model credentials - :param content_text: text content to be translated - :param voice: model timbre - :return: text translated to audio file - """ - try: - # doc: https://ai.gitee.com/docs/openapi/serverless#tag/serverless/POST/{service}/text-to-speech - endpoint_url = "https://ai.gitee.com/api/serverless/" + model + "/text-to-speech" - - headers = {"Content-Type": "application/json"} - api_key = credentials.get("api_key") - if api_key: - headers["Authorization"] = f"Bearer {api_key}" - - payload = {"inputs": content_text} - response = requests.post(endpoint_url, headers=headers, json=payload) - - if response.status_code != 200: - raise InvokeBadRequestError(response.text) - - data = response.content - - for i in range(0, len(data), 1024): - yield data[i : i + 1024] - except Exception as ex: - raise InvokeBadRequestError(str(ex)) diff --git a/api/core/model_runtime/model_providers/gpustack/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/gpustack/_assets/icon_l_en.png deleted file mode 100644 index dfe8e78049..0000000000 Binary files a/api/core/model_runtime/model_providers/gpustack/_assets/icon_l_en.png and /dev/null differ diff --git a/api/core/model_runtime/model_providers/gpustack/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/gpustack/_assets/icon_l_en.svg deleted file mode 100644 index bb23bffcf1..0000000000 --- a/api/core/model_runtime/model_providers/gpustack/_assets/icon_l_en.svg +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/gpustack/_assets/icon_s_en.png b/api/core/model_runtime/model_providers/gpustack/_assets/icon_s_en.png deleted file mode 100644 index b154821db9..0000000000 Binary files a/api/core/model_runtime/model_providers/gpustack/_assets/icon_s_en.png and /dev/null differ diff --git a/api/core/model_runtime/model_providers/gpustack/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/gpustack/_assets/icon_s_en.svg deleted file mode 100644 index c5c608cd7c..0000000000 --- a/api/core/model_runtime/model_providers/gpustack/_assets/icon_s_en.svg +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - diff --git a/api/core/model_runtime/model_providers/gpustack/gpustack.py b/api/core/model_runtime/model_providers/gpustack/gpustack.py deleted file mode 
100644 index 321100167e..0000000000 --- a/api/core/model_runtime/model_providers/gpustack/gpustack.py +++ /dev/null @@ -1,10 +0,0 @@ -import logging - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class GPUStackProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - pass diff --git a/api/core/model_runtime/model_providers/gpustack/gpustack.yaml b/api/core/model_runtime/model_providers/gpustack/gpustack.yaml deleted file mode 100644 index ee4a3c159a..0000000000 --- a/api/core/model_runtime/model_providers/gpustack/gpustack.yaml +++ /dev/null @@ -1,120 +0,0 @@ -provider: gpustack -label: - en_US: GPUStack -icon_small: - en_US: icon_s_en.png -icon_large: - en_US: icon_l_en.png -supported_model_types: - - llm - - text-embedding - - rerank -configurate_methods: - - customizable-model -model_credential_schema: - model: - label: - en_US: Model Name - zh_Hans: 模型名称 - placeholder: - en_US: Enter your model name - zh_Hans: 输入模型名称 - credential_form_schemas: - - variable: endpoint_url - label: - zh_Hans: 服务器地址 - en_US: Server URL - type: text-input - required: true - placeholder: - zh_Hans: 输入 GPUStack 的服务器地址,如 http://192.168.1.100 - en_US: Enter the GPUStack server URL, e.g. http://192.168.1.100 - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 输入您的 API Key - en_US: Enter your API Key - - variable: mode - show_on: - - variable: __model_type - value: llm - label: - en_US: Completion mode - type: select - required: false - default: chat - placeholder: - zh_Hans: 选择补全类型 - en_US: Select completion type - options: - - value: completion - label: - en_US: Completion - zh_Hans: 补全 - - value: chat - label: - en_US: Chat - zh_Hans: 对话 - - variable: context_size - label: - zh_Hans: 模型上下文长度 - en_US: Model context size - required: true - type: text-input - default: "8192" - placeholder: - zh_Hans: 输入您的模型上下文长度 - en_US: Enter your Model context size - - variable: max_tokens_to_sample - label: - zh_Hans: 最大 token 上限 - en_US: Upper bound for max tokens - show_on: - - variable: __model_type - value: llm - default: "8192" - type: text-input - - variable: function_calling_type - show_on: - - variable: __model_type - value: llm - label: - en_US: Function calling - type: select - required: false - default: no_call - options: - - value: function_call - label: - en_US: Function Call - zh_Hans: Function Call - - value: tool_call - label: - en_US: Tool Call - zh_Hans: Tool Call - - value: no_call - label: - en_US: Not Support - zh_Hans: 不支持 - - variable: vision_support - show_on: - - variable: __model_type - value: llm - label: - zh_Hans: Vision 支持 - en_US: Vision Support - type: select - required: false - default: no_support - options: - - value: support - label: - en_US: Support - zh_Hans: 支持 - - value: no_support - label: - en_US: Not Support - zh_Hans: 不支持 diff --git a/api/core/model_runtime/model_providers/gpustack/llm/__init__.py b/api/core/model_runtime/model_providers/gpustack/llm/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/api/core/model_runtime/model_providers/gpustack/llm/llm.py b/api/core/model_runtime/model_providers/gpustack/llm/llm.py deleted file mode 100644 index ce6780b6a7..0000000000 --- a/api/core/model_runtime/model_providers/gpustack/llm/llm.py +++ /dev/null @@ -1,45 +0,0 @@ -from collections.abc import Generator - -from yarl import URL - -from core.model_runtime.entities.llm_entities 
import LLMResult -from core.model_runtime.entities.message_entities import ( - PromptMessage, - PromptMessageTool, -) -from core.model_runtime.model_providers.openai_api_compatible.llm.llm import ( - OAIAPICompatLargeLanguageModel, -) - - -class GPUStackLanguageModel(OAIAPICompatLargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: list[PromptMessageTool] | None = None, - stop: list[str] | None = None, - stream: bool = True, - user: str | None = None, - ) -> LLMResult | Generator: - return super()._invoke( - model, - credentials, - prompt_messages, - model_parameters, - tools, - stop, - stream, - user, - ) - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials) - super().validate_credentials(model, credentials) - - @staticmethod - def _add_custom_parameters(credentials: dict) -> None: - credentials["endpoint_url"] = str(URL(credentials["endpoint_url"]) / "v1-openai") - credentials["mode"] = "chat" diff --git a/api/core/model_runtime/model_providers/gpustack/rerank/__init__.py b/api/core/model_runtime/model_providers/gpustack/rerank/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/api/core/model_runtime/model_providers/gpustack/rerank/rerank.py b/api/core/model_runtime/model_providers/gpustack/rerank/rerank.py deleted file mode 100644 index 5ea7532564..0000000000 --- a/api/core/model_runtime/model_providers/gpustack/rerank/rerank.py +++ /dev/null @@ -1,146 +0,0 @@ -from json import dumps -from typing import Optional - -import httpx -from requests import post -from yarl import URL - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - FetchFrom, - ModelPropertyKey, - ModelType, -) -from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult -from core.model_runtime.errors.invoke import ( - InvokeAuthorizationError, - InvokeBadRequestError, - InvokeConnectionError, - InvokeError, - InvokeRateLimitError, - InvokeServerUnavailableError, -) -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.rerank_model import RerankModel - - -class GPUStackRerankModel(RerankModel): - """ - Model class for GPUStack rerank model. 
- """ - - def _invoke( - self, - model: str, - credentials: dict, - query: str, - docs: list[str], - score_threshold: Optional[float] = None, - top_n: Optional[int] = None, - user: Optional[str] = None, - ) -> RerankResult: - """ - Invoke rerank model - - :param model: model name - :param credentials: model credentials - :param query: search query - :param docs: docs for reranking - :param score_threshold: score threshold - :param top_n: top n documents to return - :param user: unique user id - :return: rerank result - """ - if len(docs) == 0: - return RerankResult(model=model, docs=[]) - - endpoint_url = credentials["endpoint_url"] - headers = { - "Authorization": f"Bearer {credentials.get('api_key')}", - "Content-Type": "application/json", - } - - data = {"model": model, "query": query, "documents": docs, "top_n": top_n} - - try: - response = post( - str(URL(endpoint_url) / "v1" / "rerank"), - headers=headers, - data=dumps(data), - timeout=10, - ) - response.raise_for_status() - results = response.json() - - rerank_documents = [] - for result in results["results"]: - index = result["index"] - if "document" in result: - text = result["document"]["text"] - else: - text = docs[index] - - rerank_document = RerankDocument( - index=index, - text=text, - score=result["relevance_score"], - ) - - if score_threshold is None or result["relevance_score"] >= score_threshold: - rerank_documents.append(rerank_document) - - return RerankResult(model=model, docs=rerank_documents) - except httpx.HTTPStatusError as e: - raise InvokeServerUnavailableError(str(e)) - - def validate_credentials(self, model: str, credentials: dict) -> None: - """ - Validate model credentials - - :param model: model name - :param credentials: model credentials - :return: - """ - try: - self._invoke( - model=model, - credentials=credentials, - query="What is the capital of the United States?", - docs=[ - "Carson City is the capital city of the American state of Nevada. At the 2010 United States " - "Census, Carson City had a population of 55,274.", - "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " - "are a political division controlled by the United States. 
Its capital is Saipan.", - ], - score_threshold=0.8, - ) - except Exception as ex: - raise CredentialsValidateFailedError(str(ex)) - - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - """ - Map model invoke error to unified error - """ - return { - InvokeConnectionError: [httpx.ConnectError], - InvokeServerUnavailableError: [httpx.RemoteProtocolError], - InvokeRateLimitError: [], - InvokeAuthorizationError: [httpx.HTTPStatusError], - InvokeBadRequestError: [httpx.RequestError], - } - - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - """ - generate custom model entities from credentials - """ - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - model_type=ModelType.RERANK, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_properties={ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size"))}, - ) - - return entity diff --git a/api/core/model_runtime/model_providers/gpustack/text_embedding/__init__.py b/api/core/model_runtime/model_providers/gpustack/text_embedding/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/api/core/model_runtime/model_providers/gpustack/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/gpustack/text_embedding/text_embedding.py deleted file mode 100644 index eb324491a2..0000000000 --- a/api/core/model_runtime/model_providers/gpustack/text_embedding/text_embedding.py +++ /dev/null @@ -1,35 +0,0 @@ -from typing import Optional - -from yarl import URL - -from core.entities.embedding_type import EmbeddingInputType -from core.model_runtime.entities.text_embedding_entities import ( - TextEmbeddingResult, -) -from core.model_runtime.model_providers.openai_api_compatible.text_embedding.text_embedding import ( - OAICompatEmbeddingModel, -) - - -class GPUStackTextEmbeddingModel(OAICompatEmbeddingModel): - """ - Model class for GPUStack text embedding model. - """ - - def _invoke( - self, - model: str, - credentials: dict, - texts: list[str], - user: Optional[str] = None, - input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, - ) -> TextEmbeddingResult: - return super()._invoke(model, credentials, texts, user, input_type) - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials) - super().validate_credentials(model, credentials) - - @staticmethod - def _add_custom_parameters(credentials: dict) -> None: - credentials["endpoint_url"] = str(URL(credentials["endpoint_url"]) / "v1-openai") diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3.5-sonnet-v2.yaml b/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3.5-sonnet-v2.yaml deleted file mode 100644 index 37b9f30cc3..0000000000 --- a/api/core/model_runtime/model_providers/vertex_ai/llm/anthropic.claude-3.5-sonnet-v2.yaml +++ /dev/null @@ -1,55 +0,0 @@ -model: claude-3-5-sonnet-v2@20241022 -label: - en_US: Claude 3.5 Sonnet v2 -model_type: llm -features: - - agent-thought - - vision -model_properties: - mode: chat - context_size: 200000 -parameter_rules: - - name: max_tokens - use_template: max_tokens - required: true - type: int - default: 8192 - min: 1 - max: 8192 - help: - zh_Hans: 停止前生成的最大令牌数。请注意,Anthropic Claude 模型可能会在达到 max_tokens 的值之前停止生成令牌。不同的 Anthropic Claude 模型对此参数具有不同的最大值。 - en_US: The maximum number of tokens to generate before stopping. 
Note that Anthropic Claude models might stop generating tokens before reaching the value of max_tokens. Different Anthropic Claude models have different maximum values for this parameter. - - name: temperature - use_template: temperature - required: false - type: float - default: 1 - min: 0.0 - max: 1.0 - help: - zh_Hans: 生成内容的随机性。 - en_US: The amount of randomness injected into the response. - - name: top_p - required: false - type: float - default: 0.999 - min: 0.000 - max: 1.000 - help: - zh_Hans: 在核采样中,Anthropic Claude 按概率递减顺序计算每个后续标记的所有选项的累积分布,并在达到 top_p 指定的特定概率时将其切断。您应该更改温度或top_p,但不能同时更改两者。 - en_US: In nucleus sampling, Anthropic Claude computes the cumulative distribution over all the options for each subsequent token in decreasing probability order and cuts it off once it reaches a particular probability specified by top_p. You should alter either temperature or top_p, but not both. - - name: top_k - required: false - type: int - default: 0 - min: 0 - # tip docs from aws has error, max value is 500 - max: 500 - help: - zh_Hans: 对于每个后续标记,仅从前 K 个选项中进行采样。使用 top_k 删除长尾低概率响应。 - en_US: Only sample from the top K options for each subsequent token. Use top_k to remove long tail low probability responses. -pricing: - input: '0.003' - output: '0.015' - unit: '0.001' - currency: USD diff --git a/api/core/model_runtime/model_providers/vessl_ai/__init__.py b/api/core/model_runtime/model_providers/vessl_ai/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/api/core/model_runtime/model_providers/vessl_ai/_assets/icon_l_en.png b/api/core/model_runtime/model_providers/vessl_ai/_assets/icon_l_en.png deleted file mode 100644 index 18ba350fa0..0000000000 Binary files a/api/core/model_runtime/model_providers/vessl_ai/_assets/icon_l_en.png and /dev/null differ diff --git a/api/core/model_runtime/model_providers/vessl_ai/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/vessl_ai/_assets/icon_s_en.svg deleted file mode 100644 index 242f4e82b2..0000000000 --- a/api/core/model_runtime/model_providers/vessl_ai/_assets/icon_s_en.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/api/core/model_runtime/model_providers/vessl_ai/llm/__init__.py b/api/core/model_runtime/model_providers/vessl_ai/llm/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/api/core/model_runtime/model_providers/vessl_ai/llm/llm.py b/api/core/model_runtime/model_providers/vessl_ai/llm/llm.py deleted file mode 100644 index 034c066ab5..0000000000 --- a/api/core/model_runtime/model_providers/vessl_ai/llm/llm.py +++ /dev/null @@ -1,83 +0,0 @@ -from decimal import Decimal - -from core.model_runtime.entities.common_entities import I18nObject -from core.model_runtime.entities.llm_entities import LLMMode -from core.model_runtime.entities.model_entities import ( - AIModelEntity, - DefaultParameterName, - FetchFrom, - ModelPropertyKey, - ModelType, - ParameterRule, - ParameterType, - PriceConfig, -) -from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel - - -class VesslAILargeLanguageModel(OAIAPICompatLargeLanguageModel): - def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: - features = [] - - entity = AIModelEntity( - model=model, - label=I18nObject(en_US=model), - model_type=ModelType.LLM, - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - features=features, - model_properties={ - ModelPropertyKey.MODE: credentials.get("mode"), - }, - parameter_rules=[ - ParameterRule( - 
name=DefaultParameterName.TEMPERATURE.value, - label=I18nObject(en_US="Temperature"), - type=ParameterType.FLOAT, - default=float(credentials.get("temperature", 0.7)), - min=0, - max=2, - precision=2, - ), - ParameterRule( - name=DefaultParameterName.TOP_P.value, - label=I18nObject(en_US="Top P"), - type=ParameterType.FLOAT, - default=float(credentials.get("top_p", 1)), - min=0, - max=1, - precision=2, - ), - ParameterRule( - name=DefaultParameterName.TOP_K.value, - label=I18nObject(en_US="Top K"), - type=ParameterType.INT, - default=int(credentials.get("top_k", 50)), - min=-2147483647, - max=2147483647, - precision=0, - ), - ParameterRule( - name=DefaultParameterName.MAX_TOKENS.value, - label=I18nObject(en_US="Max Tokens"), - type=ParameterType.INT, - default=512, - min=1, - max=int(credentials.get("max_tokens_to_sample", 4096)), - ), - ], - pricing=PriceConfig( - input=Decimal(credentials.get("input_price", 0)), - output=Decimal(credentials.get("output_price", 0)), - unit=Decimal(credentials.get("unit", 0)), - currency=credentials.get("currency", "USD"), - ), - ) - - if credentials["mode"] == "chat": - entity.model_properties[ModelPropertyKey.MODE] = LLMMode.CHAT.value - elif credentials["mode"] == "completion": - entity.model_properties[ModelPropertyKey.MODE] = LLMMode.COMPLETION.value - else: - raise ValueError(f"Unknown completion type {credentials['completion_type']}") - - return entity diff --git a/api/core/model_runtime/model_providers/vessl_ai/vessl_ai.py b/api/core/model_runtime/model_providers/vessl_ai/vessl_ai.py deleted file mode 100644 index 7a987c6710..0000000000 --- a/api/core/model_runtime/model_providers/vessl_ai/vessl_ai.py +++ /dev/null @@ -1,10 +0,0 @@ -import logging - -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class VesslAIProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - pass diff --git a/api/core/model_runtime/model_providers/vessl_ai/vessl_ai.yaml b/api/core/model_runtime/model_providers/vessl_ai/vessl_ai.yaml deleted file mode 100644 index 2138b281b9..0000000000 --- a/api/core/model_runtime/model_providers/vessl_ai/vessl_ai.yaml +++ /dev/null @@ -1,56 +0,0 @@ -provider: vessl_ai -label: - en_US: VESSL AI -icon_small: - en_US: icon_s_en.svg -icon_large: - en_US: icon_l_en.png -background: "#F1EFED" -help: - title: - en_US: How to deploy VESSL AI LLM Model Endpoint - url: - en_US: https://docs.vessl.ai/guides/get-started/llama3-deployment -supported_model_types: - - llm -configurate_methods: - - customizable-model -model_credential_schema: - model: - label: - en_US: Model Name - placeholder: - en_US: Enter model name - credential_form_schemas: - - variable: endpoint_url - label: - en_US: Endpoint Url - type: text-input - required: true - placeholder: - en_US: Enter VESSL AI service endpoint url - - variable: api_key - required: true - label: - en_US: API Key - type: secret-input - placeholder: - en_US: Enter VESSL AI secret key - - variable: mode - show_on: - - variable: __model_type - value: llm - label: - en_US: Completion Mode - type: select - required: false - default: chat - placeholder: - en_US: Select completion mode - options: - - value: completion - label: - en_US: Completion - - value: chat - label: - en_US: Chat diff --git a/api/core/model_runtime/model_providers/x/__init__.py b/api/core/model_runtime/model_providers/x/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git 
a/api/core/model_runtime/model_providers/x/_assets/x-ai-logo.svg b/api/core/model_runtime/model_providers/x/_assets/x-ai-logo.svg deleted file mode 100644 index f8b745cb13..0000000000 --- a/api/core/model_runtime/model_providers/x/_assets/x-ai-logo.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/x/llm/__init__.py b/api/core/model_runtime/model_providers/x/llm/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/api/core/model_runtime/model_providers/x/llm/grok-beta.yaml b/api/core/model_runtime/model_providers/x/llm/grok-beta.yaml deleted file mode 100644 index 7c305735b9..0000000000 --- a/api/core/model_runtime/model_providers/x/llm/grok-beta.yaml +++ /dev/null @@ -1,63 +0,0 @@ -model: grok-beta -label: - en_US: Grok beta -model_type: llm -features: - - multi-tool-call -model_properties: - mode: chat - context_size: 131072 -parameter_rules: - - name: temperature - label: - en_US: "Temperature" - zh_Hans: "采样温度" - type: float - default: 0.7 - min: 0.0 - max: 2.0 - precision: 1 - required: true - help: - en_US: "The randomness of the sampling temperature control output. The temperature value is within the range of [0.0, 1.0]. The higher the value, the more random and creative the output; the lower the value, the more stable it is. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." - zh_Hans: "采样温度控制输出的随机性。温度值在 [0.0, 1.0] 范围内,值越高,输出越随机和创造性;值越低,输出越稳定。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: top_p - label: - en_US: "Top P" - zh_Hans: "Top P" - type: float - default: 0.7 - min: 0.0 - max: 1.0 - precision: 1 - required: true - help: - en_US: "The value range of the sampling method is [0.0, 1.0]. The top_p value determines that the model selects tokens from the top p% of candidate words with the highest probability; when top_p is 0, this parameter is invalid. It is recommended to adjust either top_p or temperature parameters according to your needs to avoid adjusting both at the same time." - zh_Hans: "采样方法的取值范围为 [0.0,1.0]。top_p 值确定模型从概率最高的前p%的候选词中选取 tokens;当 top_p 为 0 时,此参数无效。建议根据需求调整 top_p 或 temperature 参数,避免同时调整两者。" - - - name: frequency_penalty - use_template: frequency_penalty - label: - en_US: "Frequency Penalty" - zh_Hans: "频率惩罚" - type: float - default: 0 - min: 0 - max: 2.0 - precision: 1 - required: false - help: - en_US: "Number between 0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim." - zh_Hans: "介于0和2.0之间的数字。正值会根据新标记在文本中迄今为止的现有频率来惩罚它们,从而降低模型一字不差地重复同一句话的可能性。" - - - name: user - use_template: text - label: - en_US: "User" - zh_Hans: "用户" - type: string - required: false - help: - en_US: "Used to track and differentiate conversation requests from different users." 
- zh_Hans: "用于追踪和区分不同用户的对话请求。" diff --git a/api/core/model_runtime/model_providers/x/llm/llm.py b/api/core/model_runtime/model_providers/x/llm/llm.py deleted file mode 100644 index 3f5325a857..0000000000 --- a/api/core/model_runtime/model_providers/x/llm/llm.py +++ /dev/null @@ -1,37 +0,0 @@ -from collections.abc import Generator -from typing import Optional, Union - -from yarl import URL - -from core.model_runtime.entities.llm_entities import LLMMode, LLMResult -from core.model_runtime.entities.message_entities import ( - PromptMessage, - PromptMessageTool, -) -from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel - - -class XAILargeLanguageModel(OAIAPICompatLargeLanguageModel): - def _invoke( - self, - model: str, - credentials: dict, - prompt_messages: list[PromptMessage], - model_parameters: dict, - tools: Optional[list[PromptMessageTool]] = None, - stop: Optional[list[str]] = None, - stream: bool = True, - user: Optional[str] = None, - ) -> Union[LLMResult, Generator]: - self._add_custom_parameters(credentials) - return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream) - - def validate_credentials(self, model: str, credentials: dict) -> None: - self._add_custom_parameters(credentials) - super().validate_credentials(model, credentials) - - @staticmethod - def _add_custom_parameters(credentials) -> None: - credentials["endpoint_url"] = str(URL(credentials["endpoint_url"])) or "https://api.x.ai/v1" - credentials["mode"] = LLMMode.CHAT.value - credentials["function_calling_type"] = "tool_call" diff --git a/api/core/model_runtime/model_providers/x/x.py b/api/core/model_runtime/model_providers/x/x.py deleted file mode 100644 index e3f2b8eeba..0000000000 --- a/api/core/model_runtime/model_providers/x/x.py +++ /dev/null @@ -1,25 +0,0 @@ -import logging - -from core.model_runtime.entities.model_entities import ModelType -from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.model_provider import ModelProvider - -logger = logging.getLogger(__name__) - - -class XAIProvider(ModelProvider): - def validate_provider_credentials(self, credentials: dict) -> None: - """ - Validate provider credentials - if validate failed, raise exception - - :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. - """ - try: - model_instance = self.get_model_instance(ModelType.LLM) - model_instance.validate_credentials(model="grok-beta", credentials=credentials) - except CredentialsValidateFailedError as ex: - raise ex - except Exception as ex: - logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") - raise ex diff --git a/api/core/model_runtime/model_providers/x/x.yaml b/api/core/model_runtime/model_providers/x/x.yaml deleted file mode 100644 index 90d1cbfe7e..0000000000 --- a/api/core/model_runtime/model_providers/x/x.yaml +++ /dev/null @@ -1,38 +0,0 @@ -provider: x -label: - en_US: xAI -description: - en_US: xAI is a company working on building artificial intelligence to accelerate human scientific discovery. We are guided by our mission to advance our collective understanding of the universe. 
-icon_small: - en_US: x-ai-logo.svg -icon_large: - en_US: x-ai-logo.svg -help: - title: - en_US: Get your token from xAI - zh_Hans: 从 xAI 获取 token - url: - en_US: https://x.ai/api -supported_model_types: - - llm -configurate_methods: - - predefined-model -provider_credential_schema: - credential_form_schemas: - - variable: api_key - label: - en_US: API Key - type: secret-input - required: true - placeholder: - zh_Hans: 在此输入您的 API Key - en_US: Enter your API Key - - variable: endpoint_url - label: - en_US: API Base - type: text-input - required: false - default: https://api.x.ai/v1 - placeholder: - zh_Hans: 在此输入您的 API Base - en_US: Enter your API Base diff --git a/api/core/moderation/keywords/keywords.py b/api/core/moderation/keywords/keywords.py index 4846da8f93..00b3c56c03 100644 --- a/api/core/moderation/keywords/keywords.py +++ b/api/core/moderation/keywords/keywords.py @@ -1,3 +1,6 @@ +from collections.abc import Sequence +from typing import Any + from core.moderation.base import Moderation, ModerationAction, ModerationInputsResult, ModerationOutputsResult @@ -62,5 +65,5 @@ class KeywordsModeration(Moderation): def _is_violated(self, inputs: dict, keywords_list: list) -> bool: return any(self._check_keywords_in_value(keywords_list, value) for value in inputs.values()) - def _check_keywords_in_value(self, keywords_list, value) -> bool: - return any(keyword.lower() in value.lower() for keyword in keywords_list) + def _check_keywords_in_value(self, keywords_list: Sequence[str], value: Any) -> bool: + return any(keyword.lower() in str(value).lower() for keyword in keywords_list) diff --git a/api/core/moderation/output_moderation.py b/api/core/moderation/output_moderation.py index 83f4d2d57d..4635bd9c25 100644 --- a/api/core/moderation/output_moderation.py +++ b/api/core/moderation/output_moderation.py @@ -126,6 +126,6 @@ class OutputModeration(BaseModel): result: ModerationOutputsResult = moderation_factory.moderation_for_outputs(moderation_buffer) return result except Exception as e: - logger.exception("Moderation Output error: %s", e) + logger.exception(f"Moderation Output error, app_id: {app_id}") return None diff --git a/api/core/ops/langsmith_trace/entities/langsmith_trace_entity.py b/api/core/ops/langsmith_trace/entities/langsmith_trace_entity.py index 05c932fb99..16c76f363c 100644 --- a/api/core/ops/langsmith_trace/entities/langsmith_trace_entity.py +++ b/api/core/ops/langsmith_trace/entities/langsmith_trace_entity.py @@ -49,6 +49,7 @@ class LangSmithRunModel(LangSmithTokenUsage, LangSmithMultiModel): reference_example_id: Optional[str] = Field(None, description="Reference example ID associated with the run") input_attachments: Optional[dict[str, Any]] = Field(None, description="Input attachments of the run") output_attachments: Optional[dict[str, Any]] = Field(None, description="Output attachments of the run") + dotted_order: Optional[str] = Field(None, description="Dotted order of the run") @field_validator("inputs", "outputs") @classmethod diff --git a/api/core/ops/langsmith_trace/langsmith_trace.py b/api/core/ops/langsmith_trace/langsmith_trace.py index ad45050405..c15b132abd 100644 --- a/api/core/ops/langsmith_trace/langsmith_trace.py +++ b/api/core/ops/langsmith_trace/langsmith_trace.py @@ -25,7 +25,7 @@ from core.ops.langsmith_trace.entities.langsmith_trace_entity import ( LangSmithRunType, LangSmithRunUpdateModel, ) -from core.ops.utils import filter_none_values +from core.ops.utils import filter_none_values, generate_dotted_order from extensions.ext_database import db from 
models.model import EndUser, MessageFile from models.workflow import WorkflowNodeExecution @@ -62,6 +62,16 @@ class LangSmithDataTrace(BaseTraceInstance): self.generate_name_trace(trace_info) def workflow_trace(self, trace_info: WorkflowTraceInfo): + trace_id = trace_info.message_id or trace_info.workflow_app_log_id or trace_info.workflow_run_id + message_dotted_order = ( + generate_dotted_order(trace_info.message_id, trace_info.start_time) if trace_info.message_id else None + ) + workflow_dotted_order = generate_dotted_order( + trace_info.workflow_app_log_id or trace_info.workflow_run_id, + trace_info.workflow_data.created_at, + message_dotted_order, + ) + if trace_info.message_id: message_run = LangSmithRunModel( id=trace_info.message_id, @@ -76,6 +86,8 @@ class LangSmithDataTrace(BaseTraceInstance): }, tags=["message", "workflow"], error=trace_info.error, + trace_id=trace_id, + dotted_order=message_dotted_order, ) self.add_run(message_run) @@ -95,6 +107,8 @@ class LangSmithDataTrace(BaseTraceInstance): error=trace_info.error, tags=["workflow"], parent_run_id=trace_info.message_id or None, + trace_id=trace_id, + dotted_order=workflow_dotted_order, ) self.add_run(langsmith_run) @@ -177,6 +191,7 @@ class LangSmithDataTrace(BaseTraceInstance): else: run_type = LangSmithRunType.tool + node_dotted_order = generate_dotted_order(node_execution_id, created_at, workflow_dotted_order) langsmith_run = LangSmithRunModel( total_tokens=node_total_tokens, name=node_type, @@ -191,6 +206,9 @@ class LangSmithDataTrace(BaseTraceInstance): }, parent_run_id=trace_info.workflow_app_log_id or trace_info.workflow_run_id, tags=["node_execution"], + id=node_execution_id, + trace_id=trace_id, + dotted_order=node_dotted_order, ) self.add_run(langsmith_run) diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py index 79704c115f..1069889abd 100644 --- a/api/core/ops/ops_trace_manager.py +++ b/api/core/ops/ops_trace_manager.py @@ -711,7 +711,7 @@ class TraceQueueManager: trace_task.app_id = self.app_id trace_manager_queue.put(trace_task) except Exception as e: - logging.exception(f"Error adding trace task: {e}") + logging.exception(f"Error adding trace task, trace_type {trace_task.trace_type}") finally: self.start_timer() @@ -730,7 +730,7 @@ class TraceQueueManager: if tasks: self.send_to_celery(tasks) except Exception as e: - logging.exception(f"Error processing trace tasks: {e}") + logging.exception("Error processing trace tasks") def start_timer(self): global trace_manager_timer diff --git a/api/core/ops/utils.py b/api/core/ops/utils.py index 3cd3fb5756..998eba9ea9 100644 --- a/api/core/ops/utils.py +++ b/api/core/ops/utils.py @@ -1,5 +1,6 @@ from contextlib import contextmanager from datetime import datetime +from typing import Optional, Union from extensions.ext_database import db from models.model import Message @@ -43,3 +44,19 @@ def replace_text_with_content(data): return [replace_text_with_content(item) for item in data] else: return data + + +def generate_dotted_order( + run_id: str, start_time: Union[str, datetime], parent_dotted_order: Optional[str] = None +) -> str: + """ + generate dotted_order for langsmith + """ + start_time = datetime.fromisoformat(start_time) if isinstance(start_time, str) else start_time + timestamp = start_time.strftime("%Y%m%dT%H%M%S%f")[:-3] + "Z" + current_segment = f"{timestamp}{run_id}" + + if parent_dotted_order is None: + return current_segment + + return f"{parent_dotted_order}.{current_segment}" diff --git 
a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector.py b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector.py index c77cb87376..09104ae422 100644 --- a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector.py +++ b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector.py @@ -1,310 +1,62 @@ import json from typing import Any -from pydantic import BaseModel - -_import_err_msg = ( - "`alibabacloud_gpdb20160503` and `alibabacloud_tea_openapi` packages not found, " - "please run `pip install alibabacloud_gpdb20160503 alibabacloud_tea_openapi`" -) - from configs import dify_config +from core.rag.datasource.vdb.analyticdb.analyticdb_vector_openapi import ( + AnalyticdbVectorOpenAPI, + AnalyticdbVectorOpenAPIConfig, +) +from core.rag.datasource.vdb.analyticdb.analyticdb_vector_sql import AnalyticdbVectorBySql, AnalyticdbVectorBySqlConfig from core.rag.datasource.vdb.vector_base import BaseVector from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory from core.rag.datasource.vdb.vector_type import VectorType from core.rag.embedding.embedding_base import Embeddings from core.rag.models.document import Document -from extensions.ext_redis import redis_client from models.dataset import Dataset -class AnalyticdbConfig(BaseModel): - access_key_id: str - access_key_secret: str - region_id: str - instance_id: str - account: str - account_password: str - namespace: str = ("dify",) - namespace_password: str = (None,) - metrics: str = ("cosine",) - read_timeout: int = 60000 - - def to_analyticdb_client_params(self): - return { - "access_key_id": self.access_key_id, - "access_key_secret": self.access_key_secret, - "region_id": self.region_id, - "read_timeout": self.read_timeout, - } - - class AnalyticdbVector(BaseVector): - def __init__(self, collection_name: str, config: AnalyticdbConfig): - self._collection_name = collection_name.lower() - try: - from alibabacloud_gpdb20160503.client import Client - from alibabacloud_tea_openapi import models as open_api_models - except: - raise ImportError(_import_err_msg) - self.config = config - self._client_config = open_api_models.Config(user_agent="dify", **config.to_analyticdb_client_params()) - self._client = Client(self._client_config) - self._initialize() - - def _initialize(self) -> None: - cache_key = f"vector_indexing_{self.config.instance_id}" - lock_name = f"{cache_key}_lock" - with redis_client.lock(lock_name, timeout=20): - collection_exist_cache_key = f"vector_indexing_{self.config.instance_id}" - if redis_client.get(collection_exist_cache_key): - return - self._initialize_vector_database() - self._create_namespace_if_not_exists() - redis_client.set(collection_exist_cache_key, 1, ex=3600) - - def _initialize_vector_database(self) -> None: - from alibabacloud_gpdb20160503 import models as gpdb_20160503_models - - request = gpdb_20160503_models.InitVectorDatabaseRequest( - dbinstance_id=self.config.instance_id, - region_id=self.config.region_id, - manager_account=self.config.account, - manager_account_password=self.config.account_password, - ) - self._client.init_vector_database(request) - - def _create_namespace_if_not_exists(self) -> None: - from alibabacloud_gpdb20160503 import models as gpdb_20160503_models - from Tea.exceptions import TeaException - - try: - request = gpdb_20160503_models.DescribeNamespaceRequest( - dbinstance_id=self.config.instance_id, - region_id=self.config.region_id, - namespace=self.config.namespace, - manager_account=self.config.account, - manager_account_password=self.config.account_password, 
- ) - self._client.describe_namespace(request) - except TeaException as e: - if e.statusCode == 404: - request = gpdb_20160503_models.CreateNamespaceRequest( - dbinstance_id=self.config.instance_id, - region_id=self.config.region_id, - manager_account=self.config.account, - manager_account_password=self.config.account_password, - namespace=self.config.namespace, - namespace_password=self.config.namespace_password, - ) - self._client.create_namespace(request) - else: - raise ValueError(f"failed to create namespace {self.config.namespace}: {e}") - - def _create_collection_if_not_exists(self, embedding_dimension: int): - from alibabacloud_gpdb20160503 import models as gpdb_20160503_models - from Tea.exceptions import TeaException - - cache_key = f"vector_indexing_{self._collection_name}" - lock_name = f"{cache_key}_lock" - with redis_client.lock(lock_name, timeout=20): - collection_exist_cache_key = f"vector_indexing_{self._collection_name}" - if redis_client.get(collection_exist_cache_key): - return - try: - request = gpdb_20160503_models.DescribeCollectionRequest( - dbinstance_id=self.config.instance_id, - region_id=self.config.region_id, - namespace=self.config.namespace, - namespace_password=self.config.namespace_password, - collection=self._collection_name, - ) - self._client.describe_collection(request) - except TeaException as e: - if e.statusCode == 404: - metadata = '{"ref_doc_id":"text","page_content":"text","metadata_":"jsonb"}' - full_text_retrieval_fields = "page_content" - request = gpdb_20160503_models.CreateCollectionRequest( - dbinstance_id=self.config.instance_id, - region_id=self.config.region_id, - manager_account=self.config.account, - manager_account_password=self.config.account_password, - namespace=self.config.namespace, - collection=self._collection_name, - dimension=embedding_dimension, - metrics=self.config.metrics, - metadata=metadata, - full_text_retrieval_fields=full_text_retrieval_fields, - ) - self._client.create_collection(request) - else: - raise ValueError(f"failed to create collection {self._collection_name}: {e}") - redis_client.set(collection_exist_cache_key, 1, ex=3600) + def __init__( + self, collection_name: str, api_config: AnalyticdbVectorOpenAPIConfig, sql_config: AnalyticdbVectorBySqlConfig + ): + super().__init__(collection_name) + if api_config is not None: + self.analyticdb_vector = AnalyticdbVectorOpenAPI(collection_name, api_config) + else: + self.analyticdb_vector = AnalyticdbVectorBySql(collection_name, sql_config) def get_type(self) -> str: return VectorType.ANALYTICDB def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs): dimension = len(embeddings[0]) - self._create_collection_if_not_exists(dimension) - self.add_texts(texts, embeddings) + self.analyticdb_vector._create_collection_if_not_exists(dimension) + self.analyticdb_vector.add_texts(texts, embeddings) - def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): - from alibabacloud_gpdb20160503 import models as gpdb_20160503_models - - rows: list[gpdb_20160503_models.UpsertCollectionDataRequestRows] = [] - for doc, embedding in zip(documents, embeddings, strict=True): - metadata = { - "ref_doc_id": doc.metadata["doc_id"], - "page_content": doc.page_content, - "metadata_": json.dumps(doc.metadata), - } - rows.append( - gpdb_20160503_models.UpsertCollectionDataRequestRows( - vector=embedding, - metadata=metadata, - ) - ) - request = gpdb_20160503_models.UpsertCollectionDataRequest( - dbinstance_id=self.config.instance_id, - 
region_id=self.config.region_id, - namespace=self.config.namespace, - namespace_password=self.config.namespace_password, - collection=self._collection_name, - rows=rows, - ) - self._client.upsert_collection_data(request) + def add_texts(self, texts: list[Document], embeddings: list[list[float]], **kwargs): + self.analyticdb_vector.add_texts(texts, embeddings) def text_exists(self, id: str) -> bool: - from alibabacloud_gpdb20160503 import models as gpdb_20160503_models - - request = gpdb_20160503_models.QueryCollectionDataRequest( - dbinstance_id=self.config.instance_id, - region_id=self.config.region_id, - namespace=self.config.namespace, - namespace_password=self.config.namespace_password, - collection=self._collection_name, - metrics=self.config.metrics, - include_values=True, - vector=None, - content=None, - top_k=1, - filter=f"ref_doc_id='{id}'", - ) - response = self._client.query_collection_data(request) - return len(response.body.matches.match) > 0 + return self.analyticdb_vector.text_exists(id) def delete_by_ids(self, ids: list[str]) -> None: - from alibabacloud_gpdb20160503 import models as gpdb_20160503_models - - ids_str = ",".join(f"'{id}'" for id in ids) - ids_str = f"({ids_str})" - request = gpdb_20160503_models.DeleteCollectionDataRequest( - dbinstance_id=self.config.instance_id, - region_id=self.config.region_id, - namespace=self.config.namespace, - namespace_password=self.config.namespace_password, - collection=self._collection_name, - collection_data=None, - collection_data_filter=f"ref_doc_id IN {ids_str}", - ) - self._client.delete_collection_data(request) + self.analyticdb_vector.delete_by_ids(ids) def delete_by_metadata_field(self, key: str, value: str) -> None: - from alibabacloud_gpdb20160503 import models as gpdb_20160503_models - - request = gpdb_20160503_models.DeleteCollectionDataRequest( - dbinstance_id=self.config.instance_id, - region_id=self.config.region_id, - namespace=self.config.namespace, - namespace_password=self.config.namespace_password, - collection=self._collection_name, - collection_data=None, - collection_data_filter=f"metadata_ ->> '{key}' = '{value}'", - ) - self._client.delete_collection_data(request) + self.analyticdb_vector.delete_by_metadata_field(key, value) def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: - from alibabacloud_gpdb20160503 import models as gpdb_20160503_models - - score_threshold = kwargs.get("score_threshold") or 0.0 - request = gpdb_20160503_models.QueryCollectionDataRequest( - dbinstance_id=self.config.instance_id, - region_id=self.config.region_id, - namespace=self.config.namespace, - namespace_password=self.config.namespace_password, - collection=self._collection_name, - include_values=kwargs.pop("include_values", True), - metrics=self.config.metrics, - vector=query_vector, - content=None, - top_k=kwargs.get("top_k", 4), - filter=None, - ) - response = self._client.query_collection_data(request) - documents = [] - for match in response.body.matches.match: - if match.score > score_threshold: - metadata = json.loads(match.metadata.get("metadata_")) - metadata["score"] = match.score - doc = Document( - page_content=match.metadata.get("page_content"), - metadata=metadata, - ) - documents.append(doc) - documents = sorted(documents, key=lambda x: x.metadata["score"], reverse=True) - return documents + return self.analyticdb_vector.search_by_vector(query_vector, **kwargs) def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: - from alibabacloud_gpdb20160503 import models as
gpdb_20160503_models - - score_threshold = float(kwargs.get("score_threshold") or 0.0) - request = gpdb_20160503_models.QueryCollectionDataRequest( - dbinstance_id=self.config.instance_id, - region_id=self.config.region_id, - namespace=self.config.namespace, - namespace_password=self.config.namespace_password, - collection=self._collection_name, - include_values=kwargs.pop("include_values", True), - metrics=self.config.metrics, - vector=None, - content=query, - top_k=kwargs.get("top_k", 4), - filter=None, - ) - response = self._client.query_collection_data(request) - documents = [] - for match in response.body.matches.match: - if match.score > score_threshold: - metadata = json.loads(match.metadata.get("metadata_")) - metadata["score"] = match.score - doc = Document( - page_content=match.metadata.get("page_content"), - vector=match.metadata.get("vector"), - metadata=metadata, - ) - documents.append(doc) - documents = sorted(documents, key=lambda x: x.metadata["score"], reverse=True) - return documents + return self.analyticdb_vector.search_by_full_text(query, **kwargs) def delete(self) -> None: - try: - from alibabacloud_gpdb20160503 import models as gpdb_20160503_models - - request = gpdb_20160503_models.DeleteCollectionRequest( - collection=self._collection_name, - dbinstance_id=self.config.instance_id, - namespace=self.config.namespace, - namespace_password=self.config.namespace_password, - region_id=self.config.region_id, - ) - self._client.delete_collection(request) - except Exception as e: - raise e + self.analyticdb_vector.delete() class AnalyticdbVectorFactory(AbstractVectorFactory): - def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings): + def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> AnalyticdbVector: if dataset.index_struct_dict: class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] collection_name = class_prefix.lower() @@ -313,26 +65,9 @@ class AnalyticdbVectorFactory(AbstractVectorFactory): collection_name = Dataset.gen_collection_name_by_id(dataset_id).lower() dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.ANALYTICDB, collection_name)) - # handle optional params - if dify_config.ANALYTICDB_KEY_ID is None: - raise ValueError("ANALYTICDB_KEY_ID should not be None") - if dify_config.ANALYTICDB_KEY_SECRET is None: - raise ValueError("ANALYTICDB_KEY_SECRET should not be None") - if dify_config.ANALYTICDB_REGION_ID is None: - raise ValueError("ANALYTICDB_REGION_ID should not be None") - if dify_config.ANALYTICDB_INSTANCE_ID is None: - raise ValueError("ANALYTICDB_INSTANCE_ID should not be None") - if dify_config.ANALYTICDB_ACCOUNT is None: - raise ValueError("ANALYTICDB_ACCOUNT should not be None") - if dify_config.ANALYTICDB_PASSWORD is None: - raise ValueError("ANALYTICDB_PASSWORD should not be None") - if dify_config.ANALYTICDB_NAMESPACE is None: - raise ValueError("ANALYTICDB_NAMESPACE should not be None") - if dify_config.ANALYTICDB_NAMESPACE_PASSWORD is None: - raise ValueError("ANALYTICDB_NAMESPACE_PASSWORD should not be None") - return AnalyticdbVector( - collection_name, - AnalyticdbConfig( + if dify_config.ANALYTICDB_HOST is None: + # implemented through OpenAPI + apiConfig = AnalyticdbVectorOpenAPIConfig( access_key_id=dify_config.ANALYTICDB_KEY_ID, access_key_secret=dify_config.ANALYTICDB_KEY_SECRET, region_id=dify_config.ANALYTICDB_REGION_ID, @@ -341,5 +76,22 @@ class AnalyticdbVectorFactory(AbstractVectorFactory): 
account_password=dify_config.ANALYTICDB_PASSWORD, namespace=dify_config.ANALYTICDB_NAMESPACE, namespace_password=dify_config.ANALYTICDB_NAMESPACE_PASSWORD, - ), + ) + sqlConfig = None + else: + # implemented through sql + sqlConfig = AnalyticdbVectorBySqlConfig( + host=dify_config.ANALYTICDB_HOST, + port=dify_config.ANALYTICDB_PORT, + account=dify_config.ANALYTICDB_ACCOUNT, + account_password=dify_config.ANALYTICDB_PASSWORD, + min_connection=dify_config.ANALYTICDB_MIN_CONNECTION, + max_connection=dify_config.ANALYTICDB_MAX_CONNECTION, + namespace=dify_config.ANALYTICDB_NAMESPACE, + ) + apiConfig = None + return AnalyticdbVector( + collection_name, + apiConfig, + sqlConfig, ) diff --git a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py new file mode 100644 index 0000000000..05e0ebc54f --- /dev/null +++ b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py @@ -0,0 +1,309 @@ +import json +from typing import Any, Optional + +from pydantic import BaseModel, model_validator + +_import_err_msg = ( + "`alibabacloud_gpdb20160503` and `alibabacloud_tea_openapi` packages not found, " + "please run `pip install alibabacloud_gpdb20160503 alibabacloud_tea_openapi`" +) + +from core.rag.models.document import Document +from extensions.ext_redis import redis_client + + +class AnalyticdbVectorOpenAPIConfig(BaseModel): + access_key_id: str + access_key_secret: str + region_id: str + instance_id: str + account: str + account_password: str + namespace: str = "dify" + namespace_password: Optional[str] = None + metrics: str = "cosine" + read_timeout: int = 60000 + + @model_validator(mode="before") + @classmethod + def validate_config(cls, values: dict) -> dict: + if not values["access_key_id"]: + raise ValueError("config ANALYTICDB_KEY_ID is required") + if not values["access_key_secret"]: + raise ValueError("config ANALYTICDB_KEY_SECRET is required") + if not values["region_id"]: + raise ValueError("config ANALYTICDB_REGION_ID is required") + if not values["instance_id"]: + raise ValueError("config ANALYTICDB_INSTANCE_ID is required") + if not values["account"]: + raise ValueError("config ANALYTICDB_ACCOUNT is required") + if not values["account_password"]: + raise ValueError("config ANALYTICDB_PASSWORD is required") + if not values["namespace_password"]: + raise ValueError("config ANALYTICDB_NAMESPACE_PASSWORD is required") + return values + + def to_analyticdb_client_params(self): + return { + "access_key_id": self.access_key_id, + "access_key_secret": self.access_key_secret, + "region_id": self.region_id, + "read_timeout": self.read_timeout, + } + + +class AnalyticdbVectorOpenAPI: + def __init__(self, collection_name: str, config: AnalyticdbVectorOpenAPIConfig): + try: + from alibabacloud_gpdb20160503.client import Client + from alibabacloud_tea_openapi import models as open_api_models + except ImportError: + raise ImportError(_import_err_msg) + self._collection_name = collection_name.lower() + self.config = config + self._client_config = open_api_models.Config(user_agent="dify", **config.to_analyticdb_client_params()) + self._client = Client(self._client_config) + self._initialize() + + def _initialize(self) -> None: + cache_key = f"vector_initialize_{self.config.instance_id}" + lock_name = f"{cache_key}_lock" + with redis_client.lock(lock_name, timeout=20): + database_exist_cache_key = f"vector_initialize_{self.config.instance_id}" + if redis_client.get(database_exist_cache_key): + return + self._initialize_vector_database() +
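# the namespace is created next; once both steps succeed, the cache key set below suppresses re-initialization for an hour (ex=3600) +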
self._create_namespace_if_not_exists() + redis_client.set(database_exist_cache_key, 1, ex=3600) + + def _initialize_vector_database(self) -> None: + from alibabacloud_gpdb20160503 import models as gpdb_20160503_models + + request = gpdb_20160503_models.InitVectorDatabaseRequest( + dbinstance_id=self.config.instance_id, + region_id=self.config.region_id, + manager_account=self.config.account, + manager_account_password=self.config.account_password, + ) + self._client.init_vector_database(request) + + def _create_namespace_if_not_exists(self) -> None: + from alibabacloud_gpdb20160503 import models as gpdb_20160503_models + from Tea.exceptions import TeaException + + try: + request = gpdb_20160503_models.DescribeNamespaceRequest( + dbinstance_id=self.config.instance_id, + region_id=self.config.region_id, + namespace=self.config.namespace, + manager_account=self.config.account, + manager_account_password=self.config.account_password, + ) + self._client.describe_namespace(request) + except TeaException as e: + if e.statusCode == 404: + request = gpdb_20160503_models.CreateNamespaceRequest( + dbinstance_id=self.config.instance_id, + region_id=self.config.region_id, + manager_account=self.config.account, + manager_account_password=self.config.account_password, + namespace=self.config.namespace, + namespace_password=self.config.namespace_password, + ) + self._client.create_namespace(request) + else: + raise ValueError(f"failed to create namespace {self.config.namespace}: {e}") + + def _create_collection_if_not_exists(self, embedding_dimension: int): + from alibabacloud_gpdb20160503 import models as gpdb_20160503_models + from Tea.exceptions import TeaException + + cache_key = f"vector_indexing_{self._collection_name}" + lock_name = f"{cache_key}_lock" + with redis_client.lock(lock_name, timeout=20): + collection_exist_cache_key = f"vector_indexing_{self._collection_name}" + if redis_client.get(collection_exist_cache_key): + return + try: + request = gpdb_20160503_models.DescribeCollectionRequest( + dbinstance_id=self.config.instance_id, + region_id=self.config.region_id, + namespace=self.config.namespace, + namespace_password=self.config.namespace_password, + collection=self._collection_name, + ) + self._client.describe_collection(request) + except TeaException as e: + if e.statusCode == 404: + metadata = '{"ref_doc_id":"text","page_content":"text","metadata_":"jsonb"}' + full_text_retrieval_fields = "page_content" + request = gpdb_20160503_models.CreateCollectionRequest( + dbinstance_id=self.config.instance_id, + region_id=self.config.region_id, + manager_account=self.config.account, + manager_account_password=self.config.account_password, + namespace=self.config.namespace, + collection=self._collection_name, + dimension=embedding_dimension, + metrics=self.config.metrics, + metadata=metadata, + full_text_retrieval_fields=full_text_retrieval_fields, + ) + self._client.create_collection(request) + else: + raise ValueError(f"failed to create collection {self._collection_name}: {e}") + redis_client.set(collection_exist_cache_key, 1, ex=3600) + + def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): + from alibabacloud_gpdb20160503 import models as gpdb_20160503_models + + rows: list[gpdb_20160503_models.UpsertCollectionDataRequestRows] = [] + for doc, embedding in zip(documents, embeddings, strict=True): + metadata = { + "ref_doc_id": doc.metadata["doc_id"], + "page_content": doc.page_content, + "metadata_": json.dumps(doc.metadata), + } + rows.append( + 
gpdb_20160503_models.UpsertCollectionDataRequestRows( + vector=embedding, + metadata=metadata, + ) + ) + request = gpdb_20160503_models.UpsertCollectionDataRequest( + dbinstance_id=self.config.instance_id, + region_id=self.config.region_id, + namespace=self.config.namespace, + namespace_password=self.config.namespace_password, + collection=self._collection_name, + rows=rows, + ) + self._client.upsert_collection_data(request) + + def text_exists(self, id: str) -> bool: + from alibabacloud_gpdb20160503 import models as gpdb_20160503_models + + request = gpdb_20160503_models.QueryCollectionDataRequest( + dbinstance_id=self.config.instance_id, + region_id=self.config.region_id, + namespace=self.config.namespace, + namespace_password=self.config.namespace_password, + collection=self._collection_name, + metrics=self.config.metrics, + include_values=True, + vector=None, + content=None, + top_k=1, + filter=f"ref_doc_id='{id}'", + ) + response = self._client.query_collection_data(request) + return len(response.body.matches.match) > 0 + + def delete_by_ids(self, ids: list[str]) -> None: + from alibabacloud_gpdb20160503 import models as gpdb_20160503_models + + ids_str = ",".join(f"'{id}'" for id in ids) + ids_str = f"({ids_str})" + request = gpdb_20160503_models.DeleteCollectionDataRequest( + dbinstance_id=self.config.instance_id, + region_id=self.config.region_id, + namespace=self.config.namespace, + namespace_password=self.config.namespace_password, + collection=self._collection_name, + collection_data=None, + collection_data_filter=f"ref_doc_id IN {ids_str}", + ) + self._client.delete_collection_data(request) + + def delete_by_metadata_field(self, key: str, value: str) -> None: + from alibabacloud_gpdb20160503 import models as gpdb_20160503_models + + request = gpdb_20160503_models.DeleteCollectionDataRequest( + dbinstance_id=self.config.instance_id, + region_id=self.config.region_id, + namespace=self.config.namespace, + namespace_password=self.config.namespace_password, + collection=self._collection_name, + collection_data=None, + collection_data_filter=f"metadata_ ->> '{key}' = '{value}'", + ) + self._client.delete_collection_data(request) + + def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: + from alibabacloud_gpdb20160503 import models as gpdb_20160503_models + + score_threshold = kwargs.get("score_threshold") or 0.0 + request = gpdb_20160503_models.QueryCollectionDataRequest( + dbinstance_id=self.config.instance_id, + region_id=self.config.region_id, + namespace=self.config.namespace, + namespace_password=self.config.namespace_password, + collection=self._collection_name, + include_values=kwargs.pop("include_values", True), + metrics=self.config.metrics, + vector=query_vector, + content=None, + top_k=kwargs.get("top_k", 4), + filter=None, + ) + response = self._client.query_collection_data(request) + documents = [] + for match in response.body.matches.match: + if match.score > score_threshold: + metadata = json.loads(match.metadata.get("metadata_")) + metadata["score"] = match.score + doc = Document( + page_content=match.metadata.get("page_content"), + vector=match.values.value, + metadata=metadata, + ) + documents.append(doc) + documents = sorted(documents, key=lambda x: x.metadata["score"], reverse=True) + return documents + + def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: + from alibabacloud_gpdb20160503 import models as gpdb_20160503_models + + score_threshold = float(kwargs.get("score_threshold") or 0.0) + request = 
gpdb_20160503_models.QueryCollectionDataRequest( + dbinstance_id=self.config.instance_id, + region_id=self.config.region_id, + namespace=self.config.namespace, + namespace_password=self.config.namespace_password, + collection=self._collection_name, + include_values=kwargs.pop("include_values", True), + metrics=self.config.metrics, + vector=None, + content=query, + top_k=kwargs.get("top_k", 4), + filter=None, + ) + response = self._client.query_collection_data(request) + documents = [] + for match in response.body.matches.match: + if match.score > score_threshold: + metadata = json.loads(match.metadata.get("metadata_")) + metadata["score"] = match.score + doc = Document( + page_content=match.metadata.get("page_content"), + vector=match.values.value, + metadata=metadata, + ) + documents.append(doc) + documents = sorted(documents, key=lambda x: x.metadata["score"], reverse=True) + return documents + + def delete(self) -> None: + try: + from alibabacloud_gpdb20160503 import models as gpdb_20160503_models + + request = gpdb_20160503_models.DeleteCollectionRequest( + collection=self._collection_name, + dbinstance_id=self.config.instance_id, + namespace=self.config.namespace, + namespace_password=self.config.namespace_password, + region_id=self.config.region_id, + ) + self._client.delete_collection(request) + except Exception as e: + raise e diff --git a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_sql.py b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_sql.py new file mode 100644 index 0000000000..e474db5cb2 --- /dev/null +++ b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_sql.py @@ -0,0 +1,245 @@ +import json +import uuid +from contextlib import contextmanager +from typing import Any + +import psycopg2.extras +import psycopg2.pool +from pydantic import BaseModel, model_validator + +from core.rag.models.document import Document +from extensions.ext_redis import redis_client + + +class AnalyticdbVectorBySqlConfig(BaseModel): + host: str + port: int + account: str + account_password: str + min_connection: int + max_connection: int + namespace: str = "dify" + metrics: str = "cosine" + + @model_validator(mode="before") + @classmethod + def validate_config(cls, values: dict) -> dict: + if not values["host"]: + raise ValueError("config ANALYTICDB_HOST is required") + if not values["port"]: + raise ValueError("config ANALYTICDB_PORT is required") + if not values["account"]: + raise ValueError("config ANALYTICDB_ACCOUNT is required") + if not values["account_password"]: + raise ValueError("config ANALYTICDB_PASSWORD is required") + if not values["min_connection"]: + raise ValueError("config ANALYTICDB_MIN_CONNECTION is required") + if not values["max_connection"]: + raise ValueError("config ANALYTICDB_MAX_CONNECTION is required") + if values["min_connection"] > values["max_connection"]: + raise ValueError("config ANALYTICDB_MIN_CONNECTION should not be greater than ANALYTICDB_MAX_CONNECTION") + return values + + +class AnalyticdbVectorBySql: + def __init__(self, collection_name: str, config: AnalyticdbVectorBySqlConfig): + self._collection_name = collection_name.lower() + self.databaseName = "knowledgebase" + self.config = config + self.table_name = f"{self.config.namespace}.{self._collection_name}" + self.pool = None + self._initialize() + if not self.pool: + self.pool = self._create_connection_pool() + + def _initialize(self) -> None: + cache_key = f"vector_initialize_{self.config.host}" + lock_name = f"{cache_key}_lock" + with redis_client.lock(lock_name, timeout=20): +
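# double-checked guard: the cache key is re-read under the Redis lock so only the first worker performs the one-time CREATE DATABASE and schema setup +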
database_exist_cache_key = f"vector_initialize_{self.config.host}" + if redis_client.get(database_exist_cache_key): + return + self._initialize_vector_database() + redis_client.set(database_exist_cache_key, 1, ex=3600) + + def _create_connection_pool(self): + return psycopg2.pool.SimpleConnectionPool( + self.config.min_connection, + self.config.max_connection, + host=self.config.host, + port=self.config.port, + user=self.config.account, + password=self.config.account_password, + database=self.databaseName, + ) + + @contextmanager + def _get_cursor(self): + conn = self.pool.getconn() + cur = conn.cursor() + try: + yield cur + finally: + cur.close() + conn.commit() + self.pool.putconn(conn) + + def _initialize_vector_database(self) -> None: + conn = psycopg2.connect( + host=self.config.host, + port=self.config.port, + user=self.config.account, + password=self.config.account_password, + database="postgres", + ) + conn.autocommit = True + cur = conn.cursor() + try: + cur.execute(f"CREATE DATABASE {self.databaseName}") + except Exception as e: + if "already exists" in str(e): + return + raise e + finally: + cur.close() + conn.close() + self.pool = self._create_connection_pool() + with self._get_cursor() as cur: + try: + cur.execute("CREATE TEXT SEARCH CONFIGURATION zh_cn (PARSER = zhparser)") + cur.execute("ALTER TEXT SEARCH CONFIGURATION zh_cn ADD MAPPING FOR n,v,a,i,e,l,x WITH simple") + except Exception as e: + if "already exists" not in str(e): + raise e + cur.execute( + "CREATE OR REPLACE FUNCTION " + "public.to_tsquery_from_text(txt text, lang regconfig DEFAULT 'english'::regconfig) " + "RETURNS tsquery LANGUAGE sql IMMUTABLE STRICT AS $function$ " + "SELECT to_tsquery(lang, COALESCE(string_agg(split_part(word, ':', 1), ' | '), '')) " + "FROM (SELECT unnest(string_to_array(to_tsvector(lang, txt)::text, ' ')) AS word) " + "AS words_only;$function$" + ) + cur.execute(f"CREATE SCHEMA IF NOT EXISTS {self.config.namespace}") + + def _create_collection_if_not_exists(self, embedding_dimension: int): + cache_key = f"vector_indexing_{self._collection_name}" + lock_name = f"{cache_key}_lock" + with redis_client.lock(lock_name, timeout=20): + collection_exist_cache_key = f"vector_indexing_{self._collection_name}" + if redis_client.get(collection_exist_cache_key): + return + with self._get_cursor() as cur: + cur.execute( + f"CREATE TABLE IF NOT EXISTS {self.table_name}(" + f"id text PRIMARY KEY," + f"vector real[], ref_doc_id text, page_content text, metadata_ jsonb, " + f"to_tsvector TSVECTOR" + f") WITH (fillfactor=70) DISTRIBUTED BY (id);" + ) + if embedding_dimension is not None: + index_name = f"{self._collection_name}_embedding_idx" + cur.execute(f"ALTER TABLE {self.table_name} ALTER COLUMN vector SET STORAGE PLAIN") + cur.execute( + f"CREATE INDEX {index_name} ON {self.table_name} USING ann(vector) " + f"WITH(dim='{embedding_dimension}', distancemeasure='{self.config.metrics}', " + f"pq_enable=0, external_storage=0)" + ) + cur.execute(f"CREATE INDEX ON {self.table_name} USING gin(to_tsvector)") + redis_client.set(collection_exist_cache_key, 1, ex=3600) + + def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): + values = [] + id_prefix = str(uuid.uuid4()) + "_" + sql = f""" + INSERT INTO {self.table_name} + (id, ref_doc_id, vector, page_content, metadata_, to_tsvector) + VALUES (%s, %s, %s, %s, %s, to_tsvector('zh_cn', %s)); + """ + for i, doc in enumerate(documents): + values.append( + ( + id_prefix + str(i), + doc.metadata.get("doc_id", str(uuid.uuid4())), + 
embeddings[i], + doc.page_content, + json.dumps(doc.metadata), + doc.page_content, + ) + ) + with self._get_cursor() as cur: + psycopg2.extras.execute_batch(cur, sql, values) + + def text_exists(self, id: str) -> bool: + with self._get_cursor() as cur: + cur.execute(f"SELECT id FROM {self.table_name} WHERE ref_doc_id = %s", (id,)) + return cur.fetchone() is not None + + def delete_by_ids(self, ids: list[str]) -> None: + with self._get_cursor() as cur: + try: + cur.execute(f"DELETE FROM {self.table_name} WHERE ref_doc_id IN %s", (tuple(ids),)) + except Exception as e: + if "does not exist" not in str(e): + raise e + + def delete_by_metadata_field(self, key: str, value: str) -> None: + with self._get_cursor() as cur: + try: + cur.execute(f"DELETE FROM {self.table_name} WHERE metadata_->>%s = %s", (key, value)) + except Exception as e: + if "does not exist" not in str(e): + raise e + + def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: + top_k = kwargs.get("top_k", 4) + score_threshold = float(kwargs.get("score_threshold") or 0.0) + with self._get_cursor() as cur: + query_vector_str = json.dumps(query_vector) + query_vector_str = "{" + query_vector_str[1:-1] + "}" + cur.execute( + f"SELECT t.id AS id, t.vector AS vector, (1.0 - t.score) AS score, " + f"t.page_content as page_content, t.metadata_ AS metadata_ " + f"FROM (SELECT id, vector, page_content, metadata_, vector <=> %s AS score " + f"FROM {self.table_name} ORDER BY score LIMIT {top_k} ) t", + (query_vector_str,), + ) + documents = [] + for record in cur: + id, vector, score, page_content, metadata = record + if score > score_threshold: + metadata["score"] = score + doc = Document( + page_content=page_content, + vector=vector, + metadata=metadata, + ) + documents.append(doc) + return documents + + def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: + top_k = kwargs.get("top_k", 4) + with self._get_cursor() as cur: + cur.execute( + f"""SELECT id, vector, page_content, metadata_, + ts_rank(to_tsvector, to_tsquery_from_text(%s, 'zh_cn'), 32) AS score + FROM {self.table_name} + WHERE to_tsvector@@to_tsquery_from_text(%s, 'zh_cn') + ORDER BY score DESC + LIMIT {top_k}""", + (f"'{query}'", f"'{query}'"), + ) + documents = [] + for record in cur: + id, vector, page_content, metadata, score = record + metadata["score"] = score + doc = Document( + page_content=page_content, + vector=vector, + metadata=metadata, + ) + documents.append(doc) + return documents + + def delete(self) -> None: + with self._get_cursor() as cur: + cur.execute(f"DROP TABLE IF EXISTS {self.table_name}") diff --git a/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py b/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py index 98da5e3d5e..d26726e864 100644 --- a/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py +++ b/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py @@ -242,7 +242,7 @@ class CouchbaseVector(BaseVector): try: self._cluster.query(query, named_parameters={"doc_ids": ids}).execute() except Exception as e: - logger.exception(e) + logger.exception(f"Failed to delete documents, ids: {ids}") def delete_by_document_id(self, document_id: str): query = f""" diff --git a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py index 79b827797c..08234c0c91 100644 --- a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py +++ b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py @@ -81,7 +81,7 @@ class 
LindormVectorStore(BaseVector): "ids": batch_ids}, _source=False) return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]} except Exception as e: - logger.exception(f"Error fetching batch {batch_ids}: {e}") + logger.exception(f"Error fetching batch {batch_ids}") return set() @retry(stop=stop_after_attempt(3), wait=wait_fixed(60)) @@ -99,7 +99,7 @@ class LindormVectorStore(BaseVector): ) return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]} except Exception as e: - logger.exception(f"Error fetching batch {batch_ids}: {e}") + logger.exception(f"Error fetching batch ids: {batch_ids}") return set() if ids is None: @@ -187,7 +187,7 @@ class LindormVectorStore(BaseVector): logger.warning( f"Index '{self._collection_name}' does not exist. No deletion performed.") except Exception as e: - logger.exception(f"Error occurred while deleting the index: {e}") + logger.exception(f"Error occurred while deleting the index: {self._collection_name}") raise e def text_exists(self, id: str) -> bool: @@ -213,7 +213,7 @@ class LindormVectorStore(BaseVector): response = self._client.search( index=self._collection_name, body=query) except Exception as e: - logger.exception(f"Error executing search: {e}") + logger.exception(f"Error executing vector search, query: {query}") raise docs_and_scores = [] diff --git a/api/core/rag/datasource/vdb/myscale/myscale_vector.py b/api/core/rag/datasource/vdb/myscale/myscale_vector.py index 2610b60a77..b7b6b803ad 100644 --- a/api/core/rag/datasource/vdb/myscale/myscale_vector.py +++ b/api/core/rag/datasource/vdb/myscale/myscale_vector.py @@ -142,7 +142,7 @@ class MyScaleVector(BaseVector): for r in self._client.query(sql).named_results() ] except Exception as e: - logging.exception(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m") + logging.exception(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m") # noqa:TRY401 return [] def delete(self) -> None: diff --git a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py index 49eb00f140..7a976d7c3c 100644 --- a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py +++ b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py @@ -158,7 +158,7 @@ class OpenSearchVector(BaseVector): try: response = self._client.search(index=self._collection_name.lower(), body=query) except Exception as e: - logger.exception(f"Error executing search: {e}") + logger.exception(f"Error executing vector search, query: {query}") raise docs = [] diff --git a/api/core/rag/embedding/cached_embedding.py b/api/core/rag/embedding/cached_embedding.py index 3ac65b88bb..1157c5c8e4 100644 --- a/api/core/rag/embedding/cached_embedding.py +++ b/api/core/rag/embedding/cached_embedding.py @@ -69,7 +69,7 @@ class CacheEmbedding(Embeddings): except IntegrityError: db.session.rollback() except Exception as e: - logging.exception("Failed transform embedding: %s", e) + logging.exception("Failed to transform embedding") cache_embeddings = [] try: for i, embedding in zip(embedding_queue_indices, embedding_queue_embeddings): @@ -89,7 +89,7 @@ class CacheEmbedding(Embeddings): db.session.rollback() except Exception as ex: db.session.rollback() - logger.exception("Failed to embed documents: %s", ex) + logger.exception("Failed to embed documents") raise ex return text_embeddings @@ -112,7 +112,7 @@ class CacheEmbedding(Embeddings): embedding_results = (embedding_results / np.linalg.norm(embedding_results)).tolist() except Exception as ex: if dify_config.DEBUG: -
logging.exception(f"Failed to embed query text: {ex}") + logging.exception(f"Failed to embed query text '{text[:10]}...({len(text)} chars)'") raise ex try: @@ -126,7 +126,7 @@ class CacheEmbedding(Embeddings): redis_client.setex(embedding_cache_key, 600, encoded_str) except Exception as ex: if dify_config.DEBUG: - logging.exception("Failed to add embedding to redis %s", ex) + logging.exception(f"Failed to add embedding to redis for the text '{text[:10]}...({len(text)} chars)'") raise ex return embedding_results diff --git a/api/core/rag/extractor/word_extractor.py b/api/core/rag/extractor/word_extractor.py index 8e084ab4ff..313bdce48b 100644 --- a/api/core/rag/extractor/word_extractor.py +++ b/api/core/rag/extractor/word_extractor.py @@ -229,7 +229,7 @@ class WordExtractor(BaseExtractor): for i in url_pattern.findall(x.text): hyperlinks_url = str(i) except Exception as e: - logger.exception(e) + logger.exception("Failed to parse HYPERLINK xml") def parse_paragraph(paragraph): paragraph_content = [] diff --git a/api/core/rag/index_processor/processor/paragraph_index_processor.py b/api/core/rag/index_processor/processor/paragraph_index_processor.py index ed5712220f..a631f953ce 100644 --- a/api/core/rag/index_processor/processor/paragraph_index_processor.py +++ b/api/core/rag/index_processor/processor/paragraph_index_processor.py @@ -11,6 +11,7 @@ from core.rag.extractor.entity.extract_setting import ExtractSetting from core.rag.extractor.extract_processor import ExtractProcessor from core.rag.index_processor.index_processor_base import BaseIndexProcessor from core.rag.models.document import Document +from core.tools.utils.text_processing_utils import remove_leading_symbols from libs import helper from models.dataset import Dataset @@ -43,11 +44,7 @@ class ParagraphIndexProcessor(BaseIndexProcessor): document_node.metadata["doc_id"] = doc_id document_node.metadata["doc_hash"] = hash # delete Splitter character - page_content = document_node.page_content - if page_content.startswith(".") or page_content.startswith("。"): - page_content = page_content[1:].strip() - else: - page_content = page_content + page_content = remove_leading_symbols(document_node.page_content).strip() if len(page_content) > 0: document_node.page_content = page_content split_documents.append(document_node) diff --git a/api/core/rag/index_processor/processor/qa_index_processor.py b/api/core/rag/index_processor/processor/qa_index_processor.py index 1dbc473281..320f0157a1 100644 --- a/api/core/rag/index_processor/processor/qa_index_processor.py +++ b/api/core/rag/index_processor/processor/qa_index_processor.py @@ -18,6 +18,7 @@ from core.rag.extractor.entity.extract_setting import ExtractSetting from core.rag.extractor.extract_processor import ExtractProcessor from core.rag.index_processor.index_processor_base import BaseIndexProcessor from core.rag.models.document import Document +from core.tools.utils.text_processing_utils import remove_leading_symbols from libs import helper from models.dataset import Dataset @@ -53,11 +54,7 @@ class QAIndexProcessor(BaseIndexProcessor): document_node.metadata["doc_hash"] = hash # delete Splitter character page_content = document_node.page_content - if page_content.startswith(".") or page_content.startswith("。"): - page_content = page_content[1:] - else: - page_content = page_content - document_node.page_content = page_content + document_node.page_content = remove_leading_symbols(page_content) split_documents.append(document_node) all_documents.extend(split_documents) for i in range(0, 
len(all_documents), 10): @@ -159,7 +156,7 @@ class QAIndexProcessor(BaseIndexProcessor): qa_documents.append(qa_document) format_documents.extend(qa_documents) except Exception as e: - logging.exception(e) + logging.exception("Failed to format qa document") all_qa_documents.extend(format_documents) diff --git a/api/core/rag/rerank/weight_rerank.py b/api/core/rag/rerank/weight_rerank.py index 2e3fbe04e2..b706f29bb1 100644 --- a/api/core/rag/rerank/weight_rerank.py +++ b/api/core/rag/rerank/weight_rerank.py @@ -36,23 +36,21 @@ class WeightRerankRunner(BaseRerankRunner): :return: """ - docs = [] - doc_id = [] unique_documents = [] + doc_ids = set() for document in documents: - if document.metadata["doc_id"] not in doc_id: - doc_id.append(document.metadata["doc_id"]) - docs.append(document.page_content) + doc_id = document.metadata.get("doc_id") + if doc_id not in doc_ids: + doc_ids.add(doc_id) unique_documents.append(document) documents = unique_documents - rerank_documents = [] query_scores = self._calculate_keyword_score(query, documents) query_vector_scores = self._calculate_cosine(self.tenant_id, query, documents, self.weights.vector_setting) + + rerank_documents = [] for document, query_score, query_vector_score in zip(documents, query_scores, query_vector_scores): - # format document score = ( self.weights.vector_setting.vector_weight * query_vector_score + self.weights.keyword_setting.keyword_weight * query_score @@ -61,7 +59,8 @@ continue document.metadata["score"] = score rerank_documents.append(document) - rerank_documents = sorted(rerank_documents, key=lambda x: x.metadata["score"], reverse=True) + + rerank_documents.sort(key=lambda x: x.metadata["score"], reverse=True) return rerank_documents[:top_n] if top_n else rerank_documents def _calculate_keyword_score(self, query: str, documents: list[Document]) -> list[float]: diff --git a/api/core/tools/custom_tool/tool.py b/api/core/tools/custom_tool/tool.py index a26b3763a2..e8594b5847 100644 --- a/api/core/tools/custom_tool/tool.py +++ b/api/core/tools/custom_tool/tool.py @@ -6,6 +6,7 @@ from urllib.parse import urlencode import httpx +from core.file.file_manager import download from core.helper import ssrf_proxy from core.tools.__base.tool import Tool from core.tools.__base.tool_runtime import ToolRuntime @@ -145,6 +146,7 @@ class ApiTool(Tool): path_params = {} body = {} cookies = {} + files = [] # check parameters for parameter in self.api_bundle.openapi.get("parameters", []): @@ -173,8 +175,12 @@ class ApiTool(Tool): properties = body_schema.get("properties", {}) for name, property in properties.items(): if name in parameters: - # convert type - body[name] = self._convert_body_property_type(property, parameters[name]) + if property.get("format") == "binary": + f = parameters[name] + files.append((name, (f.filename, download(f), f.mime_type))) + else: + # convert type + body[name] = self._convert_body_property_type(property, parameters[name]) elif name in required: raise ToolParameterValidationError( f"Missing required parameter {name} in operation {self.api_bundle.operation_id}" @@ -189,7 +195,7 @@ class ApiTool(Tool): for name, value in path_params.items(): url = url.replace(f"{{{name}}}", f"{value}") - # parse http body data if needed + # parse http body data if needed, for GET/HEAD/OPTIONS/TRACE, the body is ignored if "Content-Type" in headers: if headers["Content-Type"] == "application/json": body = json.dumps(body) @@ -205,6 +211,7 @@ class ApiTool(Tool): headers=headers,
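+            # note: each "files" entry uses httpx's multipart tuple form (field_name, (filename, content, mime_type)), built above for binary-format parameters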
cookies=cookies, data=body, + files=files, timeout=API_TOOL_DEFAULT_TIMEOUT, follow_redirects=True, ) diff --git a/api/core/tools/tool_file_manager.py b/api/core/tools/tool_file_manager.py index daa5d0242d..a028668c07 100644 --- a/api/core/tools/tool_file_manager.py +++ b/api/core/tools/tool_file_manager.py @@ -98,7 +98,7 @@ class ToolFileManager: response.raise_for_status() blob = response.content except Exception as e: - logger.exception(f"Failed to download file from {file_url}: {e}") + logger.exception(f"Failed to download file from {file_url}") raise mimetype = guess_type(file_url)[0] or "octet/stream" diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index 7ed98913d8..d4a6878fcd 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -526,7 +526,7 @@ class ToolManager: yield provider except Exception as e: - logger.exception(f"load builtin provider {provider} error: {e}") + logger.exception(f"load builtin provider {provider}") continue # set builtin providers loaded cls._builtin_providers_loaded = True diff --git a/api/core/tools/utils/message_transformer.py b/api/core/tools/utils/message_transformer.py index e4196095b0..876f0486b3 100644 --- a/api/core/tools/utils/message_transformer.py +++ b/api/core/tools/utils/message_transformer.py @@ -49,7 +49,7 @@ class ToolFileMessageTransformer: meta=message.meta.copy() if message.meta is not None else {}, ) except Exception as e: - logger.exception(e) + logger.exception(f"Failed to download image from {url}") yield ToolInvokeMessage( type=ToolInvokeMessage.MessageType.TEXT, message=ToolInvokeMessage.TextMessage( diff --git a/api/core/tools/utils/parser.py b/api/core/tools/utils/parser.py index b02a4f75d0..3e53ca6223 100644 --- a/api/core/tools/utils/parser.py +++ b/api/core/tools/utils/parser.py @@ -160,6 +160,9 @@ class ApiBasedToolSchemaParser: def _get_tool_parameter_type(parameter: dict) -> ToolParameter.ToolParameterType: parameter = parameter or {} typ = None + if parameter.get("format") == "binary": + return ToolParameter.ToolParameterType.FILE + if "type" in parameter: typ = parameter["type"] elif "schema" in parameter and "type" in parameter["schema"]: diff --git a/api/core/tools/utils/text_processing_utils.py b/api/core/tools/utils/text_processing_utils.py new file mode 100644 index 0000000000..6db9dfd0d9 --- /dev/null +++ b/api/core/tools/utils/text_processing_utils.py @@ -0,0 +1,16 @@ +import re + + +def remove_leading_symbols(text: str) -> str: + """ + Remove leading punctuation or symbols from the given text. + + Args: + text (str): The input text to process. + + Returns: + str: The text with leading punctuation or symbols removed. 
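+ + Example (illustrative): + >>> remove_leading_symbols("...Hello, world!") + 'Hello, world!'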
+ """ + # Match Unicode ranges for punctuation and symbols + pattern = r"^[\u2000-\u206F\u2E00-\u2E7F\u3000-\u303F!\"#$%&'()*+,\-./:;<=>?@\[\]^_`{|}~]+" + return re.sub(pattern, "", text) diff --git a/api/core/tools/workflow_as_tool/tool.py b/api/core/tools/workflow_as_tool/tool.py index d7838eae57..b49f6faa50 100644 --- a/api/core/tools/workflow_as_tool/tool.py +++ b/api/core/tools/workflow_as_tool/tool.py @@ -199,7 +199,7 @@ class WorkflowTool(Tool): files.append(file_dict) except Exception as e: - logger.exception(e) + logger.exception(f"Failed to transform file {file}") else: parameters_result[parameter.name] = tool_parameters.get(parameter.name) diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py index f07ad4de11..60a5901b21 100644 --- a/api/core/workflow/graph_engine/graph_engine.py +++ b/api/core/workflow/graph_engine/graph_engine.py @@ -172,7 +172,7 @@ class GraphEngine: "answer" ].strip() except Exception as e: - logger.exception(f"Graph run failed: {str(e)}") + logger.exception("Graph run failed") yield GraphRunFailedEvent(error=str(e)) return @@ -692,7 +692,7 @@ class GraphEngine: ) return except Exception as e: - logger.exception(f"Node {node_instance.node_data.title} run failed: {str(e)}") + logger.exception(f"Node {node_instance.node_data.title} run failed") raise e finally: db.session.close() diff --git a/api/core/workflow/nodes/base/node.py b/api/core/workflow/nodes/base/node.py index 1433c8eaed..1871fff618 100644 --- a/api/core/workflow/nodes/base/node.py +++ b/api/core/workflow/nodes/base/node.py @@ -69,7 +69,7 @@ class BaseNode(Generic[GenericNodeData]): try: result = self._run() except Exception as e: - logger.exception(f"Node {self.node_id} failed to run: {e}") + logger.exception(f"Node {self.node_id} failed to run") result = NodeRunResult( status=WorkflowNodeExecutionStatus.FAILED, error=str(e), diff --git a/api/core/workflow/nodes/http_request/entities.py b/api/core/workflow/nodes/http_request/entities.py index 36ded104c1..5e39ef79d1 100644 --- a/api/core/workflow/nodes/http_request/entities.py +++ b/api/core/workflow/nodes/http_request/entities.py @@ -1,4 +1,6 @@ +import mimetypes from collections.abc import Sequence +from email.message import Message from typing import Any, Literal, Optional import httpx @@ -7,14 +9,6 @@ from pydantic import BaseModel, Field, ValidationInfo, field_validator from configs import dify_config from core.workflow.nodes.base import BaseNodeData -NON_FILE_CONTENT_TYPES = ( - "application/json", - "application/xml", - "text/html", - "text/plain", - "application/x-www-form-urlencoded", -) - class HttpRequestNodeAuthorizationConfig(BaseModel): type: Literal["basic", "bearer", "custom"] @@ -93,13 +87,53 @@ class Response: @property def is_file(self): - content_type = self.content_type + """ + Determine if the response contains a file by checking: + 1. Content-Disposition header (RFC 6266) + 2. Content characteristics + 3. 
MIME type analysis + """ + content_type = self.content_type.split(";")[0].strip().lower() content_disposition = self.response.headers.get("content-disposition", "") - return "attachment" in content_disposition or ( - not any(non_file in content_type for non_file in NON_FILE_CONTENT_TYPES) - and any(file_type in content_type for file_type in ("application/", "image/", "audio/", "video/")) - ) + # Check if it's explicitly marked as an attachment + if content_disposition: + msg = Message() + msg["content-disposition"] = content_disposition + disp_type = msg.get_content_disposition() # Returns 'attachment', 'inline', or None + filename = msg.get_filename() # Returns filename if present, None otherwise + if disp_type == "attachment" or filename is not None: + return True + + # For application types, try to detect if it's a text-based format + if content_type.startswith("application/"): + # Common text-based application types + if any( + text_type in content_type + for text_type in ("json", "xml", "javascript", "x-www-form-urlencoded", "yaml", "graphql") + ): + return False + + # Try to detect if content is text-based by sampling first few bytes + try: + # Sample first 1024 bytes for text detection + content_sample = self.response.content[:1024] + content_sample.decode("utf-8") + # If we can decode as UTF-8 and find common text patterns, likely not a file + text_markers = (b"{", b"[", b"<", b"function", b"var ", b"const ", b"let ") + if any(marker in content_sample for marker in text_markers): + return False + except UnicodeDecodeError: + # If we can't decode as UTF-8, likely a binary file + return True + + # For other types, use MIME type analysis + main_type, _ = mimetypes.guess_type("dummy" + (mimetypes.guess_extension(content_type) or "")) + if main_type: + return main_type.split("/")[0] in ("application", "image", "audio", "video") + + # For unknown types, check if it's a media type + return any(media_type in content_type for media_type in ("image/", "audio/", "video/")) @property def content_type(self) -> str: diff --git a/api/extensions/ext_celery.py b/api/extensions/ext_celery.py index 42012eee8e..1b78e36a57 100644 --- a/api/extensions/ext_celery.py +++ b/api/extensions/ext_celery.py @@ -68,6 +68,7 @@ def init_app(app: Flask) -> Celery: "schedule.clean_unused_datasets_task", "schedule.create_tidb_serverless_task", "schedule.update_tidb_serverless_status_task", + "schedule.clean_messages", ] day = dify_config.CELERY_BEAT_SCHEDULER_TIME beat_schedule = { @@ -87,6 +88,10 @@ def init_app(app: Flask) -> Celery: "task": "schedule.update_tidb_serverless_status_task.update_tidb_serverless_status_task", "schedule": crontab(minute="30", hour="*"), }, + "clean_messages": { + "task": "schedule.clean_messages.clean_messages", + "schedule": timedelta(days=day), + }, } celery_app.conf.update(beat_schedule=beat_schedule, imports=imports) diff --git a/api/extensions/ext_redis.py b/api/extensions/ext_redis.py index 141a91a458..c5d75b89fe 100644 --- a/api/extensions/ext_redis.py +++ b/api/extensions/ext_redis.py @@ -1,11 +1,12 @@ import redis +from redis.cluster import ClusterNode, RedisCluster from redis.connection import Connection, SSLConnection from redis.sentinel import Sentinel from configs import dify_config -class RedisClientWrapper(redis.Redis): +class RedisClientWrapper: """ A wrapper class for the Redis client that addresses the issue where the global `redis_client` variable cannot be updated when a new Redis instance is returned @@ -71,6 +72,12 @@ def init_app(app): ) master = 
diff --git a/api/extensions/ext_celery.py b/api/extensions/ext_celery.py
index 42012eee8e..1b78e36a57 100644
--- a/api/extensions/ext_celery.py
+++ b/api/extensions/ext_celery.py
@@ -68,6 +68,7 @@ def init_app(app: Flask) -> Celery:
         "schedule.clean_unused_datasets_task",
         "schedule.create_tidb_serverless_task",
         "schedule.update_tidb_serverless_status_task",
+        "schedule.clean_messages",
     ]
     day = dify_config.CELERY_BEAT_SCHEDULER_TIME
     beat_schedule = {
@@ -87,6 +88,10 @@ def init_app(app: Flask) -> Celery:
             "task": "schedule.update_tidb_serverless_status_task.update_tidb_serverless_status_task",
             "schedule": crontab(minute="30", hour="*"),
         },
+        "clean_messages": {
+            "task": "schedule.clean_messages.clean_messages",
+            "schedule": timedelta(days=day),
+        },
     }
     celery_app.conf.update(beat_schedule=beat_schedule, imports=imports)
diff --git a/api/extensions/ext_redis.py b/api/extensions/ext_redis.py
index 141a91a458..c5d75b89fe 100644
--- a/api/extensions/ext_redis.py
+++ b/api/extensions/ext_redis.py
@@ -1,11 +1,12 @@
 import redis
+from redis.cluster import ClusterNode, RedisCluster
 from redis.connection import Connection, SSLConnection
 from redis.sentinel import Sentinel
 
 from configs import dify_config
 
 
-class RedisClientWrapper(redis.Redis):
+class RedisClientWrapper:
     """
     A wrapper class for the Redis client that addresses the issue where the global
     `redis_client` variable cannot be updated when a new Redis instance is returned
@@ -71,6 +72,12 @@ def init_app(app):
         )
         master = sentinel.master_for(dify_config.REDIS_SENTINEL_SERVICE_NAME, **redis_params)
         redis_client.initialize(master)
+    elif dify_config.REDIS_USE_CLUSTERS:
+        nodes = [
+            ClusterNode(host=node.split(":")[0], port=int(node.split(":")[1]))
+            for node in dify_config.REDIS_CLUSTERS.split(",")
+        ]
+        redis_client.initialize(RedisCluster(startup_nodes=nodes, password=dify_config.REDIS_CLUSTERS_PASSWORD))
     else:
         redis_params.update(
             {
diff --git a/api/extensions/ext_storage.py b/api/extensions/ext_storage.py
index 86fadf23d7..fa88da68b7 100644
--- a/api/extensions/ext_storage.py
+++ b/api/extensions/ext_storage.py
@@ -70,7 +70,7 @@ class Storage:
         try:
             self.storage_runner.save(filename, data)
         except Exception as e:
-            logging.exception("Failed to save file: %s", e)
+            logging.exception(f"Failed to save file {filename}")
             raise e
 
     def load(self, filename: str, /, *, stream: bool = False) -> Union[bytes, Generator]:
@@ -80,42 +80,42 @@ class Storage:
             else:
                 return self.load_once(filename)
         except Exception as e:
-            logging.exception("Failed to load file: %s", e)
+            logging.exception(f"Failed to load file {filename}")
             raise e
 
     def load_once(self, filename: str) -> bytes:
         try:
             return self.storage_runner.load_once(filename)
         except Exception as e:
-            logging.exception("Failed to load_once file: %s", e)
+            logging.exception(f"Failed to load_once file {filename}")
             raise e
 
     def load_stream(self, filename: str) -> Generator:
         try:
             return self.storage_runner.load_stream(filename)
         except Exception as e:
-            logging.exception("Failed to load_stream file: %s", e)
+            logging.exception(f"Failed to load_stream file {filename}")
             raise e
 
     def download(self, filename, target_filepath):
         try:
             self.storage_runner.download(filename, target_filepath)
         except Exception as e:
-            logging.exception("Failed to download file: %s", e)
+            logging.exception(f"Failed to download file {filename}")
             raise e
 
     def exists(self, filename):
         try:
             return self.storage_runner.exists(filename)
         except Exception as e:
-            logging.exception("Failed to check file exists: %s", e)
+            logging.exception(f"Failed to check file exists {filename}")
             raise e
 
     def delete(self, filename):
         try:
             return self.storage_runner.delete(filename)
         except Exception as e:
-            logging.exception("Failed to delete file: %s", e)
+            logging.exception(f"Failed to delete file {filename}")
             raise e
diff --git a/api/factories/file_factory.py b/api/factories/file_factory.py
index 15e9d7f34f..8cb45f194b 100644
--- a/api/factories/file_factory.py
+++ b/api/factories/file_factory.py
@@ -169,6 +169,7 @@ def _get_remote_file_info(url: str):
     mime_type = mimetypes.guess_type(url)[0] or ""
     file_size = -1
     filename = url.split("/")[-1].split("?")[0] or "unknown_file"
+    mime_type = mime_type or mimetypes.guess_type(filename)[0]
 
     resp = ssrf_proxy.head(url, follow_redirects=True)
     if resp.status_code == httpx.codes.OK:
@@ -233,10 +234,10 @@ def _is_file_valid_with_config(*, file: File, config: FileUploadConfig) -> bool:
     if config.allowed_file_types and file.type not in config.allowed_file_types and file.type != FileType.CUSTOM:
         return False
 
-    if config.allowed_extensions and file.extension not in config.allowed_extensions:
+    if config.allowed_file_extensions and file.extension not in config.allowed_file_extensions:
         return False
 
-    if config.allowed_upload_methods and file.transfer_method not in config.allowed_upload_methods:
+    if config.allowed_file_upload_methods and file.transfer_method not in config.allowed_file_upload_methods:
         return False
 
     if file.type == FileType.IMAGE and config.image_config:
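On the `ext_redis.py` change above: `REDIS_CLUSTERS` is read as a comma-separated `host:port` list, so each entry splits into one `ClusterNode`. A minimal sketch of that wiring with redis-py, assuming a reachable cluster at the illustrative addresses (the values here are examples, not defaults):

    from redis.cluster import ClusterNode, RedisCluster

    # Illustrative value; in Dify this comes from REDIS_CLUSTERS in the .env file.
    clusters = "10.0.0.1:6379,10.0.0.2:6379,10.0.0.3:6379"

    nodes = [
        ClusterNode(host=host, port=int(port))
        for host, port in (entry.split(":") for entry in clusters.split(","))
    ]
    # password=None corresponds to an unset REDIS_CLUSTERS_PASSWORD.
    client = RedisCluster(startup_nodes=nodes, password=None)
    client.set("greeting", "hello")
    print(client.get("greeting"))

diff --git a/api/libs/smtp.py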
b/api/libs/smtp.py index d57d99f3b7..2325d69a41 100644 --- a/api/libs/smtp.py +++ b/api/libs/smtp.py @@ -39,13 +39,13 @@ class SMTPClient: smtp.sendmail(self._from, mail["to"], msg.as_string()) except smtplib.SMTPException as e: - logging.exception(f"SMTP error occurred: {str(e)}") + logging.exception("SMTP error occurred") raise except TimeoutError as e: - logging.exception(f"Timeout occurred while sending email: {str(e)}") + logging.exception("Timeout occurred while sending email") raise except Exception as e: - logging.exception(f"Unexpected error occurred while sending email: {str(e)}") + logging.exception(f"Unexpected error occurred while sending email to {mail['to']}") raise finally: if smtp: diff --git a/api/migrations/versions/2024_11_12_0925-01d6889832f7_add_created_at_index_for_messages.py b/api/migrations/versions/2024_11_12_0925-01d6889832f7_add_created_at_index_for_messages.py new file mode 100644 index 0000000000..d94508edcf --- /dev/null +++ b/api/migrations/versions/2024_11_12_0925-01d6889832f7_add_created_at_index_for_messages.py @@ -0,0 +1,31 @@ +"""add_created_at_index_for_messages + +Revision ID: 01d6889832f7 +Revises: 09a8d1878d9b +Create Date: 2024-11-12 09:25:05.527827 + +""" +from alembic import op +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '01d6889832f7' +down_revision = '09a8d1878d9b' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.create_index('message_created_at_idx', ['created_at'], unique=False) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.drop_index('message_created_at_idx') + # ### end Alembic commands ### diff --git a/api/models/dataset.py b/api/models/dataset.py index a1a626d7e4..a8b2c419d1 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -679,7 +679,7 @@ class DatasetKeywordTable(db.Model): return json.loads(keyword_table_text.decode("utf-8"), cls=SetDecoder) return None except Exception as e: - logging.exception(str(e)) + logging.exception(f"Failed to load keyword table from file: {file_key}") return None diff --git a/api/models/model.py b/api/models/model.py index 1d3468566d..1a9ebbc0a6 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -732,6 +732,7 @@ class Message(Base): Index("message_end_user_idx", "app_id", "from_source", "from_end_user_id"), Index("message_account_idx", "app_id", "from_source", "from_account_id"), Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"), + Index("message_created_at_idx", "created_at"), ) id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) diff --git a/api/models/workflow.py b/api/models/workflow.py index 1ad94d163b..721082d06d 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -173,9 +173,9 @@ class Workflow(Base): ) features["file_upload"]["enabled"] = image_enabled features["file_upload"]["number_limits"] = image_number_limits - features["file_upload"]["allowed_upload_methods"] = image_transfer_methods + features["file_upload"]["allowed_file_upload_methods"] = image_transfer_methods features["file_upload"]["allowed_file_types"] = ["image"] - features["file_upload"]["allowed_extensions"] = [] + features["file_upload"]["allowed_file_extensions"] = [] del features["file_upload"]["image"] self._features = json.dumps(features) return self._features diff --git a/api/poetry.lock b/api/poetry.lock index 034bf45785..dc9ebd1b75 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -410,13 +410,13 @@ aliyun-python-sdk-core = ">=2.11.5" [[package]] name = "amqp" -version = "5.2.0" +version = "5.3.1" description = "Low-level AMQP client for Python (fork of amqplib)." 
optional = false python-versions = ">=3.6" files = [ - {file = "amqp-5.2.0-py3-none-any.whl", hash = "sha256:827cb12fb0baa892aad844fd95258143bce4027fdac4fccddbc43330fd281637"}, - {file = "amqp-5.2.0.tar.gz", hash = "sha256:a1ecff425ad063ad42a486c902807d1482311481c8ad95a72694b2975e75f7fd"}, + {file = "amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2"}, + {file = "amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432"}, ] [package.dependencies] @@ -571,13 +571,13 @@ cryptography = "*" [[package]] name = "azure-ai-inference" -version = "1.0.0b5" -description = "Microsoft Azure Ai Inference Client Library for Python" +version = "1.0.0b6" +description = "Microsoft Azure AI Inference Client Library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "azure_ai_inference-1.0.0b5-py3-none-any.whl", hash = "sha256:0147653088033f1fd059d5f4bd0fedac82529fdcc7a0d2183d9508b3f80cf549"}, - {file = "azure_ai_inference-1.0.0b5.tar.gz", hash = "sha256:c95b490bcd670ccdeb1048dc2b45e0f8252a4d69a348ca15d4510d327b64dd0d"}, + {file = "azure_ai_inference-1.0.0b6-py3-none-any.whl", hash = "sha256:5699ad78d70ec2d227a5eff2c1bafc845018f6624edc5b03589dfff861c54958"}, + {file = "azure_ai_inference-1.0.0b6.tar.gz", hash = "sha256:b8ac941de1e69151bad464191e18856d4e74f962ae03235da137a9a326143676"}, ] [package.dependencies] @@ -587,6 +587,7 @@ typing-extensions = ">=4.6.0" [package.extras] opentelemetry = ["azure-core-tracing-opentelemetry"] +prompts = ["pyyaml"] [[package]] name = "azure-ai-ml" @@ -719,13 +720,13 @@ msrest = ">=0.6.21" [[package]] name = "azure-storage-file-share" -version = "12.19.0" +version = "12.20.0" description = "Microsoft Azure Azure File Share Storage Client Library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "azure_storage_file_share-12.19.0-py3-none-any.whl", hash = "sha256:eac6cf1a454aba58af4e6ba450b36d16aa1d0c49679fb64ea8756bb896698c5b"}, - {file = "azure_storage_file_share-12.19.0.tar.gz", hash = "sha256:ea7a4174dc6c52f50ac8c30f228159fcc3675d1f8ba771b8d0efcbc310740278"}, + {file = "azure_storage_file_share-12.20.0-py3-none-any.whl", hash = "sha256:fd5c4f09d7784d68b8ed3de473b7525904f1c4b115f9cd200c838b0ee720cb5f"}, + {file = "azure_storage_file_share-12.20.0.tar.gz", hash = "sha256:f120fc67bae0a84c1b54d06faa70df351be14d1395b9a085350e833f7d347a65"}, ] [package.dependencies] @@ -865,13 +866,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.57" +version = "1.35.63" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.57-py3-none-any.whl", hash = "sha256:92ddd02469213766872cb2399269dd20948f90348b42bf08379881d5e946cc34"}, - {file = "botocore-1.35.57.tar.gz", hash = "sha256:d96306558085baf0bcb3b022d7a8c39c93494f031edb376694d2b2dcd0e81327"}, + {file = "botocore-1.35.63-py3-none-any.whl", hash = "sha256:0ca1200694a4c0a3fa846795d8e8a08404c214e21195eb9e010c4b8a4ca78a4a"}, + {file = "botocore-1.35.63.tar.gz", hash = "sha256:2b8196bab0a997d206c3d490b52e779ef47dffb68c57c685443f77293aca1589"}, ] [package.dependencies] @@ -950,10 +951,6 @@ files = [ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"}, {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"}, {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"}, {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"}, {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"}, {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"}, @@ -966,14 +963,8 @@ files = [ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"}, {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"}, {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"}, {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"}, {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"}, - {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"}, - {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", 
hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"}, {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"}, {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"}, {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"}, @@ -984,24 +975,8 @@ files = [ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"}, {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"}, {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"}, {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"}, {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"}, - {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"}, - {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"}, - {file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = 
"sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"}, - {file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"}, {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"}, {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"}, {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"}, @@ -1011,10 +986,6 @@ files = [ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"}, {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"}, {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"}, {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"}, {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"}, {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"}, @@ -1026,10 +997,6 @@ files = [ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"}, {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"}, {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"}, {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"}, {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"}, {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"}, @@ -1042,10 +1009,6 @@ files = [ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"}, {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"}, {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"}, {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"}, {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"}, {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"}, @@ -1058,10 +1021,6 @@ files = [ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"}, {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"}, {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"}, {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"}, {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"}, {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"}, @@ -1763,76 +1722,65 @@ cron = ["capturer (>=2.4)"] [[package]] name = "contourpy" -version = "1.3.0" +version = "1.3.1" description = "Python library for calculating contours of 2D quadrilateral grids" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" files = [ - {file = "contourpy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:880ea32e5c774634f9fcd46504bf9f080a41ad855f4fef54f5380f5133d343c7"}, - {file = "contourpy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:76c905ef940a4474a6289c71d53122a4f77766eef23c03cd57016ce19d0f7b42"}, - {file = 
"contourpy-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92f8557cbb07415a4d6fa191f20fd9d2d9eb9c0b61d1b2f52a8926e43c6e9af7"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36f965570cff02b874773c49bfe85562b47030805d7d8360748f3eca570f4cab"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cacd81e2d4b6f89c9f8a5b69b86490152ff39afc58a95af002a398273e5ce589"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69375194457ad0fad3a839b9e29aa0b0ed53bb54db1bfb6c3ae43d111c31ce41"}, - {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a52040312b1a858b5e31ef28c2e865376a386c60c0e248370bbea2d3f3b760d"}, - {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3faeb2998e4fcb256542e8a926d08da08977f7f5e62cf733f3c211c2a5586223"}, - {file = "contourpy-1.3.0-cp310-cp310-win32.whl", hash = "sha256:36e0cff201bcb17a0a8ecc7f454fe078437fa6bda730e695a92f2d9932bd507f"}, - {file = "contourpy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:87ddffef1dbe5e669b5c2440b643d3fdd8622a348fe1983fad7a0f0ccb1cd67b"}, - {file = "contourpy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fa4c02abe6c446ba70d96ece336e621efa4aecae43eaa9b030ae5fb92b309ad"}, - {file = "contourpy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:834e0cfe17ba12f79963861e0f908556b2cedd52e1f75e6578801febcc6a9f49"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbc4c3217eee163fa3984fd1567632b48d6dfd29216da3ded3d7b844a8014a66"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4865cd1d419e0c7a7bf6de1777b185eebdc51470800a9f42b9e9decf17762081"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:303c252947ab4b14c08afeb52375b26781ccd6a5ccd81abcdfc1fafd14cf93c1"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637f674226be46f6ba372fd29d9523dd977a291f66ab2a74fbeb5530bb3f445d"}, - {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:76a896b2f195b57db25d6b44e7e03f221d32fe318d03ede41f8b4d9ba1bff53c"}, - {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e1fd23e9d01591bab45546c089ae89d926917a66dceb3abcf01f6105d927e2cb"}, - {file = "contourpy-1.3.0-cp311-cp311-win32.whl", hash = "sha256:d402880b84df3bec6eab53cd0cf802cae6a2ef9537e70cf75e91618a3801c20c"}, - {file = "contourpy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:6cb6cc968059db9c62cb35fbf70248f40994dfcd7aa10444bbf8b3faeb7c2d67"}, - {file = "contourpy-1.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:570ef7cf892f0afbe5b2ee410c507ce12e15a5fa91017a0009f79f7d93a1268f"}, - {file = "contourpy-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:da84c537cb8b97d153e9fb208c221c45605f73147bd4cadd23bdae915042aad6"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0be4d8425bfa755e0fd76ee1e019636ccc7c29f77a7c86b4328a9eb6a26d0639"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c0da700bf58f6e0b65312d0a5e695179a71d0163957fa381bb3c1f72972537c"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:eb8b141bb00fa977d9122636b16aa67d37fd40a3d8b52dd837e536d64b9a4d06"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3634b5385c6716c258d0419c46d05c8aa7dc8cb70326c9a4fb66b69ad2b52e09"}, - {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0dce35502151b6bd35027ac39ba6e5a44be13a68f55735c3612c568cac3805fd"}, - {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea348f053c645100612b333adc5983d87be69acdc6d77d3169c090d3b01dc35"}, - {file = "contourpy-1.3.0-cp312-cp312-win32.whl", hash = "sha256:90f73a5116ad1ba7174341ef3ea5c3150ddf20b024b98fb0c3b29034752c8aeb"}, - {file = "contourpy-1.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:b11b39aea6be6764f84360fce6c82211a9db32a7c7de8fa6dd5397cf1d079c3b"}, - {file = "contourpy-1.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3e1c7fa44aaae40a2247e2e8e0627f4bea3dd257014764aa644f319a5f8600e3"}, - {file = "contourpy-1.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:364174c2a76057feef647c802652f00953b575723062560498dc7930fc9b1cb7"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32b238b3b3b649e09ce9aaf51f0c261d38644bdfa35cbaf7b263457850957a84"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d51fca85f9f7ad0b65b4b9fe800406d0d77017d7270d31ec3fb1cc07358fdea0"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:732896af21716b29ab3e988d4ce14bc5133733b85956316fb0c56355f398099b"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d73f659398a0904e125280836ae6f88ba9b178b2fed6884f3b1f95b989d2c8da"}, - {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c6c7c2408b7048082932cf4e641fa3b8ca848259212f51c8c59c45aa7ac18f14"}, - {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f317576606de89da6b7e0861cf6061f6146ead3528acabff9236458a6ba467f8"}, - {file = "contourpy-1.3.0-cp313-cp313-win32.whl", hash = "sha256:31cd3a85dbdf1fc002280c65caa7e2b5f65e4a973fcdf70dd2fdcb9868069294"}, - {file = "contourpy-1.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4553c421929ec95fb07b3aaca0fae668b2eb5a5203d1217ca7c34c063c53d087"}, - {file = "contourpy-1.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:345af746d7766821d05d72cb8f3845dfd08dd137101a2cb9b24de277d716def8"}, - {file = "contourpy-1.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3bb3808858a9dc68f6f03d319acd5f1b8a337e6cdda197f02f4b8ff67ad2057b"}, - {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:420d39daa61aab1221567b42eecb01112908b2cab7f1b4106a52caaec8d36973"}, - {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d63ee447261e963af02642ffcb864e5a2ee4cbfd78080657a9880b8b1868e18"}, - {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:167d6c890815e1dac9536dca00828b445d5d0df4d6a8c6adb4a7ec3166812fa8"}, - {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:710a26b3dc80c0e4febf04555de66f5fd17e9cf7170a7b08000601a10570bda6"}, - {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:75ee7cb1a14c617f34a51d11fa7524173e56551646828353c4af859c56b766e2"}, - {file = 
"contourpy-1.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:33c92cdae89ec5135d036e7218e69b0bb2851206077251f04a6c4e0e21f03927"}, - {file = "contourpy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a11077e395f67ffc2c44ec2418cfebed032cd6da3022a94fc227b6faf8e2acb8"}, - {file = "contourpy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e8134301d7e204c88ed7ab50028ba06c683000040ede1d617298611f9dc6240c"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12968fdfd5bb45ffdf6192a590bd8ddd3ba9e58360b29683c6bb71a7b41edca"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd2a0fc506eccaaa7595b7e1418951f213cf8255be2600f1ea1b61e46a60c55f"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfb5c62ce023dfc410d6059c936dcf96442ba40814aefbfa575425a3a7f19dc"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68a32389b06b82c2fdd68276148d7b9275b5f5cf13e5417e4252f6d1a34f72a2"}, - {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:94e848a6b83da10898cbf1311a815f770acc9b6a3f2d646f330d57eb4e87592e"}, - {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d78ab28a03c854a873787a0a42254a0ccb3cb133c672f645c9f9c8f3ae9d0800"}, - {file = "contourpy-1.3.0-cp39-cp39-win32.whl", hash = "sha256:81cb5ed4952aae6014bc9d0421dec7c5835c9c8c31cdf51910b708f548cf58e5"}, - {file = "contourpy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:14e262f67bd7e6eb6880bc564dcda30b15e351a594657e55b7eec94b6ef72843"}, - {file = "contourpy-1.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fe41b41505a5a33aeaed2a613dccaeaa74e0e3ead6dd6fd3a118fb471644fd6c"}, - {file = "contourpy-1.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca7e17a65f72a5133bdbec9ecf22401c62bcf4821361ef7811faee695799779"}, - {file = "contourpy-1.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ec4dc6bf570f5b22ed0d7efba0dfa9c5b9e0431aeea7581aa217542d9e809a4"}, - {file = "contourpy-1.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:00ccd0dbaad6d804ab259820fa7cb0b8036bda0686ef844d24125d8287178ce0"}, - {file = "contourpy-1.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca947601224119117f7c19c9cdf6b3ab54c5726ef1d906aa4a69dfb6dd58102"}, - {file = "contourpy-1.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6ec93afeb848a0845a18989da3beca3eec2c0f852322efe21af1931147d12cb"}, - {file = "contourpy-1.3.0.tar.gz", hash = "sha256:7ffa0db17717a8ffb127efd0c95a4362d996b892c2904db72428d5b52e1938a4"}, + {file = "contourpy-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a045f341a77b77e1c5de31e74e966537bba9f3c4099b35bf4c2e3939dd54cdab"}, + {file = "contourpy-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:500360b77259914f7805af7462e41f9cb7ca92ad38e9f94d6c8641b089338124"}, + {file = "contourpy-1.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2f926efda994cdf3c8d3fdb40b9962f86edbc4457e739277b961eced3d0b4c1"}, + {file = "contourpy-1.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:adce39d67c0edf383647a3a007de0a45fd1b08dedaa5318404f1a73059c2512b"}, + {file = "contourpy-1.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abbb49fb7dac584e5abc6636b7b2a7227111c4f771005853e7d25176daaf8453"}, + {file 
= "contourpy-1.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0cffcbede75c059f535725c1680dfb17b6ba8753f0c74b14e6a9c68c29d7ea3"}, + {file = "contourpy-1.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ab29962927945d89d9b293eabd0d59aea28d887d4f3be6c22deaefbb938a7277"}, + {file = "contourpy-1.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:974d8145f8ca354498005b5b981165b74a195abfae9a8129df3e56771961d595"}, + {file = "contourpy-1.3.1-cp310-cp310-win32.whl", hash = "sha256:ac4578ac281983f63b400f7fe6c101bedc10651650eef012be1ccffcbacf3697"}, + {file = "contourpy-1.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:174e758c66bbc1c8576992cec9599ce8b6672b741b5d336b5c74e35ac382b18e"}, + {file = "contourpy-1.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3e8b974d8db2c5610fb4e76307e265de0edb655ae8169e8b21f41807ccbeec4b"}, + {file = "contourpy-1.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20914c8c973f41456337652a6eeca26d2148aa96dd7ac323b74516988bea89fc"}, + {file = "contourpy-1.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d40d37c1c3a4961b4619dd9d77b12124a453cc3d02bb31a07d58ef684d3d86"}, + {file = "contourpy-1.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:113231fe3825ebf6f15eaa8bc1f5b0ddc19d42b733345eae0934cb291beb88b6"}, + {file = "contourpy-1.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4dbbc03a40f916a8420e420d63e96a1258d3d1b58cbdfd8d1f07b49fcbd38e85"}, + {file = "contourpy-1.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a04ecd68acbd77fa2d39723ceca4c3197cb2969633836ced1bea14e219d077c"}, + {file = "contourpy-1.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c414fc1ed8ee1dbd5da626cf3710c6013d3d27456651d156711fa24f24bd1291"}, + {file = "contourpy-1.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:31c1b55c1f34f80557d3830d3dd93ba722ce7e33a0b472cba0ec3b6535684d8f"}, + {file = "contourpy-1.3.1-cp311-cp311-win32.whl", hash = "sha256:f611e628ef06670df83fce17805c344710ca5cde01edfdc72751311da8585375"}, + {file = "contourpy-1.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:b2bdca22a27e35f16794cf585832e542123296b4687f9fd96822db6bae17bfc9"}, + {file = "contourpy-1.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0ffa84be8e0bd33410b17189f7164c3589c229ce5db85798076a3fa136d0e509"}, + {file = "contourpy-1.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805617228ba7e2cbbfb6c503858e626ab528ac2a32a04a2fe88ffaf6b02c32bc"}, + {file = "contourpy-1.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade08d343436a94e633db932e7e8407fe7de8083967962b46bdfc1b0ced39454"}, + {file = "contourpy-1.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47734d7073fb4590b4a40122b35917cd77be5722d80683b249dac1de266aac80"}, + {file = "contourpy-1.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ba94a401342fc0f8b948e57d977557fbf4d515f03c67682dd5c6191cb2d16ec"}, + {file = "contourpy-1.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efa874e87e4a647fd2e4f514d5e91c7d493697127beb95e77d2f7561f6905bd9"}, + {file = "contourpy-1.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1bf98051f1045b15c87868dbaea84f92408337d4f81d0e449ee41920ea121d3b"}, + {file = "contourpy-1.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:61332c87493b00091423e747ea78200659dc09bdf7fd69edd5e98cef5d3e9a8d"}, + {file = "contourpy-1.3.1-cp312-cp312-win32.whl", hash = "sha256:e914a8cb05ce5c809dd0fe350cfbb4e881bde5e2a38dc04e3afe1b3e58bd158e"}, + {file = "contourpy-1.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:08d9d449a61cf53033612cb368f3a1b26cd7835d9b8cd326647efe43bca7568d"}, + {file = "contourpy-1.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a761d9ccfc5e2ecd1bf05534eda382aa14c3e4f9205ba5b1684ecfe400716ef2"}, + {file = "contourpy-1.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:523a8ee12edfa36f6d2a49407f705a6ef4c5098de4f498619787e272de93f2d5"}, + {file = "contourpy-1.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece6df05e2c41bd46776fbc712e0996f7c94e0d0543af1656956d150c4ca7c81"}, + {file = "contourpy-1.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:573abb30e0e05bf31ed067d2f82500ecfdaec15627a59d63ea2d95714790f5c2"}, + {file = "contourpy-1.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fa36448e6a3a1a9a2ba23c02012c43ed88905ec80163f2ffe2421c7192a5d7"}, + {file = "contourpy-1.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ea9924d28fc5586bf0b42d15f590b10c224117e74409dd7a0be3b62b74a501c"}, + {file = "contourpy-1.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5b75aa69cb4d6f137b36f7eb2ace9280cfb60c55dc5f61c731fdf6f037f958a3"}, + {file = "contourpy-1.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:041b640d4ec01922083645a94bb3b2e777e6b626788f4095cf21abbe266413c1"}, + {file = "contourpy-1.3.1-cp313-cp313-win32.whl", hash = "sha256:36987a15e8ace5f58d4d5da9dca82d498c2bbb28dff6e5d04fbfcc35a9cb3a82"}, + {file = "contourpy-1.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:a7895f46d47671fa7ceec40f31fae721da51ad34bdca0bee83e38870b1f47ffd"}, + {file = "contourpy-1.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9ddeb796389dadcd884c7eb07bd14ef12408aaae358f0e2ae24114d797eede30"}, + {file = "contourpy-1.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19c1555a6801c2f084c7ddc1c6e11f02eb6a6016ca1318dd5452ba3f613a1751"}, + {file = "contourpy-1.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:841ad858cff65c2c04bf93875e384ccb82b654574a6d7f30453a04f04af71342"}, + {file = "contourpy-1.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4318af1c925fb9a4fb190559ef3eec206845f63e80fb603d47f2d6d67683901c"}, + {file = "contourpy-1.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14c102b0eab282427b662cb590f2e9340a9d91a1c297f48729431f2dcd16e14f"}, + {file = "contourpy-1.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05e806338bfeaa006acbdeba0ad681a10be63b26e1b17317bfac3c5d98f36cda"}, + {file = "contourpy-1.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4d76d5993a34ef3df5181ba3c92fabb93f1eaa5729504fb03423fcd9f3177242"}, + {file = "contourpy-1.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:89785bb2a1980c1bd87f0cb1517a71cde374776a5f150936b82580ae6ead44a1"}, + {file = "contourpy-1.3.1-cp313-cp313t-win32.whl", hash = "sha256:8eb96e79b9f3dcadbad2a3891672f81cdcab7f95b27f28f1c67d75f045b6b4f1"}, + {file = "contourpy-1.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:287ccc248c9e0d0566934e7d606201abd74761b5703d804ff3df8935f523d546"}, + {file = "contourpy-1.3.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:b457d6430833cee8e4b8e9b6f07aa1c161e5e0d52e118dc102c8f9bd7dd060d6"}, + {file = "contourpy-1.3.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb76c1a154b83991a3cbbf0dfeb26ec2833ad56f95540b442c73950af2013750"}, + {file = "contourpy-1.3.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:44a29502ca9c7b5ba389e620d44f2fbe792b1fb5734e8b931ad307071ec58c53"}, + {file = "contourpy-1.3.1.tar.gz", hash = "sha256:dfd97abd83335045a913e3bcc4a09c0ceadbe66580cf573fe961f4a825efa699"}, ] [package.dependencies] @@ -2079,22 +2027,23 @@ tokenizer = ["tiktoken"] [[package]] name = "dataclass-wizard" -version = "0.27.0" +version = "0.28.0" description = "Marshal dataclasses to/from JSON. Use field properties with initial values. Construct a dataclass schema with JSON input." optional = false python-versions = "*" files = [ - {file = "dataclass-wizard-0.27.0.tar.gz", hash = "sha256:6bb5d7101949e8e6c0a3a305ceb9e68b24e231858aad8ed4a83c12414ded1d0d"}, - {file = "dataclass_wizard-0.27.0-py2.py3-none-any.whl", hash = "sha256:a9ef05297c54823f6d82382123fd675347f6a1d02ee5a1c988a63855208aa6fb"}, + {file = "dataclass-wizard-0.28.0.tar.gz", hash = "sha256:dd295cff8df6d8167a79048b77e91a3a1287a5905363f8df4de819b50d83b03a"}, + {file = "dataclass_wizard-0.28.0-py2.py3-none-any.whl", hash = "sha256:996fa46475b9192a48a057c34f04597bc97be5bc2f163b99cb1de6f778ca1f7f"}, ] [package.dependencies] typing-extensions = {version = ">=4", markers = "python_version == \"3.9\" or python_version == \"3.10\""} [package.extras] -dev = ["Sphinx (==7.4.7)", "Sphinx (==8.1.3)", "bump2version (==1.0.1)", "coverage (>=6.2)", "dataclass-factory (==2.16)", "dataclasses-json (==0.6.7)", "flake8 (>=3)", "jsons (==1.6.3)", "pip (>=21.3.1)", "pytest (==8.3.3)", "pytest-cov (==6.0.0)", "pytest-mock (>=3.6.1)", "pytimeparse (==1.1.8)", "sphinx-issues (==5.0.0)", "tox (==4.23.2)", "twine (==5.1.1)", "watchdog[watchmedo] (==6.0.0)", "wheel (==0.45.0)"] +dev = ["Sphinx (==7.4.7)", "Sphinx (==8.1.3)", "bump2version (==1.0.1)", "coverage (>=6.2)", "dataclass-factory (==2.16)", "dataclass-wizard[toml]", "dataclasses-json (==0.6.7)", "flake8 (>=3)", "jsons (==1.6.3)", "pip (>=21.3.1)", "pytest (==8.3.3)", "pytest-cov (==6.0.0)", "pytest-mock (>=3.6.1)", "pytimeparse (==1.1.8)", "sphinx-issues (==5.0.0)", "tomli (>=2,<3)", "tomli (>=2,<3)", "tomli-w (>=1,<2)", "tox (==4.23.2)", "twine (==5.1.1)", "watchdog[watchmedo] (==6.0.0)", "wheel (==0.45.0)"] timedelta = ["pytimeparse (>=1.1.7)"] -yaml = ["PyYAML (>=5.3)"] +toml = ["tomli (>=2,<3)", "tomli (>=2,<3)", "tomli-w (>=1,<2)"] +yaml = ["PyYAML (>=6,<7)"] [[package]] name = "dataclasses-json" @@ -2113,13 +2062,13 @@ typing-inspect = ">=0.4.0,<1" [[package]] name = "db-dtypes" -version = "1.3.0" +version = "1.3.1" description = "Pandas Data Types for SQL systems (BigQuery, Spanner)" optional = false python-versions = ">=3.7" files = [ - {file = "db_dtypes-1.3.0-py2.py3-none-any.whl", hash = "sha256:7e65c59f849ccbe6f7bc4d0253edcc212a7907662906921caba3e4aadd0bc277"}, - {file = "db_dtypes-1.3.0.tar.gz", hash = "sha256:7bcbc8858b07474dc85b77bb2f3ae488978d1336f5ea73b58c39d9118bc3e91b"}, + {file = "db_dtypes-1.3.1-py2.py3-none-any.whl", hash = "sha256:fbc9d1740d94aaf2b5ae24601cfc875a69b4635bb9d049e3c3036e9f10203af8"}, + {file = "db_dtypes-1.3.1.tar.gz", hash = "sha256:a058f05dab100891f3e76a7a3db9ad0f107f18dd3d1bdd13680749a2f07eae77"}, ] [package.dependencies] @@ -2152,20 +2101,20 @@ files = [ [[package]] name = "deprecated" -version = "1.2.14" +version = "1.2.15" description = 
"Python @deprecated decorator to deprecate old python classes, functions or methods." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, + {file = "Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320"}, + {file = "deprecated-1.2.15.tar.gz", hash = "sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d"}, ] [package.dependencies] wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "jinja2 (>=3.0.3,<3.1.0)", "setuptools", "sphinx (<2)", "tox"] [[package]] name = "deprecation" @@ -2321,13 +2270,13 @@ files = [ [[package]] name = "duckduckgo-search" -version = "6.3.4" +version = "6.3.5" description = "Search for words, documents, images, news, maps and text translation using the DuckDuckGo.com search engine." optional = false python-versions = ">=3.8" files = [ - {file = "duckduckgo_search-6.3.4-py3-none-any.whl", hash = "sha256:0c18279fb43cbb43e51a251a2133cd0be09604f5a0395fe05409e213bed0cf00"}, - {file = "duckduckgo_search-6.3.4.tar.gz", hash = "sha256:71317d0dee393cb2c0fb8d2eedc76bba0d8c93c752fe97be0030c39b89fd05f9"}, + {file = "duckduckgo_search-6.3.5-py3-none-any.whl", hash = "sha256:5b29ac55f178214870ccc911ef5e1e350c21a904e9e1dbd6445f78c16ee938f9"}, + {file = "duckduckgo_search-6.3.5.tar.gz", hash = "sha256:bc7604859d6f17b88ec634f322b1920207fe3d62aa61ee6dccecb19d6dda6beb"}, ] [package.dependencies] @@ -2496,13 +2445,13 @@ test = ["pillow", "pytest", "pytest-asyncio"] [[package]] name = "fastapi" -version = "0.115.4" +version = "0.115.5" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.115.4-py3-none-any.whl", hash = "sha256:0b504a063ffb3cf96a5e27dc1bc32c80ca743a2528574f9cdc77daa2d31b4742"}, - {file = "fastapi-0.115.4.tar.gz", hash = "sha256:db653475586b091cb8b2fec2ac54a680ac6a158e07406e1abae31679e8826349"}, + {file = "fastapi-0.115.5-py3-none-any.whl", hash = "sha256:596b95adbe1474da47049e802f9a65ab2ffa9c2b07e7efee70eb8a66c9f2f796"}, + {file = "fastapi-0.115.5.tar.gz", hash = "sha256:0e7a4d0dc0d01c68df21887cce0945e72d3c48b9f4f79dfe7a7d53aa08fbb289"}, ] [package.dependencies] @@ -2809,59 +2758,61 @@ fonttools = "*" [[package]] name = "fonttools" -version = "4.54.1" +version = "4.55.0" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.54.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ed7ee041ff7b34cc62f07545e55e1468808691dddfd315d51dd82a6b37ddef2"}, - {file = "fonttools-4.54.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41bb0b250c8132b2fcac148e2e9198e62ff06f3cc472065dff839327945c5882"}, - {file = "fonttools-4.54.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7965af9b67dd546e52afcf2e38641b5be956d68c425bef2158e95af11d229f10"}, - {file = "fonttools-4.54.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:278913a168f90d53378c20c23b80f4e599dca62fbffae4cc620c8eed476b723e"}, - 
{file = "fonttools-4.54.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0e88e3018ac809b9662615072dcd6b84dca4c2d991c6d66e1970a112503bba7e"}, - {file = "fonttools-4.54.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4aa4817f0031206e637d1e685251ac61be64d1adef111060df84fdcbc6ab6c44"}, - {file = "fonttools-4.54.1-cp310-cp310-win32.whl", hash = "sha256:7e3b7d44e18c085fd8c16dcc6f1ad6c61b71ff463636fcb13df7b1b818bd0c02"}, - {file = "fonttools-4.54.1-cp310-cp310-win_amd64.whl", hash = "sha256:dd9cc95b8d6e27d01e1e1f1fae8559ef3c02c76317da650a19047f249acd519d"}, - {file = "fonttools-4.54.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5419771b64248484299fa77689d4f3aeed643ea6630b2ea750eeab219588ba20"}, - {file = "fonttools-4.54.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:301540e89cf4ce89d462eb23a89464fef50915255ece765d10eee8b2bf9d75b2"}, - {file = "fonttools-4.54.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ae5091547e74e7efecc3cbf8e75200bc92daaeb88e5433c5e3e95ea8ce5aa7"}, - {file = "fonttools-4.54.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82834962b3d7c5ca98cb56001c33cf20eb110ecf442725dc5fdf36d16ed1ab07"}, - {file = "fonttools-4.54.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d26732ae002cc3d2ecab04897bb02ae3f11f06dd7575d1df46acd2f7c012a8d8"}, - {file = "fonttools-4.54.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:58974b4987b2a71ee08ade1e7f47f410c367cdfc5a94fabd599c88165f56213a"}, - {file = "fonttools-4.54.1-cp311-cp311-win32.whl", hash = "sha256:ab774fa225238986218a463f3fe151e04d8c25d7de09df7f0f5fce27b1243dbc"}, - {file = "fonttools-4.54.1-cp311-cp311-win_amd64.whl", hash = "sha256:07e005dc454eee1cc60105d6a29593459a06321c21897f769a281ff2d08939f6"}, - {file = "fonttools-4.54.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:54471032f7cb5fca694b5f1a0aaeba4af6e10ae989df408e0216f7fd6cdc405d"}, - {file = "fonttools-4.54.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fa92cb248e573daab8d032919623cc309c005086d743afb014c836636166f08"}, - {file = "fonttools-4.54.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a911591200114969befa7f2cb74ac148bce5a91df5645443371aba6d222e263"}, - {file = "fonttools-4.54.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93d458c8a6a354dc8b48fc78d66d2a8a90b941f7fec30e94c7ad9982b1fa6bab"}, - {file = "fonttools-4.54.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5eb2474a7c5be8a5331146758debb2669bf5635c021aee00fd7c353558fc659d"}, - {file = "fonttools-4.54.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c9c563351ddc230725c4bdf7d9e1e92cbe6ae8553942bd1fb2b2ff0884e8b714"}, - {file = "fonttools-4.54.1-cp312-cp312-win32.whl", hash = "sha256:fdb062893fd6d47b527d39346e0c5578b7957dcea6d6a3b6794569370013d9ac"}, - {file = "fonttools-4.54.1-cp312-cp312-win_amd64.whl", hash = "sha256:e4564cf40cebcb53f3dc825e85910bf54835e8a8b6880d59e5159f0f325e637e"}, - {file = "fonttools-4.54.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6e37561751b017cf5c40fce0d90fd9e8274716de327ec4ffb0df957160be3bff"}, - {file = "fonttools-4.54.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:357cacb988a18aace66e5e55fe1247f2ee706e01debc4b1a20d77400354cddeb"}, - {file = "fonttools-4.54.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f8e953cc0bddc2beaf3a3c3b5dd9ab7554677da72dfaf46951e193c9653e515a"}, - {file = "fonttools-4.54.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:58d29b9a294573d8319f16f2f79e42428ba9b6480442fa1836e4eb89c4d9d61c"}, - {file = "fonttools-4.54.1-cp313-cp313-win32.whl", hash = "sha256:9ef1b167e22709b46bf8168368b7b5d3efeaaa746c6d39661c1b4405b6352e58"}, - {file = "fonttools-4.54.1-cp313-cp313-win_amd64.whl", hash = "sha256:262705b1663f18c04250bd1242b0515d3bbae177bee7752be67c979b7d47f43d"}, - {file = "fonttools-4.54.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ed2f80ca07025551636c555dec2b755dd005e2ea8fbeb99fc5cdff319b70b23b"}, - {file = "fonttools-4.54.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9dc080e5a1c3b2656caff2ac2633d009b3a9ff7b5e93d0452f40cd76d3da3b3c"}, - {file = "fonttools-4.54.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d152d1be65652fc65e695e5619e0aa0982295a95a9b29b52b85775243c06556"}, - {file = "fonttools-4.54.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8583e563df41fdecef31b793b4dd3af8a9caa03397be648945ad32717a92885b"}, - {file = "fonttools-4.54.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0d1d353ef198c422515a3e974a1e8d5b304cd54a4c2eebcae708e37cd9eeffb1"}, - {file = "fonttools-4.54.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fda582236fee135d4daeca056c8c88ec5f6f6d88a004a79b84a02547c8f57386"}, - {file = "fonttools-4.54.1-cp38-cp38-win32.whl", hash = "sha256:e7d82b9e56716ed32574ee106cabca80992e6bbdcf25a88d97d21f73a0aae664"}, - {file = "fonttools-4.54.1-cp38-cp38-win_amd64.whl", hash = "sha256:ada215fd079e23e060157aab12eba0d66704316547f334eee9ff26f8c0d7b8ab"}, - {file = "fonttools-4.54.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5b8a096e649768c2f4233f947cf9737f8dbf8728b90e2771e2497c6e3d21d13"}, - {file = "fonttools-4.54.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e10d2e0a12e18f4e2dd031e1bf7c3d7017be5c8dbe524d07706179f355c5dac"}, - {file = "fonttools-4.54.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31c32d7d4b0958600eac75eaf524b7b7cb68d3a8c196635252b7a2c30d80e986"}, - {file = "fonttools-4.54.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c39287f5c8f4a0c5a55daf9eaf9ccd223ea59eed3f6d467133cc727d7b943a55"}, - {file = "fonttools-4.54.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a7a310c6e0471602fe3bf8efaf193d396ea561486aeaa7adc1f132e02d30c4b9"}, - {file = "fonttools-4.54.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d3b659d1029946f4ff9b6183984578041b520ce0f8fb7078bb37ec7445806b33"}, - {file = "fonttools-4.54.1-cp39-cp39-win32.whl", hash = "sha256:e96bc94c8cda58f577277d4a71f51c8e2129b8b36fd05adece6320dd3d57de8a"}, - {file = "fonttools-4.54.1-cp39-cp39-win_amd64.whl", hash = "sha256:e8a4b261c1ef91e7188a30571be6ad98d1c6d9fa2427244c545e2fa0a2494dd7"}, - {file = "fonttools-4.54.1-py3-none-any.whl", hash = "sha256:37cddd62d83dc4f72f7c3f3c2bcf2697e89a30efb152079896544a93907733bd"}, - {file = "fonttools-4.54.1.tar.gz", hash = "sha256:957f669d4922f92c171ba01bef7f29410668db09f6c02111e22b2bce446f3285"}, + {file = "fonttools-4.55.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:51c029d4c0608a21a3d3d169dfc3fb776fde38f00b35ca11fdab63ba10a16f61"}, + {file = "fonttools-4.55.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bca35b4e411362feab28e576ea10f11268b1aeed883b9f22ed05675b1e06ac69"}, + {file = "fonttools-4.55.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:9ce4ba6981e10f7e0ccff6348e9775ce25ffadbee70c9fd1a3737e3e9f5fa74f"}, + {file = "fonttools-4.55.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31d00f9852a6051dac23294a4cf2df80ced85d1d173a61ba90a3d8f5abc63c60"}, + {file = "fonttools-4.55.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e198e494ca6e11f254bac37a680473a311a88cd40e58f9cc4dc4911dfb686ec6"}, + {file = "fonttools-4.55.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7208856f61770895e79732e1dcbe49d77bd5783adf73ae35f87fcc267df9db81"}, + {file = "fonttools-4.55.0-cp310-cp310-win32.whl", hash = "sha256:e7e6a352ff9e46e8ef8a3b1fe2c4478f8a553e1b5a479f2e899f9dc5f2055880"}, + {file = "fonttools-4.55.0-cp310-cp310-win_amd64.whl", hash = "sha256:636caaeefe586d7c84b5ee0734c1a5ab2dae619dc21c5cf336f304ddb8f6001b"}, + {file = "fonttools-4.55.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fa34aa175c91477485c44ddfbb51827d470011e558dfd5c7309eb31bef19ec51"}, + {file = "fonttools-4.55.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:37dbb3fdc2ef7302d3199fb12468481cbebaee849e4b04bc55b77c24e3c49189"}, + {file = "fonttools-4.55.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5263d8e7ef3c0ae87fbce7f3ec2f546dc898d44a337e95695af2cd5ea21a967"}, + {file = "fonttools-4.55.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f307f6b5bf9e86891213b293e538d292cd1677e06d9faaa4bf9c086ad5f132f6"}, + {file = "fonttools-4.55.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f0a4b52238e7b54f998d6a56b46a2c56b59c74d4f8a6747fb9d4042190f37cd3"}, + {file = "fonttools-4.55.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3e569711464f777a5d4ef522e781dc33f8095ab5efd7548958b36079a9f2f88c"}, + {file = "fonttools-4.55.0-cp311-cp311-win32.whl", hash = "sha256:2b3ab90ec0f7b76c983950ac601b58949f47aca14c3f21eed858b38d7ec42b05"}, + {file = "fonttools-4.55.0-cp311-cp311-win_amd64.whl", hash = "sha256:aa046f6a63bb2ad521004b2769095d4c9480c02c1efa7d7796b37826508980b6"}, + {file = "fonttools-4.55.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:838d2d8870f84fc785528a692e724f2379d5abd3fc9dad4d32f91cf99b41e4a7"}, + {file = "fonttools-4.55.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f46b863d74bab7bb0d395f3b68d3f52a03444964e67ce5c43ce43a75efce9246"}, + {file = "fonttools-4.55.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33b52a9cfe4e658e21b1f669f7309b4067910321757fec53802ca8f6eae96a5a"}, + {file = "fonttools-4.55.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:732a9a63d6ea4a81b1b25a1f2e5e143761b40c2e1b79bb2b68e4893f45139a40"}, + {file = "fonttools-4.55.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7dd91ac3fcb4c491bb4763b820bcab6c41c784111c24172616f02f4bc227c17d"}, + {file = "fonttools-4.55.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1f0e115281a32ff532118aa851ef497a1b7cda617f4621c1cdf81ace3e36fb0c"}, + {file = "fonttools-4.55.0-cp312-cp312-win32.whl", hash = "sha256:6c99b5205844f48a05cb58d4a8110a44d3038c67ed1d79eb733c4953c628b0f6"}, + {file = "fonttools-4.55.0-cp312-cp312-win_amd64.whl", hash = "sha256:f8c8c76037d05652510ae45be1cd8fb5dd2fd9afec92a25374ac82255993d57c"}, + {file = "fonttools-4.55.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8118dc571921dc9e4b288d9cb423ceaf886d195a2e5329cc427df82bba872cd9"}, + {file = "fonttools-4.55.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:01124f2ca6c29fad4132d930da69158d3f49b2350e4a779e1efbe0e82bd63f6c"}, + {file = "fonttools-4.55.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81ffd58d2691f11f7c8438796e9f21c374828805d33e83ff4b76e4635633674c"}, + {file = "fonttools-4.55.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5435e5f1eb893c35c2bc2b9cd3c9596b0fcb0a59e7a14121562986dd4c47b8dd"}, + {file = "fonttools-4.55.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d12081729280c39d001edd0f4f06d696014c26e6e9a0a55488fabc37c28945e4"}, + {file = "fonttools-4.55.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a7ad1f1b98ab6cb927ab924a38a8649f1ffd7525c75fe5b594f5dab17af70e18"}, + {file = "fonttools-4.55.0-cp313-cp313-win32.whl", hash = "sha256:abe62987c37630dca69a104266277216de1023cf570c1643bb3a19a9509e7a1b"}, + {file = "fonttools-4.55.0-cp313-cp313-win_amd64.whl", hash = "sha256:2863555ba90b573e4201feaf87a7e71ca3b97c05aa4d63548a4b69ea16c9e998"}, + {file = "fonttools-4.55.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:00f7cf55ad58a57ba421b6a40945b85ac7cc73094fb4949c41171d3619a3a47e"}, + {file = "fonttools-4.55.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f27526042efd6f67bfb0cc2f1610fa20364396f8b1fc5edb9f45bb815fb090b2"}, + {file = "fonttools-4.55.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e67974326af6a8879dc2a4ec63ab2910a1c1a9680ccd63e4a690950fceddbe"}, + {file = "fonttools-4.55.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61dc0a13451143c5e987dec5254d9d428f3c2789a549a7cf4f815b63b310c1cc"}, + {file = "fonttools-4.55.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:b2e526b325a903868c62155a6a7e24df53f6ce4c5c3160214d8fe1be2c41b478"}, + {file = "fonttools-4.55.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b7ef9068a1297714e6fefe5932c33b058aa1d45a2b8be32a4c6dee602ae22b5c"}, + {file = "fonttools-4.55.0-cp38-cp38-win32.whl", hash = "sha256:55718e8071be35dff098976bc249fc243b58efa263768c611be17fe55975d40a"}, + {file = "fonttools-4.55.0-cp38-cp38-win_amd64.whl", hash = "sha256:553bd4f8cc327f310c20158e345e8174c8eed49937fb047a8bda51daf2c353c8"}, + {file = "fonttools-4.55.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f901cef813f7c318b77d1c5c14cf7403bae5cb977cede023e22ba4316f0a8f6"}, + {file = "fonttools-4.55.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8c9679fc0dd7e8a5351d321d8d29a498255e69387590a86b596a45659a39eb0d"}, + {file = "fonttools-4.55.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd2820a8b632f3307ebb0bf57948511c2208e34a4939cf978333bc0a3f11f838"}, + {file = "fonttools-4.55.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23bbbb49bec613a32ed1b43df0f2b172313cee690c2509f1af8fdedcf0a17438"}, + {file = "fonttools-4.55.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a656652e1f5d55b9728937a7e7d509b73d23109cddd4e89ee4f49bde03b736c6"}, + {file = "fonttools-4.55.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f50a1f455902208486fbca47ce33054208a4e437b38da49d6721ce2fef732fcf"}, + {file = "fonttools-4.55.0-cp39-cp39-win32.whl", hash = "sha256:161d1ac54c73d82a3cded44202d0218ab007fde8cf194a23d3dd83f7177a2f03"}, + {file = "fonttools-4.55.0-cp39-cp39-win_amd64.whl", hash = "sha256:ca7fd6987c68414fece41c96836e945e1f320cda56fc96ffdc16e54a44ec57a2"}, + {file = "fonttools-4.55.0-py3-none-any.whl", hash = 
"sha256:12db5888cd4dd3fcc9f0ee60c6edd3c7e1fd44b7dd0f31381ea03df68f8a153f"}, + {file = "fonttools-4.55.0.tar.gz", hash = "sha256:7636acc6ab733572d5e7eec922b254ead611f1cdad17be3f0be7418e8bfaca71"}, ] [package.extras] @@ -3371,13 +3322,13 @@ xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] [[package]] name = "google-cloud-bigquery" -version = "3.26.0" +version = "3.27.0" description = "Google BigQuery API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google_cloud_bigquery-3.26.0-py2.py3-none-any.whl", hash = "sha256:e0e9ad28afa67a18696e624cbccab284bf2c0a3f6eeb9eeb0426c69b943793a8"}, - {file = "google_cloud_bigquery-3.26.0.tar.gz", hash = "sha256:edbdc788beea659e04c0af7fe4dcd6d9155344b98951a0d5055bd2f15da4ba23"}, + {file = "google_cloud_bigquery-3.27.0-py2.py3-none-any.whl", hash = "sha256:b53b0431e5ba362976a4cd8acce72194b4116cdf8115030c7b339b884603fcc3"}, + {file = "google_cloud_bigquery-3.27.0.tar.gz", hash = "sha256:379c524054d7b090fa56d0c22662cc6e6458a6229b6754c0e7177e3a73421d2c"}, ] [package.dependencies] @@ -3420,13 +3371,13 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] [[package]] name = "google-cloud-resource-manager" -version = "1.13.0" +version = "1.13.1" description = "Google Cloud Resource Manager API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google_cloud_resource_manager-1.13.0-py2.py3-none-any.whl", hash = "sha256:33beb4528c2b7aee7a97ed843710581a7b4a27f3dd1fa41a0bf3359b3d68853f"}, - {file = "google_cloud_resource_manager-1.13.0.tar.gz", hash = "sha256:ae4bf69443f14b37007d4d84150115b0942e8b01650fd7a1fc6ff4dc1760e5c4"}, + {file = "google_cloud_resource_manager-1.13.1-py2.py3-none-any.whl", hash = "sha256:abdc7d443ab6c0763b8ed49ab59203e223f14c683df69e3748d5eb2237475f5f"}, + {file = "google_cloud_resource_manager-1.13.1.tar.gz", hash = "sha256:bee9f2fb1d856731182b7cc05980d216aae848947ccdadf2848a2c64ccd6bbea"}, ] [package.dependencies] @@ -3690,70 +3641,70 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4 [[package]] name = "grpcio" -version = "1.67.1" +version = "1.68.0" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio-1.67.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:8b0341d66a57f8a3119b77ab32207072be60c9bf79760fa609c5609f2deb1f3f"}, - {file = "grpcio-1.67.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:f5a27dddefe0e2357d3e617b9079b4bfdc91341a91565111a21ed6ebbc51b22d"}, - {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:43112046864317498a33bdc4797ae6a268c36345a910de9b9c17159d8346602f"}, - {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9b929f13677b10f63124c1a410994a401cdd85214ad83ab67cc077fc7e480f0"}, - {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7d1797a8a3845437d327145959a2c0c47c05947c9eef5ff1a4c80e499dcc6fa"}, - {file = "grpcio-1.67.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0489063974d1452436139501bf6b180f63d4977223ee87488fe36858c5725292"}, - {file = "grpcio-1.67.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9fd042de4a82e3e7aca44008ee2fb5da01b3e5adb316348c21980f7f58adc311"}, - {file = "grpcio-1.67.1-cp310-cp310-win32.whl", hash = "sha256:638354e698fd0c6c76b04540a850bf1db27b4d2515a19fcd5cf645c48d3eb1ed"}, - {file = "grpcio-1.67.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:608d87d1bdabf9e2868b12338cd38a79969eaf920c89d698ead08f48de9c0f9e"}, - {file = "grpcio-1.67.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:7818c0454027ae3384235a65210bbf5464bd715450e30a3d40385453a85a70cb"}, - {file = "grpcio-1.67.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ea33986b70f83844cd00814cee4451055cd8cab36f00ac64a31f5bb09b31919e"}, - {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:c7a01337407dd89005527623a4a72c5c8e2894d22bead0895306b23c6695698f"}, - {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b866f73224b0634f4312a4674c1be21b2b4afa73cb20953cbbb73a6b36c3cc"}, - {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9fff78ba10d4250bfc07a01bd6254a6d87dc67f9627adece85c0b2ed754fa96"}, - {file = "grpcio-1.67.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8a23cbcc5bb11ea7dc6163078be36c065db68d915c24f5faa4f872c573bb400f"}, - {file = "grpcio-1.67.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1a65b503d008f066e994f34f456e0647e5ceb34cfcec5ad180b1b44020ad4970"}, - {file = "grpcio-1.67.1-cp311-cp311-win32.whl", hash = "sha256:e29ca27bec8e163dca0c98084040edec3bc49afd10f18b412f483cc68c712744"}, - {file = "grpcio-1.67.1-cp311-cp311-win_amd64.whl", hash = "sha256:786a5b18544622bfb1e25cc08402bd44ea83edfb04b93798d85dca4d1a0b5be5"}, - {file = "grpcio-1.67.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:267d1745894200e4c604958da5f856da6293f063327cb049a51fe67348e4f953"}, - {file = "grpcio-1.67.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:85f69fdc1d28ce7cff8de3f9c67db2b0ca9ba4449644488c1e0303c146135ddb"}, - {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f26b0b547eb8d00e195274cdfc63ce64c8fc2d3e2d00b12bf468ece41a0423a0"}, - {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4422581cdc628f77302270ff839a44f4c24fdc57887dc2a45b7e53d8fc2376af"}, - {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7616d2ded471231c701489190379e0c311ee0a6c756f3c03e6a62b95a7146e"}, - {file = "grpcio-1.67.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8a00efecde9d6fcc3ab00c13f816313c040a28450e5e25739c24f432fc6d3c75"}, - {file = "grpcio-1.67.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:699e964923b70f3101393710793289e42845791ea07565654ada0969522d0a38"}, - {file = "grpcio-1.67.1-cp312-cp312-win32.whl", hash = "sha256:4e7b904484a634a0fff132958dabdb10d63e0927398273917da3ee103e8d1f78"}, - {file = "grpcio-1.67.1-cp312-cp312-win_amd64.whl", hash = "sha256:5721e66a594a6c4204458004852719b38f3d5522082be9061d6510b455c90afc"}, - {file = "grpcio-1.67.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:aa0162e56fd10a5547fac8774c4899fc3e18c1aa4a4759d0ce2cd00d3696ea6b"}, - {file = "grpcio-1.67.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:beee96c8c0b1a75d556fe57b92b58b4347c77a65781ee2ac749d550f2a365dc1"}, - {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:a93deda571a1bf94ec1f6fcda2872dad3ae538700d94dc283c672a3b508ba3af"}, - {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e6f255980afef598a9e64a24efce87b625e3e3c80a45162d111a461a9f92955"}, - {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e838cad2176ebd5d4a8bb03955138d6589ce9e2ce5d51c3ada34396dbd2dba8"}, - {file = 
"grpcio-1.67.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a6703916c43b1d468d0756c8077b12017a9fcb6a1ef13faf49e67d20d7ebda62"}, - {file = "grpcio-1.67.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:917e8d8994eed1d86b907ba2a61b9f0aef27a2155bca6cbb322430fc7135b7bb"}, - {file = "grpcio-1.67.1-cp313-cp313-win32.whl", hash = "sha256:e279330bef1744040db8fc432becc8a727b84f456ab62b744d3fdb83f327e121"}, - {file = "grpcio-1.67.1-cp313-cp313-win_amd64.whl", hash = "sha256:fa0c739ad8b1996bd24823950e3cb5152ae91fca1c09cc791190bf1627ffefba"}, - {file = "grpcio-1.67.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:178f5db771c4f9a9facb2ab37a434c46cb9be1a75e820f187ee3d1e7805c4f65"}, - {file = "grpcio-1.67.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0f3e49c738396e93b7ba9016e153eb09e0778e776df6090c1b8c91877cc1c426"}, - {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:24e8a26dbfc5274d7474c27759b54486b8de23c709d76695237515bc8b5baeab"}, - {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b6c16489326d79ead41689c4b84bc40d522c9a7617219f4ad94bc7f448c5085"}, - {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e6a4dcf5af7bbc36fd9f81c9f372e8ae580870a9e4b6eafe948cd334b81cf3"}, - {file = "grpcio-1.67.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:95b5f2b857856ed78d72da93cd7d09b6db8ef30102e5e7fe0961fe4d9f7d48e8"}, - {file = "grpcio-1.67.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b49359977c6ec9f5d0573ea4e0071ad278ef905aa74e420acc73fd28ce39e9ce"}, - {file = "grpcio-1.67.1-cp38-cp38-win32.whl", hash = "sha256:f5b76ff64aaac53fede0cc93abf57894ab2a7362986ba22243d06218b93efe46"}, - {file = "grpcio-1.67.1-cp38-cp38-win_amd64.whl", hash = "sha256:804c6457c3cd3ec04fe6006c739579b8d35c86ae3298ffca8de57b493524b771"}, - {file = "grpcio-1.67.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:a25bdea92b13ff4d7790962190bf6bf5c4639876e01c0f3dda70fc2769616335"}, - {file = "grpcio-1.67.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cdc491ae35a13535fd9196acb5afe1af37c8237df2e54427be3eecda3653127e"}, - {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:85f862069b86a305497e74d0dc43c02de3d1d184fc2c180993aa8aa86fbd19b8"}, - {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec74ef02010186185de82cc594058a3ccd8d86821842bbac9873fd4a2cf8be8d"}, - {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01f616a964e540638af5130469451cf580ba8c7329f45ca998ab66e0c7dcdb04"}, - {file = "grpcio-1.67.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:299b3d8c4f790c6bcca485f9963b4846dd92cf6f1b65d3697145d005c80f9fe8"}, - {file = "grpcio-1.67.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:60336bff760fbb47d7e86165408126f1dded184448e9a4c892189eb7c9d3f90f"}, - {file = "grpcio-1.67.1-cp39-cp39-win32.whl", hash = "sha256:5ed601c4c6008429e3d247ddb367fe8c7259c355757448d7c1ef7bd4a6739e8e"}, - {file = "grpcio-1.67.1-cp39-cp39-win_amd64.whl", hash = "sha256:5db70d32d6703b89912af16d6d45d78406374a8b8ef0d28140351dd0ec610e98"}, - {file = "grpcio-1.67.1.tar.gz", hash = "sha256:3dc2ed4cabea4dc14d5e708c2b426205956077cc5de419b4d4079315017e9732"}, + {file = "grpcio-1.68.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:619b5d0f29f4f5351440e9343224c3e19912c21aeda44e0c49d0d147a8d01544"}, + {file = "grpcio-1.68.0-cp310-cp310-macosx_12_0_universal2.whl", hash = 
"sha256:a59f5822f9459bed098ffbceb2713abbf7c6fd13f2b9243461da5c338d0cd6c3"}, + {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:c03d89df516128febc5a7e760d675b478ba25802447624edf7aa13b1e7b11e2a"}, + {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44bcbebb24363d587472089b89e2ea0ab2e2b4df0e4856ba4c0b087c82412121"}, + {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79f81b7fbfb136247b70465bd836fa1733043fdee539cd6031cb499e9608a110"}, + {file = "grpcio-1.68.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:88fb2925789cfe6daa20900260ef0a1d0a61283dfb2d2fffe6194396a354c618"}, + {file = "grpcio-1.68.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:99f06232b5c9138593ae6f2e355054318717d32a9c09cdc5a2885540835067a1"}, + {file = "grpcio-1.68.0-cp310-cp310-win32.whl", hash = "sha256:a6213d2f7a22c3c30a479fb5e249b6b7e648e17f364598ff64d08a5136fe488b"}, + {file = "grpcio-1.68.0-cp310-cp310-win_amd64.whl", hash = "sha256:15327ab81131ef9b94cb9f45b5bd98803a179c7c61205c8c0ac9aff9d6c4e82a"}, + {file = "grpcio-1.68.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:3b2b559beb2d433129441783e5f42e3be40a9e1a89ec906efabf26591c5cd415"}, + {file = "grpcio-1.68.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e46541de8425a4d6829ac6c5d9b16c03c292105fe9ebf78cb1c31e8d242f9155"}, + {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:c1245651f3c9ea92a2db4f95d37b7597db6b246d5892bca6ee8c0e90d76fb73c"}, + {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f1931c7aa85be0fa6cea6af388e576f3bf6baee9e5d481c586980c774debcb4"}, + {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b0ff09c81e3aded7a183bc6473639b46b6caa9c1901d6f5e2cba24b95e59e30"}, + {file = "grpcio-1.68.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8c73f9fbbaee1a132487e31585aa83987ddf626426d703ebcb9a528cf231c9b1"}, + {file = "grpcio-1.68.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6b2f98165ea2790ea159393a2246b56f580d24d7da0d0342c18a085299c40a75"}, + {file = "grpcio-1.68.0-cp311-cp311-win32.whl", hash = "sha256:e1e7ed311afb351ff0d0e583a66fcb39675be112d61e7cfd6c8269884a98afbc"}, + {file = "grpcio-1.68.0-cp311-cp311-win_amd64.whl", hash = "sha256:e0d2f68eaa0a755edd9a47d40e50dba6df2bceda66960dee1218da81a2834d27"}, + {file = "grpcio-1.68.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8af6137cc4ae8e421690d276e7627cfc726d4293f6607acf9ea7260bd8fc3d7d"}, + {file = "grpcio-1.68.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4028b8e9a3bff6f377698587d642e24bd221810c06579a18420a17688e421af7"}, + {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f60fa2adf281fd73ae3a50677572521edca34ba373a45b457b5ebe87c2d01e1d"}, + {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e18589e747c1e70b60fab6767ff99b2d0c359ea1db8a2cb524477f93cdbedf5b"}, + {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0d30f3fee9372796f54d3100b31ee70972eaadcc87314be369360248a3dcffe"}, + {file = "grpcio-1.68.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7e0a3e72c0e9a1acab77bef14a73a416630b7fd2cbd893c0a873edc47c42c8cd"}, + {file = "grpcio-1.68.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a831dcc343440969aaa812004685ed322cdb526cd197112d0db303b0da1e8659"}, + {file = 
"grpcio-1.68.0-cp312-cp312-win32.whl", hash = "sha256:5a180328e92b9a0050958ced34dddcb86fec5a8b332f5a229e353dafc16cd332"}, + {file = "grpcio-1.68.0-cp312-cp312-win_amd64.whl", hash = "sha256:2bddd04a790b69f7a7385f6a112f46ea0b34c4746f361ebafe9ca0be567c78e9"}, + {file = "grpcio-1.68.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:fc05759ffbd7875e0ff2bd877be1438dfe97c9312bbc558c8284a9afa1d0f40e"}, + {file = "grpcio-1.68.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:15fa1fe25d365a13bc6d52fcac0e3ee1f9baebdde2c9b3b2425f8a4979fccea1"}, + {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:32a9cb4686eb2e89d97022ecb9e1606d132f85c444354c17a7dbde4a455e4a3b"}, + {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dba037ff8d284c8e7ea9a510c8ae0f5b016004f13c3648f72411c464b67ff2fb"}, + {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0efbbd849867e0e569af09e165363ade75cf84f5229b2698d53cf22c7a4f9e21"}, + {file = "grpcio-1.68.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:4e300e6978df0b65cc2d100c54e097c10dfc7018b9bd890bbbf08022d47f766d"}, + {file = "grpcio-1.68.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:6f9c7ad1a23e1047f827385f4713b5b8c6c7d325705be1dd3e31fb00dcb2f665"}, + {file = "grpcio-1.68.0-cp313-cp313-win32.whl", hash = "sha256:3ac7f10850fd0487fcce169c3c55509101c3bde2a3b454869639df2176b60a03"}, + {file = "grpcio-1.68.0-cp313-cp313-win_amd64.whl", hash = "sha256:afbf45a62ba85a720491bfe9b2642f8761ff348006f5ef67e4622621f116b04a"}, + {file = "grpcio-1.68.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:f8f695d9576ce836eab27ba7401c60acaf9ef6cf2f70dfe5462055ba3df02cc3"}, + {file = "grpcio-1.68.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9fe1b141cda52f2ca73e17d2d3c6a9f3f3a0c255c216b50ce616e9dca7e3441d"}, + {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:4df81d78fd1646bf94ced4fb4cd0a7fe2e91608089c522ef17bc7db26e64effd"}, + {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46a2d74d4dd8993151c6cd585594c082abe74112c8e4175ddda4106f2ceb022f"}, + {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a17278d977746472698460c63abf333e1d806bd41f2224f90dbe9460101c9796"}, + {file = "grpcio-1.68.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:15377bce516b1c861c35e18eaa1c280692bf563264836cece693c0f169b48829"}, + {file = "grpcio-1.68.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cc5f0a4f5904b8c25729a0498886b797feb817d1fd3812554ffa39551112c161"}, + {file = "grpcio-1.68.0-cp38-cp38-win32.whl", hash = "sha256:def1a60a111d24376e4b753db39705adbe9483ef4ca4761f825639d884d5da78"}, + {file = "grpcio-1.68.0-cp38-cp38-win_amd64.whl", hash = "sha256:55d3b52fd41ec5772a953612db4e70ae741a6d6ed640c4c89a64f017a1ac02b5"}, + {file = "grpcio-1.68.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:0d230852ba97654453d290e98d6aa61cb48fa5fafb474fb4c4298d8721809354"}, + {file = "grpcio-1.68.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:50992f214264e207e07222703c17d9cfdcc2c46ed5a1ea86843d440148ebbe10"}, + {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:14331e5c27ed3545360464a139ed279aa09db088f6e9502e95ad4bfa852bb116"}, + {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f84890b205692ea813653ece4ac9afa2139eae136e419231b0eec7c39fdbe4c2"}, + {file = 
"grpcio-1.68.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0cf343c6f4f6aa44863e13ec9ddfe299e0be68f87d68e777328bff785897b05"}, + {file = "grpcio-1.68.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fd2c2d47969daa0e27eadaf15c13b5e92605c5e5953d23c06d0b5239a2f176d3"}, + {file = "grpcio-1.68.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:18668e36e7f4045820f069997834e94e8275910b1f03e078a6020bd464cb2363"}, + {file = "grpcio-1.68.0-cp39-cp39-win32.whl", hash = "sha256:2af76ab7c427aaa26aa9187c3e3c42f38d3771f91a20f99657d992afada2294a"}, + {file = "grpcio-1.68.0-cp39-cp39-win_amd64.whl", hash = "sha256:e694b5928b7b33ca2d3b4d5f9bf8b5888906f181daff6b406f4938f3a997a490"}, + {file = "grpcio-1.68.0.tar.gz", hash = "sha256:7e7483d39b4a4fddb9906671e9ea21aaad4f031cdfc349fec76bdfa1e404543a"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.67.1)"] +protobuf = ["grpcio-tools (>=1.68.0)"] [[package]] name = "grpcio-status" @@ -4017,13 +3968,13 @@ lxml = ["lxml"] [[package]] name = "httpcore" -version = "1.0.6" +version = "1.0.7" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, - {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, ] [package.dependencies] @@ -4324,84 +4275,84 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jiter" -version = "0.7.0" +version = "0.7.1" description = "Fast iterable JSON parser." 
optional = false python-versions = ">=3.8" files = [ - {file = "jiter-0.7.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:e14027f61101b3f5e173095d9ecf95c1cac03ffe45a849279bde1d97e559e314"}, - {file = "jiter-0.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:979ec4711c2e37ac949561858bd42028884c9799516a923e1ff0b501ef341a4a"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:662d5d3cca58ad6af7a3c6226b641c8655de5beebcb686bfde0df0f21421aafa"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1d89008fb47043a469f97ad90840b97ba54e7c3d62dc7cbb6cbf938bd0caf71d"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8b16c35c846a323ce9067170d5ab8c31ea3dbcab59c4f7608bbbf20c2c3b43f"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9e82daaa1b0a68704f9029b81e664a5a9de3e466c2cbaabcda5875f961702e7"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43a87a9f586636e1f0dd3651a91f79b491ea0d9fd7cbbf4f5c463eebdc48bda7"}, - {file = "jiter-0.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2ec05b1615f96cc3e4901678bc863958611584072967d9962f9e571d60711d52"}, - {file = "jiter-0.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a5cb97e35370bde7aa0d232a7f910f5a0fbbc96bc0a7dbaa044fd5cd6bcd7ec3"}, - {file = "jiter-0.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cb316dacaf48c8c187cea75d0d7f835f299137e6fdd13f691dff8f92914015c7"}, - {file = "jiter-0.7.0-cp310-none-win32.whl", hash = "sha256:243f38eb4072763c54de95b14ad283610e0cd3bf26393870db04e520f60eebb3"}, - {file = "jiter-0.7.0-cp310-none-win_amd64.whl", hash = "sha256:2221d5603c139f6764c54e37e7c6960c469cbcd76928fb10d15023ba5903f94b"}, - {file = "jiter-0.7.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:91cec0ad755bd786c9f769ce8d843af955df6a8e56b17658771b2d5cb34a3ff8"}, - {file = "jiter-0.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:feba70a28a27d962e353e978dbb6afd798e711c04cb0b4c5e77e9d3779033a1a"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9d866ec066c3616cacb8535dbda38bb1d470b17b25f0317c4540182bc886ce2"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8e7a7a00b6f9f18289dd563596f97ecaba6c777501a8ba04bf98e03087bcbc60"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9aaf564094c7db8687f2660605e099f3d3e6ea5e7135498486674fcb78e29165"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4d27e09825c1b3c7a667adb500ce8b840e8fc9f630da8454b44cdd4fb0081bb"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca7c287da9c1d56dda88da1d08855a787dbb09a7e2bd13c66a2e288700bd7c7"}, - {file = "jiter-0.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:db19a6d160f093cbc8cd5ea2abad420b686f6c0e5fb4f7b41941ebc6a4f83cda"}, - {file = "jiter-0.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e46a63c7f877cf7441ffc821c28287cfb9f533ae6ed707bde15e7d4dfafa7ae"}, - {file = "jiter-0.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ba426fa7ff21cb119fa544b75dd3fbee6a70e55a5829709c0338d07ccd30e6d"}, - {file = "jiter-0.7.0-cp311-none-win32.whl", hash = 
"sha256:c07f55a64912b0c7982377831210836d2ea92b7bd343fca67a32212dd72e38e0"}, - {file = "jiter-0.7.0-cp311-none-win_amd64.whl", hash = "sha256:ed27b2c43e1b5f6c7fedc5c11d4d8bfa627de42d1143d87e39e2e83ddefd861a"}, - {file = "jiter-0.7.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ac7930bcaaeb1e229e35c91c04ed2e9f39025b86ee9fc3141706bbf6fff4aeeb"}, - {file = "jiter-0.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:571feae3e7c901a8eedde9fd2865b0dfc1432fb15cab8c675a8444f7d11b7c5d"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8af4df8a262fa2778b68c2a03b6e9d1cb4d43d02bea6976d46be77a3a331af1"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd028d4165097a611eb0c7494d8c1f2aebd46f73ca3200f02a175a9c9a6f22f5"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6b487247c7836810091e9455efe56a52ec51bfa3a222237e1587d04d3e04527"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6d28a92f28814e1a9f2824dc11f4e17e1df1f44dc4fdeb94c5450d34bcb2602"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90443994bbafe134f0b34201dad3ebe1c769f0599004084e046fb249ad912425"}, - {file = "jiter-0.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f9abf464f9faac652542ce8360cea8e68fba2b78350e8a170248f9bcc228702a"}, - {file = "jiter-0.7.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db7a8d99fc5f842f7d2852f06ccaed066532292c41723e5dff670c339b649f88"}, - {file = "jiter-0.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:15cf691ebd8693b70c94627d6b748f01e6d697d9a6e9f2bc310934fcfb7cf25e"}, - {file = "jiter-0.7.0-cp312-none-win32.whl", hash = "sha256:9dcd54fa422fb66ca398bec296fed5f58e756aa0589496011cfea2abb5be38a5"}, - {file = "jiter-0.7.0-cp312-none-win_amd64.whl", hash = "sha256:cc989951f73f9375b8eacd571baaa057f3d7d11b7ce6f67b9d54642e7475bfad"}, - {file = "jiter-0.7.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:24cecd18df540963cd27c08ca5ce1d0179f229ff78066d9eecbe5add29361340"}, - {file = "jiter-0.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d41b46236b90b043cca73785674c23d2a67d16f226394079d0953f94e765ed76"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b160db0987171365c153e406a45dcab0ee613ae3508a77bfff42515cb4ce4d6e"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d1c8d91e0f0bd78602eaa081332e8ee4f512c000716f5bc54e9a037306d693a7"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:997706c683195eeff192d2e5285ce64d2a610414f37da3a3f2625dcf8517cf90"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ea52a8a0ff0229ab2920284079becd2bae0688d432fca94857ece83bb49c541"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d77449d2738cf74752bb35d75ee431af457e741124d1db5e112890023572c7c"}, - {file = "jiter-0.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a8203519907a1d81d6cb00902c98e27c2d0bf25ce0323c50ca594d30f5f1fbcf"}, - {file = "jiter-0.7.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41d15ccc53931c822dd7f1aebf09faa3cda2d7b48a76ef304c7dbc19d1302e51"}, - {file = "jiter-0.7.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:febf3179b2fabf71fbd2fd52acb8594163bb173348b388649567a548f356dbf6"}, - {file = "jiter-0.7.0-cp313-none-win32.whl", hash = "sha256:4a8e2d866e7eda19f012444e01b55079d8e1c4c30346aaac4b97e80c54e2d6d3"}, - {file = "jiter-0.7.0-cp313-none-win_amd64.whl", hash = "sha256:7417c2b928062c496f381fb0cb50412eee5ad1d8b53dbc0e011ce45bb2de522c"}, - {file = "jiter-0.7.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9c62c737b5368e51e74960a08fe1adc807bd270227291daede78db24d5fbf556"}, - {file = "jiter-0.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e4640722b1bef0f6e342fe4606aafaae0eb4f4be5c84355bb6867f34400f6688"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f367488c3b9453eab285424c61098faa1cab37bb49425e69c8dca34f2dfe7d69"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0cf5d42beb3514236459454e3287db53d9c4d56c4ebaa3e9d0efe81b19495129"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cc5190ea1113ee6f7252fa8a5fe5a6515422e378356c950a03bbde5cafbdbaab"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63ee47a149d698796a87abe445fc8dee21ed880f09469700c76c8d84e0d11efd"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48592c26ea72d3e71aa4bea0a93454df907d80638c3046bb0705507b6704c0d7"}, - {file = "jiter-0.7.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:79fef541199bd91cfe8a74529ecccb8eaf1aca38ad899ea582ebbd4854af1e51"}, - {file = "jiter-0.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d1ef6bb66041f2514739240568136c81b9dcc64fd14a43691c17ea793b6535c0"}, - {file = "jiter-0.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aca4d950863b1c238e315bf159466e064c98743eef3bd0ff9617e48ff63a4715"}, - {file = "jiter-0.7.0-cp38-none-win32.whl", hash = "sha256:897745f230350dcedb8d1ebe53e33568d48ea122c25e6784402b6e4e88169be7"}, - {file = "jiter-0.7.0-cp38-none-win_amd64.whl", hash = "sha256:b928c76a422ef3d0c85c5e98c498ce3421b313c5246199541e125b52953e1bc0"}, - {file = "jiter-0.7.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c9b669ff6f8ba08270dee9ccf858d3b0203b42314a428a1676762f2d390fbb64"}, - {file = "jiter-0.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b5be919bacd73ca93801c3042bce6e95cb9c555a45ca83617b9b6c89df03b9c2"}, - {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a282e1e8a396dabcea82d64f9d05acf7efcf81ecdd925b967020dcb0e671c103"}, - {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:17ecb1a578a56e97a043c72b463776b5ea30343125308f667fb8fce4b3796735"}, - {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b6045fa0527129218cdcd8a8b839f678219686055f31ebab35f87d354d9c36e"}, - {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:189cc4262a92e33c19d4fd24018f5890e4e6da5b2581f0059938877943f8298c"}, - {file = "jiter-0.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c138414839effbf30d185e30475c6dc8a16411a1e3681e5fd4605ab1233ac67a"}, - {file = "jiter-0.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2791604acef33da6b72d5ecf885a32384bcaf9aa1e4be32737f3b8b9588eef6a"}, - {file = "jiter-0.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:ae60ec89037a78d60bbf3d8b127f1567769c8fa24886e0abed3f622791dea478"}, - {file = "jiter-0.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:836f03dea312967635233d826f783309b98cfd9ccc76ac776e224cfcef577862"}, - {file = "jiter-0.7.0-cp39-none-win32.whl", hash = "sha256:ebc30ae2ce4bc4986e1764c404b4ea1924f926abf02ce92516485098f8545374"}, - {file = "jiter-0.7.0-cp39-none-win_amd64.whl", hash = "sha256:abf596f951370c648f37aa9899deab296c42a3829736e598b0dd10b08f77a44d"}, - {file = "jiter-0.7.0.tar.gz", hash = "sha256:c061d9738535497b5509f8970584f20de1e900806b239a39a9994fc191dad630"}, + {file = "jiter-0.7.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:262e96d06696b673fad6f257e6a0abb6e873dc22818ca0e0600f4a1189eb334f"}, + {file = "jiter-0.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be6de02939aac5be97eb437f45cfd279b1dc9de358b13ea6e040e63a3221c40d"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935f10b802bc1ce2b2f61843e498c7720aa7f4e4bb7797aa8121eab017293c3d"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9cd3cccccabf5064e4bb3099c87bf67db94f805c1e62d1aefd2b7476e90e0ee2"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4aa919ebfc5f7b027cc368fe3964c0015e1963b92e1db382419dadb098a05192"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ae2d01e82c94491ce4d6f461a837f63b6c4e6dd5bb082553a70c509034ff3d4"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f9568cd66dbbdab67ae1b4c99f3f7da1228c5682d65913e3f5f95586b3cb9a9"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9ecbf4e20ec2c26512736284dc1a3f8ed79b6ca7188e3b99032757ad48db97dc"}, + {file = "jiter-0.7.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b1a0508fddc70ce00b872e463b387d49308ef02b0787992ca471c8d4ba1c0fa1"}, + {file = "jiter-0.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f84c9996664c460f24213ff1e5881530abd8fafd82058d39af3682d5fd2d6316"}, + {file = "jiter-0.7.1-cp310-none-win32.whl", hash = "sha256:c915e1a1960976ba4dfe06551ea87063b2d5b4d30759012210099e712a414d9f"}, + {file = "jiter-0.7.1-cp310-none-win_amd64.whl", hash = "sha256:75bf3b7fdc5c0faa6ffffcf8028a1f974d126bac86d96490d1b51b3210aa0f3f"}, + {file = "jiter-0.7.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ad04a23a91f3d10d69d6c87a5f4471b61c2c5cd6e112e85136594a02043f462c"}, + {file = "jiter-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e47a554de88dff701226bb5722b7f1b6bccd0b98f1748459b7e56acac2707a5"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e44fff69c814a2e96a20b4ecee3e2365e9b15cf5fe4e00869d18396daa91dab"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df0a1d05081541b45743c965436f8b5a1048d6fd726e4a030113a2699a6046ea"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f22cf8f236a645cb6d8ffe2a64edb5d2b66fb148bf7c75eea0cb36d17014a7bc"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8589f50b728ea4bf22e0632eefa125c8aa9c38ed202a5ee6ca371f05eeb3ff"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f20de711224f2ca2dbb166a8d512f6ff48c9c38cc06b51f796520eb4722cc2ce"}, + 
{file = "jiter-0.7.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a9803396032117b85ec8cbf008a54590644a062fedd0425cbdb95e4b2b60479"}, + {file = "jiter-0.7.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3d8bae77c82741032e9d89a4026479061aba6e646de3bf5f2fc1ae2bbd9d06e0"}, + {file = "jiter-0.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3dc9939e576bbc68c813fc82f6620353ed68c194c7bcf3d58dc822591ec12490"}, + {file = "jiter-0.7.1-cp311-none-win32.whl", hash = "sha256:f7605d24cd6fab156ec89e7924578e21604feee9c4f1e9da34d8b67f63e54892"}, + {file = "jiter-0.7.1-cp311-none-win_amd64.whl", hash = "sha256:f3ea649e7751a1a29ea5ecc03c4ada0a833846c59c6da75d747899f9b48b7282"}, + {file = "jiter-0.7.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ad36a1155cbd92e7a084a568f7dc6023497df781adf2390c345dd77a120905ca"}, + {file = "jiter-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7ba52e6aaed2dc5c81a3d9b5e4ab95b039c4592c66ac973879ba57c3506492bb"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b7de0b6f6728b678540c7927587e23f715284596724be203af952418acb8a2d"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9463b62bd53c2fb85529c700c6a3beb2ee54fde8bef714b150601616dcb184a6"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:627164ec01d28af56e1f549da84caf0fe06da3880ebc7b7ee1ca15df106ae172"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:25d0e5bf64e368b0aa9e0a559c3ab2f9b67e35fe7269e8a0d81f48bbd10e8963"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c244261306f08f8008b3087059601997016549cb8bb23cf4317a4827f07b7d74"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7ded4e4b75b68b843b7cea5cd7c55f738c20e1394c68c2cb10adb655526c5f1b"}, + {file = "jiter-0.7.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:80dae4f1889b9d09e5f4de6b58c490d9c8ce7730e35e0b8643ab62b1538f095c"}, + {file = "jiter-0.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5970cf8ec943b51bce7f4b98d2e1ed3ada170c2a789e2db3cb484486591a176a"}, + {file = "jiter-0.7.1-cp312-none-win32.whl", hash = "sha256:701d90220d6ecb3125d46853c8ca8a5bc158de8c49af60fd706475a49fee157e"}, + {file = "jiter-0.7.1-cp312-none-win_amd64.whl", hash = "sha256:7824c3ecf9ecf3321c37f4e4d4411aad49c666ee5bc2a937071bdd80917e4533"}, + {file = "jiter-0.7.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:097676a37778ba3c80cb53f34abd6943ceb0848263c21bf423ae98b090f6c6ba"}, + {file = "jiter-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3298af506d4271257c0a8f48668b0f47048d69351675dd8500f22420d4eec378"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12fd88cfe6067e2199964839c19bd2b422ca3fd792949b8f44bb8a4e7d21946a"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dacca921efcd21939123c8ea8883a54b9fa7f6545c8019ffcf4f762985b6d0c8"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de3674a5fe1f6713a746d25ad9c32cd32fadc824e64b9d6159b3b34fd9134143"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65df9dbae6d67e0788a05b4bad5706ad40f6f911e0137eb416b9eead6ba6f044"}, + {file = 
"jiter-0.7.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ba9a358d59a0a55cccaa4957e6ae10b1a25ffdabda863c0343c51817610501d"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:576eb0f0c6207e9ede2b11ec01d9c2182973986514f9c60bc3b3b5d5798c8f50"}, + {file = "jiter-0.7.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:e550e29cdf3577d2c970a18f3959e6b8646fd60ef1b0507e5947dc73703b5627"}, + {file = "jiter-0.7.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:81d968dbf3ce0db2e0e4dec6b0a0d5d94f846ee84caf779b07cab49f5325ae43"}, + {file = "jiter-0.7.1-cp313-none-win32.whl", hash = "sha256:f892e547e6e79a1506eb571a676cf2f480a4533675f834e9ae98de84f9b941ac"}, + {file = "jiter-0.7.1-cp313-none-win_amd64.whl", hash = "sha256:0302f0940b1455b2a7fb0409b8d5b31183db70d2b07fd177906d83bf941385d1"}, + {file = "jiter-0.7.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c65a3ce72b679958b79d556473f192a4dfc5895e8cc1030c9f4e434690906076"}, + {file = "jiter-0.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e80052d3db39f9bb8eb86d207a1be3d9ecee5e05fdec31380817f9609ad38e60"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70a497859c4f3f7acd71c8bd89a6f9cf753ebacacf5e3e799138b8e1843084e3"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c1288bc22b9e36854a0536ba83666c3b1fb066b811019d7b682c9cf0269cdf9f"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b096ca72dd38ef35675e1d3b01785874315182243ef7aea9752cb62266ad516f"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8dbbd52c50b605af13dbee1a08373c520e6fcc6b5d32f17738875847fea4e2cd"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af29c5c6eb2517e71ffa15c7ae9509fa5e833ec2a99319ac88cc271eca865519"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f114a4df1e40c03c0efbf974b376ed57756a1141eb27d04baee0680c5af3d424"}, + {file = "jiter-0.7.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:191fbaee7cf46a9dd9b817547bf556facde50f83199d07fc48ebeff4082f9df4"}, + {file = "jiter-0.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0e2b445e5ee627fb4ee6bbceeb486251e60a0c881a8e12398dfdff47c56f0723"}, + {file = "jiter-0.7.1-cp38-none-win32.whl", hash = "sha256:47ac4c3cf8135c83e64755b7276339b26cd3c7ddadf9e67306ace4832b283edf"}, + {file = "jiter-0.7.1-cp38-none-win_amd64.whl", hash = "sha256:60b49c245cd90cde4794f5c30f123ee06ccf42fb8730a019a2870cd005653ebd"}, + {file = "jiter-0.7.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8f212eeacc7203256f526f550d105d8efa24605828382cd7d296b703181ff11d"}, + {file = "jiter-0.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d9e247079d88c00e75e297e6cb3a18a039ebcd79fefc43be9ba4eb7fb43eb726"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0aacaa56360139c53dcf352992b0331f4057a0373bbffd43f64ba0c32d2d155"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc1b55314ca97dbb6c48d9144323896e9c1a25d41c65bcb9550b3e0c270ca560"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f281aae41b47e90deb70e7386558e877a8e62e1693e0086f37d015fa1c102289"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:93c20d2730a84d43f7c0b6fb2579dc54335db742a59cf9776d0b80e99d587382"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e81ccccd8069110e150613496deafa10da2f6ff322a707cbec2b0d52a87b9671"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a7d5e85766eff4c9be481d77e2226b4c259999cb6862ccac5ef6621d3c8dcce"}, + {file = "jiter-0.7.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f52ce5799df5b6975439ecb16b1e879d7655e1685b6e3758c9b1b97696313bfb"}, + {file = "jiter-0.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0c91a0304373fdf97d56f88356a010bba442e6d995eb7773cbe32885b71cdd8"}, + {file = "jiter-0.7.1-cp39-none-win32.whl", hash = "sha256:5c08adf93e41ce2755970e8aa95262298afe2bf58897fb9653c47cd93c3c6cdc"}, + {file = "jiter-0.7.1-cp39-none-win_amd64.whl", hash = "sha256:6592f4067c74176e5f369228fb2995ed01400c9e8e1225fb73417183a5e635f0"}, + {file = "jiter-0.7.1.tar.gz", hash = "sha256:448cf4f74f7363c34cdef26214da527e8eeffd88ba06d0b80b485ad0667baf5d"}, ] [[package]] @@ -4739,13 +4690,13 @@ openai = ["openai (>=0.27.8)"] [[package]] name = "langsmith" -version = "0.1.142" +version = "0.1.143" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.142-py3-none-any.whl", hash = "sha256:f639ca23c9a0bb77af5fb881679b2f66ff1f21f19d0bebf4e51375e7585a8b38"}, - {file = "langsmith-0.1.142.tar.gz", hash = "sha256:f8a84d100f3052233ff0a1d66ae14c5dfc20b7e41a1601de011384f16ee6cb82"}, + {file = "langsmith-0.1.143-py3-none-any.whl", hash = "sha256:ba0d827269e9b03a90fababe41fa3e4e3f833300b95add10184f7e67167dde6f"}, + {file = "langsmith-0.1.143.tar.gz", hash = "sha256:4c5159e5cd84b3f8499433009e72d2076dd2daf6c044ac8a3611b30d0d0161c5"}, ] [package.dependencies] @@ -6507,12 +6458,12 @@ ppft = ">=1.7.6.9" [[package]] name = "peewee" -version = "3.17.7" +version = "3.17.8" description = "a little orm" optional = false python-versions = "*" files = [ - {file = "peewee-3.17.7.tar.gz", hash = "sha256:6aefc700bd530fc6ac23fa19c9c5b47041751d92985b799169c8e318e97eabaa"}, + {file = "peewee-3.17.8.tar.gz", hash = "sha256:ce1d05db3438830b989a1b9d0d0aa4e7f6134d5f6fd57686eeaa26a3e6485a8c"}, ] [[package]] @@ -7396,13 +7347,13 @@ rsa = ["cryptography"] [[package]] name = "pyobvector" -version = "0.1.10" +version = "0.1.13" description = "A python SDK for OceanBase Vector Store, based on SQLAlchemy, compatible with Milvus API." optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "pyobvector-0.1.10-py3-none-any.whl", hash = "sha256:7ef0d20c640a948c7fe64f2f3bd4defda395e65c617152643340ed440056238c"}, - {file = "pyobvector-0.1.10.tar.gz", hash = "sha256:30a7ad42ff8be0bf0c37a33d1acfb8b948e7f9b6ac3d482b85f9761c41af9bfb"}, + {file = "pyobvector-0.1.13-py3-none-any.whl", hash = "sha256:b6a9e7a4673aebeefe835e04f7474d2f2ef8b9c96982af41cf9ce6f3e3500fdb"}, + {file = "pyobvector-0.1.13.tar.gz", hash = "sha256:e4b8f3ba3ad142cd7584b36278a38c0ef2fe7b6af142cdf5467d988e0737e03e"}, ] [package.dependencies] @@ -8536,29 +8487,29 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruff" -version = "0.7.3" +version = "0.7.4" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.7.3-py3-none-linux_armv6l.whl", hash = "sha256:34f2339dc22687ec7e7002792d1f50712bf84a13d5152e75712ac08be565d344"}, - {file = "ruff-0.7.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:fb397332a1879b9764a3455a0bb1087bda876c2db8aca3a3cbb67b3dbce8cda0"}, - {file = "ruff-0.7.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:37d0b619546103274e7f62643d14e1adcbccb242efda4e4bdb9544d7764782e9"}, - {file = "ruff-0.7.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d59f0c3ee4d1a6787614e7135b72e21024875266101142a09a61439cb6e38a5"}, - {file = "ruff-0.7.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:44eb93c2499a169d49fafd07bc62ac89b1bc800b197e50ff4633aed212569299"}, - {file = "ruff-0.7.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d0242ce53f3a576c35ee32d907475a8d569944c0407f91d207c8af5be5dae4e"}, - {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6b6224af8b5e09772c2ecb8dc9f3f344c1aa48201c7f07e7315367f6dd90ac29"}, - {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c50f95a82b94421c964fae4c27c0242890a20fe67d203d127e84fbb8013855f5"}, - {file = "ruff-0.7.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f3eff9961b5d2644bcf1616c606e93baa2d6b349e8aa8b035f654df252c8c67"}, - {file = "ruff-0.7.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8963cab06d130c4df2fd52c84e9f10d297826d2e8169ae0c798b6221be1d1d2"}, - {file = "ruff-0.7.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:61b46049d6edc0e4317fb14b33bd693245281a3007288b68a3f5b74a22a0746d"}, - {file = "ruff-0.7.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:10ebce7696afe4644e8c1a23b3cf8c0f2193a310c18387c06e583ae9ef284de2"}, - {file = "ruff-0.7.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3f36d56326b3aef8eeee150b700e519880d1aab92f471eefdef656fd57492aa2"}, - {file = "ruff-0.7.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5d024301109a0007b78d57ab0ba190087b43dce852e552734ebf0b0b85e4fb16"}, - {file = "ruff-0.7.3-py3-none-win32.whl", hash = "sha256:4ba81a5f0c5478aa61674c5a2194de8b02652f17addf8dfc40c8937e6e7d79fc"}, - {file = "ruff-0.7.3-py3-none-win_amd64.whl", hash = "sha256:588a9ff2fecf01025ed065fe28809cd5a53b43505f48b69a1ac7707b1b7e4088"}, - {file = "ruff-0.7.3-py3-none-win_arm64.whl", hash = "sha256:1713e2c5545863cdbfe2cbce21f69ffaf37b813bfd1fb3b90dc9a6f1963f5a8c"}, - {file = "ruff-0.7.3.tar.gz", hash = "sha256:e1d1ba2e40b6e71a61b063354d04be669ab0d39c352461f3d789cac68b54a313"}, + {file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"}, + {file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"}, + {file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"}, + {file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"}, + {file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"}, + {file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"}, + {file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"}, + {file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"}, + {file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"}, + {file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"}, + {file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"}, ] [[package]] @@ -8755,13 +8706,13 @@ test = ["accelerate (>=0.24.1,<=0.27.0)", "apache-airflow (==2.9.3)", "apache-ai [[package]] name = "sagemaker-core" -version = "1.0.13" +version = "1.0.14" description = "An python package for sagemaker core functionalities" optional = false python-versions = ">=3.8" files = [ - {file = "sagemaker_core-1.0.13-py3-none-any.whl", hash = "sha256:260613f3b12f3078e291419c5621f8d44a4e0ef4e3ae21af788c0fb2d981973b"}, - {file = "sagemaker_core-1.0.13.tar.gz", hash = "sha256:4ab3d99e7b2e0db81dbe7b4204cda93bc5f183d951434a1f9d71913305724dc7"}, + {file = "sagemaker_core-1.0.14-py3-none-any.whl", hash = "sha256:b47804d56a5b29967e6f29510e978d8ed541536c44e5aecef4fdecfafaba6aaa"}, + {file = "sagemaker_core-1.0.14.tar.gz", hash = "sha256:e73b8adfb1ae2f82c948f4a976222acc9e13f0d051129b332a034d3e05908857"}, ] [package.dependencies] @@ -8935,23 +8886,23 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "75.3.0" +version = "75.5.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "setuptools-75.3.0-py3-none-any.whl", hash = "sha256:f2504966861356aa38616760c0f66568e535562374995367b4e69c7143cf6bcd"}, - {file = "setuptools-75.3.0.tar.gz", hash = "sha256:fba5dd4d766e97be1b1681d98712680ae8f2f26d7881245f2ce9e40714f1a686"}, + {file = "setuptools-75.5.0-py3-none-any.whl", hash = "sha256:87cb777c3b96d638ca02031192d40390e0ad97737e27b6b4fa831bea86f2f829"}, + {file = "setuptools-75.5.0.tar.gz", hash = "sha256:5c4ccb41111392671f02bb5f8436dfc5a9a7185e80500531b133f5775c4163ef"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core 
= ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] +core = ["importlib-metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.12.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] [[package]] name = "sgmllib3k" @@ -9202,13 +9153,13 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "sqlparse" -version = "0.5.1" +version = "0.5.2" description = "A non-validating SQL parser." 
optional = false python-versions = ">=3.8" files = [ - {file = "sqlparse-0.5.1-py3-none-any.whl", hash = "sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4"}, - {file = "sqlparse-0.5.1.tar.gz", hash = "sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e"}, + {file = "sqlparse-0.5.2-py3-none-any.whl", hash = "sha256:e99bc85c78160918c3e1d9230834ab8d80fc06c59d03f8db2618f65f65dda55e"}, + {file = "sqlparse-0.5.2.tar.gz", hash = "sha256:9e37b35e16d1cc652a2545f0997c1deb23ea28fa1f3eefe609eee3063c3b105f"}, ] [package.extras] @@ -9386,13 +9337,13 @@ test = ["pytest", "tornado (>=4.5)", "typeguard"] [[package]] name = "tencentcloud-sdk-python-common" -version = "3.0.1263" +version = "3.0.1266" description = "Tencent Cloud Common SDK for Python" optional = false python-versions = "*" files = [ - {file = "tencentcloud-sdk-python-common-3.0.1263.tar.gz", hash = "sha256:3091024ece07982ec4829c661bc90474d2b9c5543965717f7136b9f66b201c34"}, - {file = "tencentcloud_sdk_python_common-3.0.1263-py2.py3-none-any.whl", hash = "sha256:812cdc2d183d455472f8fee88d699acb869a8d8497cd09cd6d83596a98a8e6d7"}, + {file = "tencentcloud-sdk-python-common-3.0.1266.tar.gz", hash = "sha256:3b1733a74138b66696c19263e6f579ac4bd7fc6048ffe7cb7d1774ecd09720f6"}, + {file = "tencentcloud_sdk_python_common-3.0.1266-py2.py3-none-any.whl", hash = "sha256:f6d89ee5f2c71cd701e2f55b4bd3cf4ed69619a7514eee66a7f79fe9ac65d02a"}, ] [package.dependencies] @@ -9400,17 +9351,17 @@ requests = ">=2.16.0" [[package]] name = "tencentcloud-sdk-python-hunyuan" -version = "3.0.1263" +version = "3.0.1266" description = "Tencent Cloud Hunyuan SDK for Python" optional = false python-versions = "*" files = [ - {file = "tencentcloud-sdk-python-hunyuan-3.0.1263.tar.gz", hash = "sha256:4e9c0120ca7eca48983afec7ff6a04a4bd75c347070f942a7edd378c5f9b2767"}, - {file = "tencentcloud_sdk_python_hunyuan-3.0.1263-py2.py3-none-any.whl", hash = "sha256:37446ef71d50a91dfe06d7c1704b1841aab079da29dc91099d2b793779e18dc2"}, + {file = "tencentcloud-sdk-python-hunyuan-3.0.1266.tar.gz", hash = "sha256:dcff322290fd4e7c40067c4e80ac9bf19867601c195d6a505f2f3fa1b97cc2ec"}, + {file = "tencentcloud_sdk_python_hunyuan-3.0.1266-py2.py3-none-any.whl", hash = "sha256:e2f8d156df33e157fc93b70a0574a2da5d2bbb8f98f0b3e7a5783d6c8c072c2b"}, ] [package.dependencies] -tencentcloud-sdk-python-common = "3.0.1263" +tencentcloud-sdk-python-common = "3.0.1266" [[package]] name = "termcolor" @@ -9672,13 +9623,13 @@ files = [ [[package]] name = "tomli" -version = "2.0.2" +version = "2.1.0" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ - {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, - {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, + {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"}, + {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"}, ] [[package]] @@ -10847,6 +10798,20 @@ requests = ">=2.31" nospam = ["requests-cache (>=1.0)", "requests-ratelimiter (>=0.3.1)"] repair = ["scipy (>=1.6.3)"] +[[package]] +name = "youtube-transcript-api" +version = "0.6.2" +description = "This is an python API which allows you to get the transcripts/subtitles for a given YouTube video. 
It also works for automatically generated subtitles, supports translating subtitles and it does not require a headless browser, like other selenium based solutions do!" +optional = false +python-versions = "*" +files = [ + {file = "youtube_transcript_api-0.6.2-py3-none-any.whl", hash = "sha256:019dbf265c6a68a0591c513fff25ed5a116ce6525832aefdfb34d4df5567121c"}, + {file = "youtube_transcript_api-0.6.2.tar.gz", hash = "sha256:cad223d7620633cec44f657646bffc8bbc5598bd8e70b1ad2fa8277dec305eb7"}, +] + +[package.dependencies] +requests = "*" + [[package]] name = "zhipuai" version = "2.1.5.20230904" @@ -11071,4 +11036,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "2ba4b464eebc26598f290fa94713acc44c588f902176e6efa80622911d40f0ac" +content-hash = "69a3f471f85dce9e5fb889f739e148a4a6d95aaf94081414503867c7157dba69" diff --git a/api/pyproject.toml b/api/pyproject.toml index 0633e9dd90..0d87c1b1c8 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -35,6 +35,7 @@ select = [ "S506", # unsafe-yaml-load "SIM", # flake8-simplify rules "TRY400", # error-instead-of-exception + "TRY401", # verbose-log-message "UP", # pyupgrade rules "W191", # tab-indentation "W605", # invalid-escape-sequence @@ -186,6 +187,7 @@ websocket-client = "~1.7.0" werkzeug = "~3.0.1" xinference-client = "0.15.2" yarl = "~1.9.4" +youtube-transcript-api = "~0.6.2" zhipuai = "~2.1.5" # Before adding new dependency, consider place it in alphabet order (a-z) and suitable group. diff --git a/api/schedule/clean_messages.py b/api/schedule/clean_messages.py new file mode 100644 index 0000000000..72ee2a8901 --- /dev/null +++ b/api/schedule/clean_messages.py @@ -0,0 +1,82 @@ +import datetime +import time + +import click +from werkzeug.exceptions import NotFound + +import app +from configs import dify_config +from extensions.ext_database import db +from extensions.ext_redis import redis_client +from models.model import ( + App, + Message, + MessageAgentThought, + MessageAnnotation, + MessageChain, + MessageFeedback, + MessageFile, +) +from models.web import SavedMessage +from services.feature_service import FeatureService + + +@app.celery.task(queue="dataset") +def clean_messages(): + click.echo(click.style("Start clean messages.", fg="green")) + start_at = time.perf_counter() + plan_sandbox_clean_message_day = datetime.datetime.now() - datetime.timedelta( + days=dify_config.PLAN_SANDBOX_CLEAN_MESSAGE_DAY_SETTING + ) + page = 1 + while True: + try: + # Fetch a page of messages older than the retention cutoff, newest first + messages = ( + db.session.query(Message) + .filter(Message.created_at < plan_sandbox_clean_message_day) + .order_by(Message.created_at.desc()) + .paginate(page=page, per_page=100) + ) + + except NotFound: + break + if messages.items is None or len(messages.items) == 0: + break + for message in messages.items: + app_model = App.query.filter_by(id=message.app_id).first() + # `app_model` avoids shadowing the imported `app` module; the app may also have been deleted + if not app_model: + continue + features_cache_key = f"features:{app_model.tenant_id}" + plan_cache = redis_client.get(features_cache_key) + if plan_cache is None: + features = FeatureService.get_features(app_model.tenant_id) + redis_client.setex(features_cache_key, 600, features.billing.subscription.plan) + plan = features.billing.subscription.plan + else: + plan = plan_cache.decode() + if plan == "sandbox": + # clean related message + db.session.query(MessageFeedback).filter(MessageFeedback.message_id == message.id).delete( + synchronize_session=False + ) + db.session.query(MessageAnnotation).filter(MessageAnnotation.message_id == message.id).delete(
synchronize_session=False + ) + db.session.query(MessageChain).filter(MessageChain.message_id == message.id).delete( + synchronize_session=False + ) + db.session.query(MessageAgentThought).filter(MessageAgentThought.message_id == message.id).delete( + synchronize_session=False + ) + db.session.query(MessageFile).filter(MessageFile.message_id == message.id).delete( + synchronize_session=False + ) + db.session.query(SavedMessage).filter(SavedMessage.message_id == message.id).delete( + synchronize_session=False + ) + db.session.query(Message).filter(Message.id == message.id).delete() + db.session.commit() + end_at = time.perf_counter() + click.echo(click.style("Cleaned messages from db success latency: {}".format(end_at - start_at), fg="green")) diff --git a/api/schedule/clean_unused_datasets_task.py b/api/schedule/clean_unused_datasets_task.py index 100fd8dfab..e12be649e4 100644 --- a/api/schedule/clean_unused_datasets_task.py +++ b/api/schedule/clean_unused_datasets_task.py @@ -22,7 +22,6 @@ def clean_unused_datasets_task(): start_at = time.perf_counter() plan_sandbox_clean_day = datetime.datetime.now() - datetime.timedelta(days=plan_sandbox_clean_day_setting) plan_pro_clean_day = datetime.datetime.now() - datetime.timedelta(days=plan_pro_clean_day_setting) - page = 1 while True: try: # Subquery for counting new documents @@ -62,14 +61,13 @@ def clean_unused_datasets_task(): func.coalesce(document_subquery_old.c.document_count, 0) > 0, ) .order_by(Dataset.created_at.desc()) - .paginate(page=page, per_page=50) + .paginate(page=1, per_page=50) ) except NotFound: break if datasets.items is None or len(datasets.items) == 0: break - page += 1 for dataset in datasets: dataset_query = ( db.session.query(DatasetQuery) @@ -92,7 +90,6 @@ def clean_unused_datasets_task(): click.echo( click.style("clean dataset index error: {} {}".format(e.__class__.__name__, str(e)), fg="red") ) - page = 1 while True: try: # Subquery for counting new documents @@ -132,14 +129,13 @@ def clean_unused_datasets_task(): func.coalesce(document_subquery_old.c.document_count, 0) > 0, ) .order_by(Dataset.created_at.desc()) - .paginate(page=page, per_page=50) + .paginate(page=1, per_page=50) ) except NotFound: break if datasets.items is None or len(datasets.items) == 0: break - page += 1 for dataset in datasets: dataset_query = ( db.session.query(DatasetQuery) @@ -149,11 +145,13 @@ def clean_unused_datasets_task(): if not dataset_query or len(dataset_query) == 0: try: features_cache_key = f"features:{dataset.tenant_id}" - plan = redis_client.get(features_cache_key) - if plan is None: + plan_cache = redis_client.get(features_cache_key) + if plan_cache is None: features = FeatureService.get_features(dataset.tenant_id) redis_client.setex(features_cache_key, 600, features.billing.subscription.plan) plan = features.billing.subscription.plan + else: + plan = plan_cache.decode() if plan == "sandbox": # remove index index_processor = IndexProcessorFactory(dataset.doc_form).init_index_processor() diff --git a/api/services/account_service.py b/api/services/account_service.py index 68687eb3d2..3eca0e62e7 100644 --- a/api/services/account_service.py +++ b/api/services/account_service.py @@ -198,9 +198,9 @@ class AccountService: ) -> Account: """create account""" if not FeatureService.get_system_features().is_allow_register and not is_setup: - from controllers.console.error import NotAllowedRegister + from controllers.console.error import AccountNotFound - raise NotAllowedRegister() + raise AccountNotFound() account = Account()
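# Reviewer note (illustrative, not part of the patch): clean_messages and
# clean_unused_datasets_task above now duplicate the same cache-then-fetch plan
# lookup. A minimal sketch of a shared helper under that assumption -- the helper
# name is hypothetical; the imports mirror the ones already used in clean_messages.py:

from extensions.ext_redis import redis_client
from services.feature_service import FeatureService


def get_tenant_plan(tenant_id: str) -> str:
    """Return the tenant's billing plan, cached in Redis for 10 minutes."""
    features_cache_key = f"features:{tenant_id}"
    plan_cache = redis_client.get(features_cache_key)
    if plan_cache is not None:
        # redis-py returns bytes; decode before comparing against "sandbox" etc.
        return plan_cache.decode()
    plan = FeatureService.get_features(tenant_id).billing.subscription.plan
    # 600 seconds matches the setex() TTL used by both scheduled tasks above
    redis_client.setex(features_cache_key, 600, plan)
    return plan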
account.email = email account.name = name @@ -779,7 +779,7 @@ class RegisterService: db.session.query(Tenant).delete() db.session.commit() - logging.exception(f"Setup failed: {e}") + logging.exception(f"Setup account failed, email: {email}, name: {name}") raise ValueError(f"Setup failed: {e}") @classmethod @@ -821,7 +821,7 @@ class RegisterService: db.session.rollback() except Exception as e: db.session.rollback() - logging.exception(f"Register failed: {e}") + logging.exception("Register failed") raise AccountRegisterError(f"Registration failed: {e}") from e return account diff --git a/api/services/app_service.py b/api/services/app_service.py index cd20a13d5a..ab86eada32 100644 --- a/api/services/app_service.py +++ b/api/services/app_service.py @@ -88,7 +88,7 @@ class AppService: except (ProviderTokenNotInitError, LLMBadRequestError): model_instance = None except Exception as e: - logging.exception(e) + logging.exception(f"Get default model instance failed, tenant_id: {tenant_id}") model_instance = None if model_instance: diff --git a/api/services/feature_service.py b/api/services/feature_service.py index 4d0a5f67ce..6fe2f97d82 100644 --- a/api/services/feature_service.py +++ b/api/services/feature_service.py @@ -1,3 +1,5 @@ +from enum import Enum + from pydantic import BaseModel, ConfigDict from configs import dify_config @@ -20,6 +22,20 @@ class LimitationModel(BaseModel): limit: int = 0 +class LicenseStatus(str, Enum): + NONE = "none" + INACTIVE = "inactive" + ACTIVE = "active" + EXPIRING = "expiring" + EXPIRED = "expired" + LOST = "lost" + + +class LicenseModel(BaseModel): + status: LicenseStatus = LicenseStatus.NONE + expired_at: str = "" + + class FeatureModel(BaseModel): billing: BillingModel = BillingModel() members: LimitationModel = LimitationModel(size=0, limit=1) @@ -49,6 +65,7 @@ class SystemFeatureModel(BaseModel): enable_social_oauth_login: bool = False is_allow_register: bool = False is_allow_create_workspace: bool = False + license: LicenseModel = LicenseModel() class FeatureService: @@ -136,17 +153,31 @@ class FeatureService: if "sso_enforced_for_signin" in enterprise_info: features.sso_enforced_for_signin = enterprise_info["sso_enforced_for_signin"] + if "sso_enforced_for_signin_protocol" in enterprise_info: features.sso_enforced_for_signin_protocol = enterprise_info["sso_enforced_for_signin_protocol"] + if "sso_enforced_for_web" in enterprise_info: features.sso_enforced_for_web = enterprise_info["sso_enforced_for_web"] + if "sso_enforced_for_web_protocol" in enterprise_info: features.sso_enforced_for_web_protocol = enterprise_info["sso_enforced_for_web_protocol"] + if "enable_email_code_login" in enterprise_info: features.enable_email_code_login = enterprise_info["enable_email_code_login"] + if "enable_email_password_login" in enterprise_info: features.enable_email_password_login = enterprise_info["enable_email_password_login"] + if "is_allow_register" in enterprise_info: features.is_allow_register = enterprise_info["is_allow_register"] + if "is_allow_create_workspace" in enterprise_info: features.is_allow_create_workspace = enterprise_info["is_allow_create_workspace"] + + if "license" in enterprise_info: + if "status" in enterprise_info["license"]: + features.license.status = enterprise_info["license"]["status"] + + if "expired_at" in enterprise_info["license"]: + features.license.expired_at = enterprise_info["license"]["expired_at"] diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py index ee56a5db94..0e70b5e94d 
100644 --- a/api/services/tools/api_tools_manage_service.py +++ b/api/services/tools/api_tools_manage_service.py @@ -197,7 +197,7 @@ class ApiToolManageService: # try to parse schema, avoid SSRF attack ApiToolManageService.parser_api_schema(schema) except Exception as e: - logger.exception(f"parse api schema error: {str(e)}") + logger.exception("parse api schema error") raise ValueError("invalid schema, please check the url you provided") return {"schema": schema} diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py index f34f734de6..6054c9163b 100644 --- a/api/services/tools/tools_transform_service.py +++ b/api/services/tools/tools_transform_service.py @@ -196,7 +196,7 @@ class ToolTransformService: username = user.name except Exception as e: - logger.exception(f"failed to get user name for api provider {db_provider.id}: {str(e)}") + logger.exception(f"failed to get user name for api provider {db_provider.id}") # add provider into providers credentials = db_provider.credentials result = ToolProviderApiEntity( diff --git a/api/tasks/annotation/delete_annotation_index_task.py b/api/tasks/annotation/delete_annotation_index_task.py index 5758db53de..f0f6b32b06 100644 --- a/api/tasks/annotation/delete_annotation_index_task.py +++ b/api/tasks/annotation/delete_annotation_index_task.py @@ -38,4 +38,4 @@ def delete_annotation_index_task(annotation_id: str, app_id: str, tenant_id: str click.style("App annotations index deleted : {} latency: {}".format(app_id, end_at - start_at), fg="green") ) except Exception as e: - logging.exception("Annotation deleted index failed:{}".format(str(e))) + logging.exception("Annotation deleted index failed") diff --git a/api/tasks/annotation/disable_annotation_reply_task.py b/api/tasks/annotation/disable_annotation_reply_task.py index 0f83dfdbd4..a2f4913513 100644 --- a/api/tasks/annotation/disable_annotation_reply_task.py +++ b/api/tasks/annotation/disable_annotation_reply_task.py @@ -60,7 +60,7 @@ def disable_annotation_reply_task(job_id: str, app_id: str, tenant_id: str): click.style("App annotations index deleted : {} latency: {}".format(app_id, end_at - start_at), fg="green") ) except Exception as e: - logging.exception("Annotation batch deleted index failed:{}".format(str(e))) + logging.exception("Annotation batch deleted index failed") redis_client.setex(disable_app_annotation_job_key, 600, "error") disable_app_annotation_error_key = "disable_app_annotation_error_{}".format(str(job_id)) redis_client.setex(disable_app_annotation_error_key, 600, str(e)) diff --git a/api/tasks/annotation/enable_annotation_reply_task.py b/api/tasks/annotation/enable_annotation_reply_task.py index 82b70f6b71..e819bf3635 100644 --- a/api/tasks/annotation/enable_annotation_reply_task.py +++ b/api/tasks/annotation/enable_annotation_reply_task.py @@ -93,7 +93,7 @@ def enable_annotation_reply_task( click.style("App annotations added to index: {} latency: {}".format(app_id, end_at - start_at), fg="green") ) except Exception as e: - logging.exception("Annotation batch created index failed:{}".format(str(e))) + logging.exception("Annotation batch created index failed") redis_client.setex(enable_app_annotation_job_key, 600, "error") enable_app_annotation_error_key = "enable_app_annotation_error_{}".format(str(job_id)) redis_client.setex(enable_app_annotation_error_key, 600, str(e)) diff --git a/api/tasks/batch_create_segment_to_index_task.py b/api/tasks/batch_create_segment_to_index_task.py index d1b41f2675..5ee72c27fc 100644 --- 
a/api/tasks/batch_create_segment_to_index_task.py +++ b/api/tasks/batch_create_segment_to_index_task.py @@ -103,5 +103,5 @@ def batch_create_segment_to_index_task( click.style("Segment batch created job: {} latency: {}".format(job_id, end_at - start_at), fg="green") ) except Exception as e: - logging.exception("Segments batch created index failed:{}".format(str(e))) + logging.exception("Segments batch created index failed") redis_client.setex(indexing_cache_key, 600, "error") diff --git a/api/tests/integration_tests/vdb/analyticdb/test_analyticdb.py b/api/tests/integration_tests/vdb/analyticdb/test_analyticdb.py index 970b98edc3..4f44d2ffd6 100644 --- a/api/tests/integration_tests/vdb/analyticdb/test_analyticdb.py +++ b/api/tests/integration_tests/vdb/analyticdb/test_analyticdb.py @@ -1,27 +1,43 @@ from core.rag.datasource.vdb.analyticdb.analyticdb_vector import AnalyticdbConfig, AnalyticdbVector +from core.rag.datasource.vdb.analyticdb.analyticdb_vector_openapi import AnalyticdbVectorOpenAPIConfig +from core.rag.datasource.vdb.analyticdb.analyticdb_vector_sql import AnalyticdbVectorBySqlConfig from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, setup_mock_redis class AnalyticdbVectorTest(AbstractVectorTest): - def __init__(self): + def __init__(self, config_type: str): super().__init__() # Analyticdb requires collection_name length less than 60. # it's ok for normal usage. self.collection_name = self.collection_name.replace("_test", "") - self.vector = AnalyticdbVector( - collection_name=self.collection_name, - config=AnalyticdbConfig( - access_key_id="test_key_id", - access_key_secret="test_key_secret", - region_id="test_region", - instance_id="test_id", - account="test_account", - account_password="test_passwd", - namespace="difytest_namespace", - collection="difytest_collection", - namespace_password="test_passwd", - ), - ) + if config_type == "sql": + self.vector = AnalyticdbVector( + collection_name=self.collection_name, + sql_config=AnalyticdbVectorBySqlConfig( + host="test_host", + port=5432, + account="test_account", + account_password="test_passwd", + namespace="difytest_namespace", + ), + api_config=None, + ) + else: + self.vector = AnalyticdbVector( + collection_name=self.collection_name, + sql_config=None, + api_config=AnalyticdbVectorOpenAPIConfig( + access_key_id="test_key_id", + access_key_secret="test_key_secret", + region_id="test_region", + instance_id="test_id", + account="test_account", + account_password="test_passwd", + namespace="difytest_namespace", + collection="difytest_collection", + namespace_password="test_passwd", + ), + ) def run_all_tests(self): self.vector.delete() @@ -29,4 +45,5 @@ class AnalyticdbVectorTest(AbstractVectorTest): def test_chroma_vector(setup_mock_redis): - AnalyticdbVectorTest().run_all_tests() + AnalyticdbVectorTest("api").run_all_tests() + AnalyticdbVectorTest("sql").run_all_tests() diff --git a/api/tests/integration_tests/workflow/test_sync_workflow.py b/api/tests/integration_tests/workflow/test_sync_workflow.py index df2ec95ebc..be270cdc49 100644 --- a/api/tests/integration_tests/workflow/test_sync_workflow.py +++ b/api/tests/integration_tests/workflow/test_sync_workflow.py @@ -27,8 +27,8 @@ NEW_VERSION_WORKFLOW_FEATURES = { "file_upload": { "enabled": True, "allowed_file_types": ["image"], - "allowed_extensions": [], - "allowed_upload_methods": ["remote_url", "local_file"], + "allowed_file_extensions": [], + "allowed_file_upload_methods": ["remote_url", "local_file"], "number_limits": 6, }, 
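# Reviewer note (illustrative, not part of the patch): the fixture change above
# tracks the renamed file_upload keys (allowed_extensions -> allowed_file_extensions,
# allowed_upload_methods -> allowed_file_upload_methods). A hedged sketch of
# upgrading an old-style dict to the new names -- the helper name and placement
# are hypothetical:

def migrate_file_upload_keys(file_upload: dict) -> dict:
    """Rename pre-0.11.2 file_upload keys to their new equivalents."""
    renames = {
        "allowed_extensions": "allowed_file_extensions",
        "allowed_upload_methods": "allowed_file_upload_methods",
    }
    return {renames.get(key, key): value for key, value in file_upload.items()}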
"opening_statement": "", diff --git a/api/tests/unit_tests/core/test_file.py b/api/tests/unit_tests/core/test_file.py index aa61c1c6f7..4edbc01cc7 100644 --- a/api/tests/unit_tests/core/test_file.py +++ b/api/tests/unit_tests/core/test_file.py @@ -1,4 +1,7 @@ -from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod, FileType +import json + +from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod, FileType, FileUploadConfig +from models.workflow import Workflow def test_file_loads_and_dumps(): @@ -38,3 +41,40 @@ def test_file_to_dict(): file_dict = file.to_dict() assert "_extra_config" not in file_dict assert "url" in file_dict + + +def test_workflow_features_with_image(): + # Create a feature dict that mimics the old structure with image config + features = { + "file_upload": { + "image": {"enabled": True, "number_limits": 5, "transfer_methods": ["remote_url", "local_file"]} + } + } + + # Create a workflow instance with the features + workflow = Workflow( + tenant_id="tenant-1", + app_id="app-1", + type="chat", + version="1.0", + graph="{}", + features=json.dumps(features), + created_by="user-1", + environment_variables=[], + conversation_variables=[], + ) + + # Get the converted features through the property + converted_features = json.loads(workflow.features) + + # Create FileUploadConfig from the converted features + file_upload_config = FileUploadConfig.model_validate(converted_features["file_upload"]) + + # Validate the config + assert file_upload_config.number_limits == 5 + assert list(file_upload_config.allowed_file_types) == [FileType.IMAGE] + assert list(file_upload_config.allowed_file_upload_methods) == [ + FileTransferMethod.REMOTE_URL, + FileTransferMethod.LOCAL_FILE, + ] + assert list(file_upload_config.allowed_file_extensions) == [] diff --git a/api/tests/unit_tests/core/test_model_manager.py b/api/tests/unit_tests/core/test_model_manager.py index 2808b5b0fa..d98e9f6bad 100644 --- a/api/tests/unit_tests/core/test_model_manager.py +++ b/api/tests/unit_tests/core/test_model_manager.py @@ -1,10 +1,12 @@ -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch import pytest +import redis from core.entities.provider_entities import ModelLoadBalancingConfiguration from core.model_manager import LBModelManager from core.model_runtime.entities.model_entities import ModelType +from extensions.ext_redis import redis_client @pytest.fixture @@ -38,6 +40,9 @@ def lb_model_manager(): def test_lb_model_manager_fetch_next(mocker, lb_model_manager): + # initialize redis client + redis_client.initialize(redis.Redis()) + assert len(lb_model_manager._load_balancing_configs) == 3 config1 = lb_model_manager._load_balancing_configs[0] @@ -55,12 +60,13 @@ def test_lb_model_manager_fetch_next(mocker, lb_model_manager): start_index += 1 return start_index - mocker.patch("redis.Redis.incr", side_effect=incr) - mocker.patch("redis.Redis.set", return_value=None) - mocker.patch("redis.Redis.expire", return_value=None) + with ( + patch.object(redis_client, "incr", side_effect=incr), + patch.object(redis_client, "set", return_value=None), + patch.object(redis_client, "expire", return_value=None), + ): + config = lb_model_manager.fetch_next() + assert config == config2 - config = lb_model_manager.fetch_next() - assert config == config2 - - config = lb_model_manager.fetch_next() - assert config == config3 + config = lb_model_manager.fetch_next() + assert config == config3 diff --git a/api/tests/unit_tests/core/workflow/nodes/http_request/test_entities.py 
b/api/tests/unit_tests/core/workflow/nodes/http_request/test_entities.py new file mode 100644 index 0000000000..0f6b7e4ab6 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/nodes/http_request/test_entities.py @@ -0,0 +1,140 @@ +from unittest.mock import Mock, PropertyMock, patch + +import httpx +import pytest + +from core.workflow.nodes.http_request.entities import Response + + +@pytest.fixture +def mock_response(): + response = Mock(spec=httpx.Response) + response.headers = {} + return response + + +def test_is_file_with_attachment_disposition(mock_response): + """Test is_file when content-disposition header contains 'attachment'""" + mock_response.headers = {"content-disposition": "attachment; filename=test.pdf", "content-type": "application/pdf"} + response = Response(mock_response) + assert response.is_file + + +def test_is_file_with_filename_disposition(mock_response): + """Test is_file when content-disposition header contains filename parameter""" + mock_response.headers = {"content-disposition": "inline; filename=test.pdf", "content-type": "application/pdf"} + response = Response(mock_response) + assert response.is_file + + +@pytest.mark.parametrize("content_type", ["application/pdf", "image/jpeg", "audio/mp3", "video/mp4"]) +def test_is_file_with_file_content_types(mock_response, content_type): + """Test is_file with various file content types""" + mock_response.headers = {"content-type": content_type} + # Mock binary content + type(mock_response).content = PropertyMock(return_value=bytes([0x00, 0xFF] * 512)) + response = Response(mock_response) + assert response.is_file, f"Content type {content_type} should be identified as a file" + + +@pytest.mark.parametrize( + "content_type", + [ + "application/json", + "application/xml", + "application/javascript", + "application/x-www-form-urlencoded", + "application/yaml", + "application/graphql", + ], +) +def test_text_based_application_types(mock_response, content_type): + """Test common text-based application types are not identified as files""" + mock_response.headers = {"content-type": content_type} + response = Response(mock_response) + assert not response.is_file, f"Content type {content_type} should not be identified as a file" + + +@pytest.mark.parametrize( + ("content", "content_type"), + [ + (b'{"key": "value"}', "application/octet-stream"), + (b"[1, 2, 3]", "application/unknown"), + (b"function test() {}", "application/x-unknown"), + (b"test", "application/binary"), + (b"var x = 1;", "application/data"), + ], +) +def test_content_based_detection(mock_response, content, content_type): + """Test content-based detection for text-like content""" + mock_response.headers = {"content-type": content_type} + type(mock_response).content = PropertyMock(return_value=content) + response = Response(mock_response) + assert not response.is_file, f"Content {content} with type {content_type} should not be identified as a file" + + +@pytest.mark.parametrize( + ("content", "content_type"), + [ + (bytes([0x00, 0xFF] * 512), "application/octet-stream"), + (bytes([0x89, 0x50, 0x4E, 0x47]), "application/unknown"), # PNG magic numbers + (bytes([0xFF, 0xD8, 0xFF]), "application/binary"), # JPEG magic numbers + ], +) +def test_binary_content_detection(mock_response, content, content_type): + """Test content-based detection for binary content""" + mock_response.headers = {"content-type": content_type} + type(mock_response).content = PropertyMock(return_value=content) + response = Response(mock_response) + assert response.is_file, f"Binary content with 
type {content_type} should be identified as a file" + + +@pytest.mark.parametrize( + ("content_type", "expected_main_type"), + [ + ("x-world/x-vrml", "model"), # VRML 3D model + ("font/ttf", "application"), # TrueType font + ("text/csv", "text"), # CSV text file + ("unknown/xyz", None), # Unknown type + ], +) +def test_mimetype_based_detection(mock_response, content_type, expected_main_type): + """Test detection using mimetypes.guess_type for non-application content types""" + mock_response.headers = {"content-type": content_type} + type(mock_response).content = PropertyMock(return_value=bytes([0x00])) # Dummy content + + with patch("core.workflow.nodes.http_request.entities.mimetypes.guess_type") as mock_guess_type: + # Mock the return value based on expected_main_type + if expected_main_type: + mock_guess_type.return_value = (f"{expected_main_type}/subtype", None) + else: + mock_guess_type.return_value = (None, None) + + response = Response(mock_response) + + # Check if the result matches our expectation + if expected_main_type in ("application", "image", "audio", "video"): + assert response.is_file, f"Content type {content_type} should be identified as a file" + else: + assert not response.is_file, f"Content type {content_type} should not be identified as a file" + + # Verify that guess_type was called + mock_guess_type.assert_called_once() + + +def test_is_file_with_inline_disposition(mock_response): + """Test is_file when content-disposition is 'inline'""" + mock_response.headers = {"content-disposition": "inline", "content-type": "application/pdf"} + # Mock binary content + type(mock_response).content = PropertyMock(return_value=bytes([0x00, 0xFF] * 512)) + response = Response(mock_response) + assert response.is_file + + +def test_is_file_with_no_content_disposition(mock_response): + """Test is_file when no content-disposition header is present""" + mock_response.headers = {"content-type": "application/pdf"} + # Mock binary content + type(mock_response).content = PropertyMock(return_value=bytes([0x00, 0xFF] * 512)) + response = Response(mock_response) + assert response.is_file diff --git a/api/tests/unit_tests/utils/test_text_processing.py b/api/tests/unit_tests/utils/test_text_processing.py new file mode 100644 index 0000000000..f9d00d0b39 --- /dev/null +++ b/api/tests/unit_tests/utils/test_text_processing.py @@ -0,0 +1,20 @@ +from textwrap import dedent + +import pytest + +from core.tools.utils.text_processing_utils import remove_leading_symbols + + +@pytest.mark.parametrize( + ("input_text", "expected_output"), + [ + ("...Hello, World!", "Hello, World!"), + ("。测试中文标点", "测试中文标点"), + ("!@#Test symbols", "Test symbols"), + ("Hello, World!", "Hello, World!"), + ("", ""), + (" ", " "), + ], +) +def test_remove_leading_symbols(input_text, expected_output): + assert remove_leading_symbols(input_text) == expected_output diff --git a/docker-legacy/docker-compose.yaml b/docker-legacy/docker-compose.yaml index 9c2a1fe980..7bf2cd4708 100644 --- a/docker-legacy/docker-compose.yaml +++ b/docker-legacy/docker-compose.yaml @@ -2,7 +2,7 @@ version: '3' services: # API service api: - image: langgenius/dify-api:0.11.1 + image: langgenius/dify-api:0.11.2 restart: always environment: # Startup mode, 'api' starts the API server. @@ -227,7 +227,7 @@ services: # worker service # The Celery worker for processing the queue. 
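# Reviewer note (illustrative, not part of the patch): the test_remove_leading_symbols
# cases above pin the expected behaviour. One regex-based implementation that satisfies
# every listed case -- whether core.tools.utils.text_processing_utils does exactly
# this is an assumption:

import re


def remove_leading_symbols(text: str) -> str:
    """Strip a leading run of punctuation/symbols; whitespace-only input survives."""
    # [^\w\s] matches punctuation and symbols (ASCII and CJK alike) but not letters,
    # digits, underscores, or whitespace, so "" and " " pass through unchanged.
    return re.sub(r"^[^\w\s]+", "", text)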
worker: - image: langgenius/dify-api:0.11.1 + image: langgenius/dify-api:0.11.2 restart: always environment: CONSOLE_WEB_URL: '' @@ -397,7 +397,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:0.11.1 + image: langgenius/dify-web:0.11.2 restart: always environment: # The base URL of console application api server, refers to the Console base URL of WEB service if console domain is diff --git a/docker/.env.example b/docker/.env.example index cf09f72bce..d29c66535d 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -49,7 +49,7 @@ FILES_URL= # Supported values are `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL` LOG_LEVEL=INFO # Log file path -LOG_FILE= +LOG_FILE=/app/logs/server.log # Log file max size, the unit is MB LOG_FILE_MAX_SIZE=20 # Log file max backup count @@ -75,7 +75,8 @@ SECRET_KEY=sk-9f73s3ljTXVcMT3Blb3ljTqtsKiGHXVcMT3BlbkFJLK7U # Password for admin user initialization. # If left unset, admin user will not be prompted for a password -# when creating the initial admin account. +# when creating the initial admin account. +# The password length cannot exceed 30 characters. INIT_PASSWORD= # Deployment environment. @@ -239,6 +240,12 @@ REDIS_SENTINEL_USERNAME= REDIS_SENTINEL_PASSWORD= REDIS_SENTINEL_SOCKET_TIMEOUT=0.1 +# List of Redis Cluster nodes. If Cluster mode is enabled, provide at least one Cluster IP and port. +# Format: `host1:port1,host2:port2,host3:port3` +REDIS_USE_CLUSTERS=false +REDIS_CLUSTERS= +REDIS_CLUSTERS_PASSWORD= + # ------------------------------ # Celery Configuration # ------------------------------ @@ -450,6 +457,10 @@ ANALYTICDB_ACCOUNT=testaccount ANALYTICDB_PASSWORD=testpassword ANALYTICDB_NAMESPACE=dify ANALYTICDB_NAMESPACE_PASSWORD=difypassword +ANALYTICDB_HOST=gp-test.aliyuncs.com +ANALYTICDB_PORT=5432 +ANALYTICDB_MIN_CONNECTION=1 +ANALYTICDB_MAX_CONNECTION=5 # TiDB vector configurations, only available when VECTOR_STORE is `tidb` TIDB_VECTOR_HOST=tidb @@ -558,7 +569,7 @@ UPLOAD_FILE_SIZE_LIMIT=15 # The maximum number of files that can be uploaded at a time, default 5.
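# Reviewer note (illustrative, not part of the patch): one way the comma-separated
# REDIS_CLUSTERS value documented above can be turned into a client with redis-py's
# cluster support -- a minimal sketch, not necessarily how extensions/ext_redis.py
# wires it up; the function name is hypothetical:

from redis.cluster import ClusterNode, RedisCluster


def cluster_client_from_env(nodes: str, password: str | None = None) -> RedisCluster:
    """Build a RedisCluster client from 'host1:port1,host2:port2,...'."""
    startup_nodes = [
        ClusterNode(host, int(port))
        for host, port in (node.split(":", 1) for node in nodes.split(",") if node)
    ]
    return RedisCluster(startup_nodes=startup_nodes, password=password)


# e.g. cluster_client_from_env("127.0.0.1:7000,127.0.0.1:7001", "difyai123456")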
UPLOAD_FILE_BATCH_LIMIT=5 -# ETl type, support: `dify`, `Unstructured` +# ETL type, support: `dify`, `Unstructured` # `dify` Dify's proprietary file extraction scheme # `Unstructured` Unstructured.io file extraction scheme ETL_TYPE=dify @@ -916,4 +927,4 @@ POSITION_PROVIDER_EXCLUDES= CSP_WHITELIST= # Enable or disable create tidb service job -CREATE_TIDB_SERVICE_JOB_ENABLED=false \ No newline at end of file +CREATE_TIDB_SERVICE_JOB_ENABLED=false diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 0de68c5299..f49dfb2ff7 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -55,6 +55,9 @@ x-shared-env: &shared-api-worker-env REDIS_SENTINEL_USERNAME: ${REDIS_SENTINEL_USERNAME:-} REDIS_SENTINEL_PASSWORD: ${REDIS_SENTINEL_PASSWORD:-} REDIS_SENTINEL_SOCKET_TIMEOUT: ${REDIS_SENTINEL_SOCKET_TIMEOUT:-0.1} + REDIS_CLUSTERS: ${REDIS_CLUSTERS:-} + REDIS_USE_CLUSTERS: ${REDIS_USE_CLUSTERS:-false} + REDIS_CLUSTERS_PASSWORD: ${REDIS_CLUSTERS_PASSWORD:-} ACCESS_TOKEN_EXPIRE_MINUTES: ${ACCESS_TOKEN_EXPIRE_MINUTES:-60} CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://:difyai123456@redis:6379/1} BROKER_USE_SSL: ${BROKER_USE_SSL:-false} @@ -185,6 +188,10 @@ x-shared-env: &shared-api-worker-env ANALYTICDB_PASSWORD: ${ANALYTICDB_PASSWORD:-} ANALYTICDB_NAMESPACE: ${ANALYTICDB_NAMESPACE:-dify} ANALYTICDB_NAMESPACE_PASSWORD: ${ANALYTICDB_NAMESPACE_PASSWORD:-} + ANALYTICDB_HOST: ${ANALYTICDB_HOST:-} + ANALYTICDB_PORT: ${ANALYTICDB_PORT:-5432} + ANALYTICDB_MIN_CONNECTION: ${ANALYTICDB_MIN_CONNECTION:-1} + ANALYTICDB_MAX_CONNECTION: ${ANALYTICDB_MAX_CONNECTION:-5} OPENSEARCH_HOST: ${OPENSEARCH_HOST:-opensearch} OPENSEARCH_PORT: ${OPENSEARCH_PORT:-9200} OPENSEARCH_USER: ${OPENSEARCH_USER:-admin} @@ -283,7 +290,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:0.11.1 + image: langgenius/dify-api:0.11.2 restart: always environment: # Use the shared environment variables. @@ -303,7 +310,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:0.11.1 + image: langgenius/dify-api:0.11.2 restart: always environment: # Use the shared environment variables. @@ -322,7 +329,7 @@ services: # Frontend web application. 
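# Reviewer note (illustrative, not part of the patch): ANALYTICDB_MIN_CONNECTION /
# ANALYTICDB_MAX_CONNECTION plumbed through above bound a client-side pool for the
# new SQL access path. A sketch with psycopg2's pool, using the sample values from
# .env.example -- whether analyticdb_vector_sql.py uses exactly this class, and the
# "postgres" database name, are assumptions:

from psycopg2.pool import SimpleConnectionPool

pool = SimpleConnectionPool(
    minconn=1,                        # ANALYTICDB_MIN_CONNECTION
    maxconn=5,                        # ANALYTICDB_MAX_CONNECTION
    host="gp-test.aliyuncs.com",      # ANALYTICDB_HOST
    port=5432,                        # ANALYTICDB_PORT
    user="testaccount",               # ANALYTICDB_ACCOUNT
    password="testpassword",          # ANALYTICDB_PASSWORD
    dbname="postgres",                # hypothetical database name
)
conn = pool.getconn()
try:
    with conn.cursor() as cur:
        cur.execute("SELECT 1")
finally:
    pool.putconn(conn)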
web: - image: langgenius/dify-web:0.11.1 + image: langgenius/dify-web:0.11.2 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} diff --git a/web/.gitignore b/web/.gitignore index efcbf2bfcd..048c5f6485 100644 --- a/web/.gitignore +++ b/web/.gitignore @@ -50,3 +50,7 @@ package-lock.json # storybook /storybook-static *storybook.log + +# mise +mise.toml + diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/chartView.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/chartView.tsx index b01bc1b856..b5d3462dfa 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/chartView.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/chartView.tsx @@ -7,7 +7,7 @@ import type { PeriodParams } from '@/app/components/app/overview/appChart' import { AvgResponseTime, AvgSessionInteractions, AvgUserInteractions, ConversationsChart, CostChart, EndUsersChart, MessagesChart, TokenPerSecond, UserSatisfactionRate, WorkflowCostChart, WorkflowDailyTerminalsChart, WorkflowMessagesChart } from '@/app/components/app/overview/appChart' import type { Item } from '@/app/components/base/select' import { SimpleSelect } from '@/app/components/base/select' -import { TIME_PERIOD_LIST } from '@/app/components/app/log/filter' +import { TIME_PERIOD_MAPPING } from '@/app/components/app/log/filter' import { useStore as useAppStore } from '@/app/components/app/store' dayjs.extend(quarterOfYear) @@ -28,7 +28,7 @@ export default function ChartView({ appId }: IChartViewProps) { const [period, setPeriod] = useState({ name: t('appLog.filter.period.last7days'), query: { start: today.subtract(7, 'day').startOf('day').format(queryDateFormat), end: today.endOf('day').format(queryDateFormat) } }) const onSelect = (item: Item) => { - if (item.value === 'all') { + if (item.value === '-1') { setPeriod({ name: item.name, query: undefined }) } else if (item.value === 0) { @@ -49,10 +49,15 @@ export default function ChartView({ appId }: IChartViewProps) {
{t('appOverview.analysis.title')} ({ value: item.value, name: t(`appLog.filter.period.${item.name}`) }))} + items={Object.entries(TIME_PERIOD_MAPPING).map(([k, v]) => ({ value: k, name: t(`appLog.filter.period.${v.name}`) }))} className='mt-0 !w-40' - onSelect={onSelect} - defaultValue={7} + onSelect={(item) => { + const id = item.value + const value = TIME_PERIOD_MAPPING[id]?.value || '-1' + const name = item.name || t('appLog.filter.period.allTime') + onSelect({ value, name }) + }} + defaultValue={'2'} />
{!isWorkflow && ( diff --git a/web/app/(commonLayout)/datasets/Container.tsx b/web/app/(commonLayout)/datasets/Container.tsx index c30cc18418..3be8b2a968 100644 --- a/web/app/(commonLayout)/datasets/Container.tsx +++ b/web/app/(commonLayout)/datasets/Container.tsx @@ -27,6 +27,7 @@ import { useTabSearchParams } from '@/hooks/use-tab-searchparams' import { useStore as useTagStore } from '@/app/components/base/tag-management/store' import { useAppContext } from '@/context/app-context' import { useExternalApiPanel } from '@/context/external-api-panel-context' + import { useQuery } from '@tanstack/react-query' const Container = () => { @@ -49,7 +50,7 @@ const Container = () => { const containerRef = useRef(null) const { data } = useQuery( { - queryKey: ['datasetApiBaseInfo', activeTab], + queryKey: ['datasetApiBaseInfo'], queryFn: () => fetchDatasetApiBaseUrl('/datasets/api-base-info'), enabled: activeTab !== 'dataset', }, diff --git a/web/app/(commonLayout)/datasets/template/template.en.mdx b/web/app/(commonLayout)/datasets/template/template.en.mdx index 02e23429ce..d3dcfc4b24 100644 --- a/web/app/(commonLayout)/datasets/template/template.en.mdx +++ b/web/app/(commonLayout)/datasets/template/template.en.mdx @@ -329,7 +329,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from diff --git a/web/app/(commonLayout)/datasets/template/template.zh.mdx b/web/app/(commonLayout)/datasets/template/template.zh.mdx index e5d5f56120..db15ede9fc 100644 --- a/web/app/(commonLayout)/datasets/template/template.zh.mdx +++ b/web/app/(commonLayout)/datasets/template/template.zh.mdx @@ -329,7 +329,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from diff --git a/web/app/(commonLayout)/plugins/test/card/page.tsx b/web/app/(commonLayout)/plugins/test/card/page.tsx index 02e80d8563..86e0da56bf 100644 --- a/web/app/(commonLayout)/plugins/test/card/page.tsx +++ b/web/app/(commonLayout)/plugins/test/card/page.tsx @@ -7,6 +7,7 @@ import CardMoreInfo from '@/app/components/plugins/card/card-more-info' import Badge from '@/app/components/base/badge' import InstallBundle from '@/app/components/plugins/install-plugin/install-bundle' import { useBoolean } from 'ahooks' +import LoadingError from '@/app/components/plugins/install-plugin/base/loading-error' const PluginList = () => { const pluginList = [toolNotion, extensionDallE, modelGPT4, customTool] @@ -16,6 +17,7 @@ const PluginList = () => { return (
+ {isShow && ( { github_plugin_unique_identifier: 'yixiao0/test:0.0.1@3592166c87afcf944b4f13f27467a5c8f9e00bd349cb42033a072734a37431b4', }, }, - { - type: 'github', - value: { - package: 'dify-test.difypkg', - repo: 'WTW0313/dify-test', - version: '0.0.5-beta.2', - github_plugin_unique_identifier: 'wtw0313/dify-test:0.0.1@1633daa043b47155d4228e2db7734245fd6d3e20ba812e5c02ce69fc1e3038f4', - }, - }, - { - type: 'marketplace', - value: { - plugin_unique_identifier: 'langgenius/openai:0.0.2@7baee9635a07573ea192621ebfdacb39db466fa691e75255beaf48bf41d44375', - }, - }, + // { + // type: 'github', + // value: { + // package: 'dify-test.difypkg', + // repo: 'WTW0313/dify-test', + // release: '0.0.5-beta.2', + // github_plugin_unique_identifier: 'wtw0313/dify-test:0.0.1@1633daa043b47155d4228e2db7734245fd6d3e20ba812e5c02ce69fc1e3038f4', + // }, + // }, + // { + // type: 'marketplace', + // value: { + // plugin_unique_identifier: 'langgenius/openai:0.0.2@7baee9635a07573ea192621ebfdacb39db466fa691e75255beaf48bf41d44375', + // }, + // }, ]} /> ) } diff --git a/web/app/components/app/configuration/config/agent/agent-tools/index.tsx b/web/app/components/app/configuration/config/agent/agent-tools/index.tsx index 35cee8eefc..40c4232270 100644 --- a/web/app/components/app/configuration/config/agent/agent-tools/index.tsx +++ b/web/app/components/app/configuration/config/agent/agent-tools/index.tsx @@ -44,13 +44,13 @@ const AgentTools: FC = () => { const [currentTool, setCurrentTool] = useState(null) const currentCollection = useMemo(() => { if (!currentTool) return null - const collection = collectionList.find(collection => collection.id === currentTool?.provider_id && collection.type === currentTool?.provider_type) + const collection = collectionList.find(collection => collection.id === currentTool?.provider_id.split('/').pop() && collection.type === currentTool?.provider_type) return collection }, [currentTool, collectionList]) const [isShowSettingTool, setIsShowSettingTool] = useState(false) const [isShowSettingAuth, setShowSettingAuth] = useState(false) const tools = (modelConfig?.agentConfig?.tools as AgentTool[] || []).map((item) => { - const collection = collectionList.find(collection => collection.id === item.provider_id && collection.type === item.provider_type) + const collection = collectionList.find(collection => collection.id === item.provider_id.split('/').pop() && collection.type === item.provider_type) const icon = collection?.icon return { ...item, @@ -157,11 +157,10 @@ const AgentTools: FC = () => {
- {item.provider_type === CollectionType.builtIn ? item.provider_name : item.tool_label} + {item.provider_type === CollectionType.builtIn ? item.provider_name.split('/').pop() : item.tool_label} {item.tool_name} {!item.isDeleted && ( = ({ isChatMode, appId, queryParams, setQueryPara className='min-w-[150px]' panelClassName='w-[270px]' leftIcon={} - value={queryParams.period || 7} + value={queryParams.period} onSelect={(item) => { - setQueryParams({ ...queryParams, period: item.value as string }) + setQueryParams({ ...queryParams, period: item.value }) }} - onClear={() => setQueryParams({ ...queryParams, period: 7 })} - items={TIME_PERIOD_LIST.map(item => ({ value: item.value, name: t(`appLog.filter.period.${item.name}`) }))} + onClear={() => setQueryParams({ ...queryParams, period: '9' })} + items={Object.entries(TIME_PERIOD_MAPPING).map(([k, v]) => ({ value: k, name: t(`appLog.filter.period.${v.name}`) }))} /> = ({ appUrl }) => { const Logs: FC = ({ appDetail }) => { const { t } = useTranslation() const [queryParams, setQueryParams] = useState({ - period: 7, + period: '2', annotation_status: 'all', sort_by: '-created_at', }) @@ -68,9 +68,9 @@ const Logs: FC = ({ appDetail }) => { const query = { page: currPage + 1, limit: APP_PAGE_LIMIT, - ...(debouncedQueryParams.period !== 'all' + ...((debouncedQueryParams.period !== '9') ? { - start: dayjs().subtract(debouncedQueryParams.period as number, 'day').startOf('day').format('YYYY-MM-DD HH:mm'), + start: dayjs().subtract(TIME_PERIOD_MAPPING[debouncedQueryParams.period].value, 'day').startOf('day').format('YYYY-MM-DD HH:mm'), end: dayjs().endOf('day').format('YYYY-MM-DD HH:mm'), } : {}), @@ -130,7 +130,7 @@ const Logs: FC = ({ appDetail }) => { {t('appLog.table.pagination.previous')} -
+
= ({ logs, appDetail, onRefresh }) const [showDrawer, setShowDrawer] = useState(false) // Whether to display the chat details drawer const [currentConversation, setCurrentConversation] = useState() // Currently selected conversation const isChatMode = appDetail.mode !== 'completion' // Whether the app is a chat app + const { setShowPromptLogModal, setShowAgentLogModal } = useAppStore(useShallow(state => ({ + setShowPromptLogModal: state.setShowPromptLogModal, + setShowAgentLogModal: state.setShowAgentLogModal, + }))) // Annotated data needs to be highlighted const renderTdValue = (value: string | number | null, isEmptyStyle: boolean, isHighlight = false, annotation?: LogAnnotation) => { @@ -700,6 +704,8 @@ const ConversationList: FC = ({ logs, appDetail, onRefresh }) onRefresh() setShowDrawer(false) setCurrentConversation(undefined) + setShowPromptLogModal(false) + setShowAgentLogModal(false) } if (!logs) diff --git a/web/app/components/base/chat/chat/chat-input-area/index.tsx b/web/app/components/base/chat/chat/chat-input-area/index.tsx index 32d841148a..5169e65a59 100644 --- a/web/app/components/base/chat/chat/chat-input-area/index.tsx +++ b/web/app/components/base/chat/chat/chat-input-area/index.tsx @@ -1,5 +1,6 @@ import { useCallback, + useRef, useState, } from 'react' import Textarea from 'rc-textarea' @@ -73,7 +74,8 @@ const ChatInputArea = ({ isDragActive, } = useFile(visionConfig!) const { checkInputsForm } = useCheckInputsForms() - + const historyRef = useRef(['']) + const [currentIndex, setCurrentIndex] = useState(-1) const handleSend = () => { if (onSend) { const { files, setFiles } = filesStore.getState() @@ -92,13 +94,33 @@ const ChatInputArea = ({ } } } - const handleKeyDown = (e: React.KeyboardEvent) => { if (e.key === 'Enter' && !e.shiftKey && !e.nativeEvent.isComposing) { e.preventDefault() setQuery(query.replace(/\n$/, '')) + historyRef.current.push(query) + setCurrentIndex(historyRef.current.length) handleSend() } + else if (e.key === 'ArrowUp' && !e.shiftKey && !e.nativeEvent.isComposing) { + // When the up key is pressed, output the previous element + if (currentIndex > 0) { + setCurrentIndex(currentIndex - 1) + setQuery(historyRef.current[currentIndex - 1]) + } + } + else if (e.key === 'ArrowDown' && !e.shiftKey && !e.nativeEvent.isComposing) { + // When the down key is pressed, output the next element + if (currentIndex < historyRef.current.length - 1) { + setCurrentIndex(currentIndex + 1) + setQuery(historyRef.current[currentIndex + 1]) + } + else if (currentIndex === historyRef.current.length - 1) { + // If it is the last element, clear the input box + setCurrentIndex(historyRef.current.length) + setQuery('') + } + } } const handleShowVoiceInput = useCallback(() => { diff --git a/web/app/components/base/file-uploader/file-input.tsx b/web/app/components/base/file-uploader/file-input.tsx index ff71cf1030..f7d659c66f 100644 --- a/web/app/components/base/file-uploader/file-input.tsx +++ b/web/app/components/base/file-uploader/file-input.tsx @@ -13,15 +13,24 @@ const FileInput = ({ const files = useStore(s => s.files) const { handleLocalFileUpload } = useFile(fileConfig) const handleChange = (e: React.ChangeEvent) => { - const file = e.target.files?.[0] + const targetFiles = e.target.files - if (file) - handleLocalFileUpload(file) + if (targetFiles) { + if (fileConfig.number_limits) { + for (let i = 0; i < targetFiles.length; i++) { + if (i + 1 + files.length <= fileConfig.number_limits) + handleLocalFileUpload(targetFiles[i]) + } + } + else { + 
handleLocalFileUpload(targetFiles[0]) + } + } } const allowedFileTypes = fileConfig.allowed_file_types const isCustom = allowedFileTypes?.includes(SupportUploadFileTypes.custom) - const exts = isCustom ? (fileConfig.allowed_file_extensions?.map(item => `.${item}`) || []) : (allowedFileTypes?.map(type => FILE_EXTS[type]) || []).flat().map(item => `.${item}`) + const exts = isCustom ? (fileConfig.allowed_file_extensions || []) : (allowedFileTypes?.map(type => FILE_EXTS[type]) || []).flat().map(item => `.${item}`) const accept = exts.join(',') return ( @@ -32,6 +41,7 @@ const FileInput = ({ onChange={handleChange} accept={accept} disabled={!!(fileConfig.number_limits && files.length >= fileConfig?.number_limits)} + multiple={!!fileConfig.number_limits && fileConfig.number_limits > 1} /> ) } diff --git a/web/app/components/base/file-uploader/file-uploader-in-chat-input/file-item.tsx b/web/app/components/base/file-uploader/file-uploader-in-chat-input/file-item.tsx index a051b89ec1..fcf665643c 100644 --- a/web/app/components/base/file-uploader/file-uploader-in-chat-input/file-item.tsx +++ b/web/app/components/base/file-uploader/file-uploader-in-chat-input/file-item.tsx @@ -98,6 +98,7 @@ const FileItem = ({ ) } diff --git a/web/app/components/base/file-uploader/hooks.ts b/web/app/components/base/file-uploader/hooks.ts index c735754ffe..256202d783 100644 --- a/web/app/components/base/file-uploader/hooks.ts +++ b/web/app/components/base/file-uploader/hooks.ts @@ -241,7 +241,7 @@ export const useFile = (fileConfig: FileUpload) => { notify({ type: 'error', message: t('common.fileUploader.pasteFileLinkInvalid') }) handleRemoveFile(uploadingFile.id) }) - }, [checkSizeLimit, handleAddFile, handleUpdateFile, notify, t, handleRemoveFile, fileConfig?.allowed_file_types, fileConfig.allowed_file_extensions, startProgressTimer]) + }, [checkSizeLimit, handleAddFile, handleUpdateFile, notify, t, handleRemoveFile, fileConfig?.allowed_file_types, fileConfig.allowed_file_extensions, startProgressTimer, params.token]) const handleLoadFileFromLinkSuccess = useCallback(() => { }, []) diff --git a/web/app/components/base/file-uploader/utils.ts b/web/app/components/base/file-uploader/utils.ts index eb9199d74b..aa8625f221 100644 --- a/web/app/components/base/file-uploader/utils.ts +++ b/web/app/components/base/file-uploader/utils.ts @@ -44,21 +44,24 @@ export const fileUpload: FileUpload = ({ } export const getFileExtension = (fileName: string, fileMimetype: string, isRemote?: boolean) => { + let extension = '' if (fileMimetype) - return mime.getExtension(fileMimetype) || '' + extension = mime.getExtension(fileMimetype) || '' - if (isRemote) - return '' - - if (fileName) { + if (fileName && !extension) { const fileNamePair = fileName.split('.') const fileNamePairLength = fileNamePair.length if (fileNamePairLength > 1) - return fileNamePair[fileNamePairLength - 1] + extension = fileNamePair[fileNamePairLength - 1] + else + extension = '' } - return '' + if (isRemote) + extension = '' + + return extension } export const getFileAppearanceType = (fileName: string, fileMimetype: string) => { @@ -145,7 +148,7 @@ export const getFileNameFromUrl = (url: string) => { export const getSupportFileExtensionList = (allowFileTypes: string[], allowFileExtensions: string[]) => { if (allowFileTypes.includes(SupportUploadFileTypes.custom)) - return allowFileExtensions.map(item => item.toUpperCase()) + return allowFileExtensions.map(item => item.slice(1).toUpperCase()) return allowFileTypes.map(type => FILE_EXTS[type]).flat() } diff --git 
a/web/app/components/base/markdown.tsx b/web/app/components/base/markdown.tsx index 48d1d2a0a5..37fbc2dfbd 100644 --- a/web/app/components/base/markdown.tsx +++ b/web/app/components/base/markdown.tsx @@ -138,7 +138,7 @@ const CodeBlock: Components['code'] = memo(({ className, children, ...props }) = try { return JSON.parse(String(children).replace(/\n$/, '')) } - catch {} + catch { } } return JSON.parse('{"title":{"text":"ECharts error - Wrong JSON format."}}') }, [language, children]) @@ -196,7 +196,7 @@ const CodeBlock: Components['code'] = memo(({ className, children, ...props }) = >
{languageShowName}
- {(['mermaid', 'svg']).includes(language!) && } + {(['mermaid', 'svg']).includes(language!) && } { }) // AudioBlock.displayName = 'AudioBlock' +const ScriptBlock = memo(({ node }: any) => { + const scriptContent = node.children[0]?.value || '' + return `` +}) +ScriptBlock.displayName = 'ScriptBlock' + const Paragraph: Components['p'] = ({ node, children }) => { const children_node = node!.children if (children_node && children_node[0] && 'tagName' in children_node[0] && children_node[0].tagName === 'img') @@ -278,7 +284,7 @@ export function Markdown(props: { content: string; className?: string }) { } }, ]} - disallowedElements={['script', 'iframe', 'head', 'html', 'meta', 'link', 'style', 'body']} + disallowedElements={['iframe', 'head', 'html', 'meta', 'link', 'style', 'body']} components={{ pre: PreBlock, code: CodeBlock, @@ -289,6 +295,7 @@ export function Markdown(props: { content: string; className?: string }) { p: Paragraph, button: MarkdownButton, form: MarkdownForm, + script: ScriptBlock, }} > {/* Markdown detect has problem. */} diff --git a/web/app/components/develop/doc.tsx b/web/app/components/develop/doc.tsx index eddc07d916..ce5471676d 100644 --- a/web/app/components/develop/doc.tsx +++ b/web/app/components/develop/doc.tsx @@ -1,5 +1,8 @@ 'use client' +import { useEffect, useState } from 'react' import { useContext } from 'use-context-selector' +import { useTranslation } from 'react-i18next' +import { RiListUnordered } from '@remixicon/react' import TemplateEn from './template/template.en.mdx' import TemplateZh from './template/template.zh.mdx' import TemplateAdvancedChatEn from './template/template_advanced_chat.en.mdx' @@ -17,6 +20,9 @@ type IDocProps = { const Doc = ({ appDetail }: IDocProps) => { const { locale } = useContext(I18n) + const { t } = useTranslation() + const [toc, setToc] = useState>([]) + const [isTocExpanded, setIsTocExpanded] = useState(false) const variables = appDetail?.model_config?.configs?.prompt_variables || [] const inputs = variables.reduce((res: any, variable: any) => { @@ -24,21 +30,87 @@ const Doc = ({ appDetail }: IDocProps) => { return res }, {}) + useEffect(() => { + const mediaQuery = window.matchMedia('(min-width: 1280px)') + setIsTocExpanded(mediaQuery.matches) + }, []) + + useEffect(() => { + const extractTOC = () => { + const article = document.querySelector('article') + if (article) { + const headings = article.querySelectorAll('h2') + const tocItems = Array.from(headings).map((heading) => { + const anchor = heading.querySelector('a') + if (anchor) { + return { + href: anchor.getAttribute('href') || '', + text: anchor.textContent || '', + } + } + return null + }).filter((item): item is { href: string; text: string } => item !== null) + setToc(tocItems) + } + } + + // Run after component has rendered + setTimeout(extractTOC, 0) + }, [appDetail, locale]) + return ( -
- {(appDetail?.mode === 'chat' || appDetail?.mode === 'agent-chat') && ( - locale !== LanguagesSupported[1] ? : - )} - {appDetail?.mode === 'advanced-chat' && ( - locale !== LanguagesSupported[1] ? : - )} - {appDetail?.mode === 'workflow' && ( - locale !== LanguagesSupported[1] ? : - )} - {appDetail?.mode === 'completion' && ( - locale !== LanguagesSupported[1] ? : - )} -
+
+
+ {isTocExpanded + ? ( + + ) + : ( + + )} +
+
+ {(appDetail?.mode === 'chat' || appDetail?.mode === 'agent-chat') && ( + locale !== LanguagesSupported[1] ? : + )} + {appDetail?.mode === 'advanced-chat' && ( + locale !== LanguagesSupported[1] ? : + )} + {appDetail?.mode === 'workflow' && ( + locale !== LanguagesSupported[1] ? : + )} + {appDetail?.mode === 'completion' && ( + locale !== LanguagesSupported[1] ? : + )} +
+
) } diff --git a/web/app/components/develop/template/template.en.mdx b/web/app/components/develop/template/template.en.mdx index 61ecd7ae97..c923ea30db 100755 --- a/web/app/components/develop/template/template.en.mdx +++ b/web/app/components/develop/template/template.en.mdx @@ -503,7 +503,7 @@ The text generation application offers non-session support and is ideal for tran diff --git a/web/app/components/develop/template/template_advanced_chat.en.mdx b/web/app/components/develop/template/template_advanced_chat.en.mdx index c3c3f7c6f3..7d64caa769 100644 --- a/web/app/components/develop/template/template_advanced_chat.en.mdx +++ b/web/app/components/develop/template/template_advanced_chat.en.mdx @@ -480,7 +480,7 @@ Chat applications support session persistence, allowing previous chat history to @@ -884,7 +884,7 @@ Chat applications support session persistence, allowing previous chat history to diff --git a/web/app/components/develop/template/template_chat.en.mdx b/web/app/components/develop/template/template_chat.en.mdx index f44f991b89..ac8ee9d657 100644 --- a/web/app/components/develop/template/template_chat.en.mdx +++ b/web/app/components/develop/template/template_chat.en.mdx @@ -444,7 +444,7 @@ Chat applications support session persistence, allowing previous chat history to @@ -918,7 +918,7 @@ Chat applications support session persistence, allowing previous chat history to diff --git a/web/app/components/develop/template/template_workflow.en.mdx b/web/app/components/develop/template/template_workflow.en.mdx index 6cb02bf844..be2ef54743 100644 --- a/web/app/components/develop/template/template_workflow.en.mdx +++ b/web/app/components/develop/template/template_workflow.en.mdx @@ -32,7 +32,7 @@ Workflow applications offers non-session support and is ideal for translation, a @@ -505,7 +505,7 @@ Workflow applications offers non-session support and is ideal for translation, a diff --git a/web/app/components/explore/sidebar/index.tsx b/web/app/components/explore/sidebar/index.tsx index a4a40a00a2..13d5a0ec8f 100644 --- a/web/app/components/explore/sidebar/index.tsx +++ b/web/app/components/explore/sidebar/index.tsx @@ -11,6 +11,7 @@ import cn from '@/utils/classnames' import { fetchInstalledAppList as doFetchInstalledAppList, uninstallApp, updatePinStatus } from '@/service/explore' import ExploreContext from '@/context/explore-context' import Confirm from '@/app/components/base/confirm' +import Divider from '@/app/components/base/divider' import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints' const SelectedDiscoveryIcon = () => ( @@ -89,6 +90,7 @@ const SideBar: FC = ({ fetchInstalledAppList() }, [controlUpdateInstalledApps]) + const pinnedAppsCount = installedApps.filter(({ is_pinned }) => is_pinned).length return (
@@ -109,10 +111,9 @@ const SideBar: FC = ({ height: 'calc(100vh - 250px)', }} > - {installedApps.map(({ id, is_pinned, uninstallable, app: { name, icon_type, icon, icon_url, icon_background } }) => { - return ( + {installedApps.map(({ id, is_pinned, uninstallable, app: { name, icon_type, icon, icon_url, icon_background } }, index) => ( + = ({ setShowConfirm(true) }} /> - ) - })} + {index === pinnedAppsCount - 1 && index !== installedApps.length - 1 && } + + ))}
)} diff --git a/web/app/components/header/account-setting/members-page/index.tsx b/web/app/components/header/account-setting/members-page/index.tsx index 2eaee6f901..b599eb09e7 100644 --- a/web/app/components/header/account-setting/members-page/index.tsx +++ b/web/app/components/header/account-setting/members-page/index.tsx @@ -34,13 +34,12 @@ const MembersPage = () => { } const { locale } = useContext(I18n) - const { userProfile, currentWorkspace, isCurrentWorkspaceManager } = useAppContext() + const { userProfile, currentWorkspace, isCurrentWorkspaceOwner, isCurrentWorkspaceManager } = useAppContext() const { data, mutate } = useSWR({ url: '/workspaces/current/members' }, fetchMembers) const [inviteModalVisible, setInviteModalVisible] = useState(false) const [invitationResults, setInvitationResults] = useState([]) const [invitedModalVisible, setInvitedModalVisible] = useState(false) const accounts = data?.accounts || [] - const owner = accounts.filter(account => account.role === 'owner')?.[0]?.email === userProfile.email const { plan, enableBilling } = useProviderContext() const isNotUnlimitedMemberPlan = enableBilling && plan.type !== Plan.team && plan.type !== Plan.enterprise const isMemberFull = enableBilling && isNotUnlimitedMemberPlan && accounts.length >= plan.total.teamMembers @@ -109,8 +108,8 @@ const MembersPage = () => {
{dayjs(Number((account.last_active_at || account.created_at)) * 1000).locale(locale === 'zh-Hans' ? 'zh-cn' : 'en').fromNow()}
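The timestamp rendering above does two conversions worth noting: the API field is in seconds while dayjs expects milliseconds, and the UI locale `zh-Hans` maps to dayjs's `zh-cn` locale name. A self-contained sketch:

```ts
// Sketch of the "last active" label: seconds -> milliseconds, then a
// relative-time string in the mapped dayjs locale.
import dayjs from 'dayjs'
import relativeTime from 'dayjs/plugin/relativeTime'
import 'dayjs/locale/zh-cn'

dayjs.extend(relativeTime)

const lastActiveLabel = (tsSeconds: number, uiLocale: string): string =>
  dayjs(tsSeconds * 1000).locale(uiLocale === 'zh-Hans' ? 'zh-cn' : 'en').fromNow()

console.log(lastActiveLabel(Math.floor(Date.now() / 1000) - 3600, 'en')) // "an hour ago"
```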
{ - (owner && account.role !== 'owner') - ? + ((isCurrentWorkspaceOwner && account.role !== 'owner') || (isCurrentWorkspaceManager && !['owner', 'admin'].includes(account.role))) + ? :
{RoleMap[account.role] || RoleMap.normal}
}
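The condition above, together with the `roleList` derivation in the Operation dropdown diff that follows, encodes the member-management rules of this change. A hedged sketch (helper names are illustrative; the page inlines the expressions):

```ts
type Role = 'owner' | 'admin' | 'editor' | 'normal' | 'dataset_operator'

// An owner may change any non-owner; an admin (workspace manager) may
// change anyone who is neither owner nor admin.
const canEditRole = (isOwner: boolean, isManager: boolean, target: Role): boolean =>
  (isOwner && target !== 'owner')
  || (isManager && !['owner', 'admin'].includes(target))

// Roles the operator may assign, mirroring the Operation dropdown below:
// owners get the full list, admins a reduced one, everyone else nothing.
const assignableRoles = (operatorRole: Role, datasetOperatorEnabled: boolean): Role[] => {
  const base: Role[] = operatorRole === 'owner'
    ? ['admin', 'editor', 'normal']
    : operatorRole === 'admin'
      ? ['editor', 'normal']
      : []
  return datasetOperatorEnabled && base.length > 0 ? [...base, 'dataset_operator'] : base
}

console.log(canEditRole(false, true, 'admin')) // false: admins cannot edit admins
console.log(assignableRoles('admin', true))    // ['editor', 'normal', 'dataset_operator']
```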
diff --git a/web/app/components/header/account-setting/members-page/operation/index.tsx b/web/app/components/header/account-setting/members-page/operation/index.tsx index e1fe25cb96..82867ec522 100644 --- a/web/app/components/header/account-setting/members-page/operation/index.tsx +++ b/web/app/components/header/account-setting/members-page/operation/index.tsx @@ -26,11 +26,13 @@ const itemDescClassName = ` type IOperationProps = { member: Member + operatorRole: string onOperate: () => void } const Operation = ({ member, + operatorRole, onOperate, }: IOperationProps) => { const { t } = useTranslation() @@ -43,11 +45,20 @@ const Operation = ({ dataset_operator: t('common.members.datasetOperator'), } const roleList = useMemo(() => { - return [ - ...['admin', 'editor', 'normal'], - ...(datasetOperatorEnabled ? ['dataset_operator'] : []), - ] - }, [datasetOperatorEnabled]) + if (operatorRole === 'owner') { + return [ + ...['admin', 'editor', 'normal'], + ...(datasetOperatorEnabled ? ['dataset_operator'] : []), + ] + } + if (operatorRole === 'admin') { + return [ + ...['editor', 'normal'], + ...(datasetOperatorEnabled ? ['dataset_operator'] : []), + ] + } + return [] + }, [operatorRole, datasetOperatorEnabled]) const { notify } = useContext(ToastContext) const toHump = (name: string) => name.replace(/_(\w)/g, (all, letter) => letter.toUpperCase()) const handleDeleteMemberOrCancelInvitation = async () => { diff --git a/web/app/components/header/account-setting/model-provider-page/index.tsx b/web/app/components/header/account-setting/model-provider-page/index.tsx index f807bd7922..7faf3f3de7 100644 --- a/web/app/components/header/account-setting/model-provider-page/index.tsx +++ b/web/app/components/header/account-setting/model-provider-page/index.tsx @@ -222,7 +222,7 @@ const ModelProviderPage = ({ searchText }: Props) => { {!collapse && !isPluginsLoading && (
{plugins.map(plugin => ( - + ))}
)} diff --git a/web/app/components/header/index.tsx b/web/app/components/header/index.tsx index 8f44bf6eae..a3b344e747 100644 --- a/web/app/components/header/index.tsx +++ b/web/app/components/header/index.tsx @@ -14,6 +14,7 @@ import PluginsNav from './plugins-nav' import ExploreNav from './explore-nav' import ToolsNav from './tools-nav' import GithubStar from './github-star' +import LicenseNav from './license-env' import { WorkspaceProvider } from '@/context/workspace-context' import { useAppContext } from '@/context/app-context' import LogoSite from '@/app/components/base/logo/logo-site' @@ -61,29 +62,29 @@ const Header = () => {
} {!isMobile - &&
- - - -
/
-
- - - - {enableBilling && ( -
- - -
- - {t('billing.upgradeBtn.encourageShort')} - -
-
-
- )} + &&
+ + + +
/
+
+ + + + {enableBilling && ( +
+ + +
+ + {t('billing.upgradeBtn.encourageShort')} + +
+
+
+ )} +
-
}
{isMobile && ( @@ -116,6 +117,7 @@ const Header = () => {
)}
+ <LicenseNav />
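The `LicenseNav` component added to the header (defined in the new file below) shows a countdown badge while the license is EXPIRING and an "Enterprise" badge while it is ACTIVE. A minimal sketch of the countdown, assuming `expired_at` is a value dayjs can parse (the wire format is not shown in this diff):

```ts
// Sketch of the expiry countdown in LicenseNav.
import dayjs from 'dayjs'

const daysLeft = (expiredAt: string): number => dayjs(expiredAt).diff(dayjs(), 'days')

// Key selection mirrors the component: the singular key at <= 1 day,
// `expiring_plural` otherwise (see the license.* i18n keys added further down).
const licenseBadgeKey = (expiredAt: string): string =>
  daysLeft(expiredAt) <= 1 ? 'common.license.expiring' : 'common.license.expiring_plural'
```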
diff --git a/web/app/components/header/license-env/index.tsx b/web/app/components/header/license-env/index.tsx new file mode 100644 index 0000000000..800d86d2b8 --- /dev/null +++ b/web/app/components/header/license-env/index.tsx @@ -0,0 +1,29 @@ +'use client' + +import AppContext from '@/context/app-context' +import { LicenseStatus } from '@/types/feature' +import { useTranslation } from 'react-i18next' +import { useContextSelector } from 'use-context-selector' +import dayjs from 'dayjs' + +const LicenseNav = () => { + const { t } = useTranslation() + const systemFeatures = useContextSelector(AppContext, s => s.systemFeatures) + + if (systemFeatures.license?.status === LicenseStatus.EXPIRING) { + const expiredAt = systemFeatures.license?.expired_at + const count = dayjs(expiredAt).diff(dayjs(), 'days') + return
+ {count <= 1 && {t('common.license.expiring', { count })}} + {count > 1 && {t('common.license.expiring_plural', { count })}} +
+ } + if (systemFeatures.license.status === LicenseStatus.ACTIVE) { + return
+ Enterprise +
+ } + return null +} + +export default LicenseNav diff --git a/web/app/components/plugins/install-plugin/base/installed.tsx b/web/app/components/plugins/install-plugin/base/installed.tsx index 442a61e372..eba50a6b21 100644 --- a/web/app/components/plugins/install-plugin/base/installed.tsx +++ b/web/app/components/plugins/install-plugin/base/installed.tsx @@ -1,11 +1,13 @@ 'use client' import type { FC } from 'react' import React from 'react' -import type { Plugin, PluginDeclaration, PluginManifestInMarket } from '../../types' +import { useTranslation } from 'react-i18next' import Card from '../../card' import Button from '@/app/components/base/button' +import { useUpdateModelProviders } from '@/app/components/header/account-setting/model-provider-page/hooks' +import { PluginType } from '../../types' +import type { Plugin, PluginDeclaration, PluginManifestInMarket } from '../../types' import { pluginManifestInMarketToPluginProps, pluginManifestToCardPluginProps } from '../utils' -import { useTranslation } from 'react-i18next' import Badge, { BadgeState } from '@/app/components/base/badge/index' type Props = { @@ -24,6 +26,13 @@ const Installed: FC = ({ onCancel, }) => { const { t } = useTranslation() + const updateModelProviders = useUpdateModelProviders() + + const handleClose = () => { + onCancel() + if (payload?.category === PluginType.model) + updateModelProviders() + } return ( <>
@@ -45,7 +54,7 @@ const Installed: FC = ({ diff --git a/web/app/components/plugins/install-plugin/base/loading-error.tsx b/web/app/components/plugins/install-plugin/base/loading-error.tsx new file mode 100644 index 0000000000..eb698bb573 --- /dev/null +++ b/web/app/components/plugins/install-plugin/base/loading-error.tsx @@ -0,0 +1,45 @@ +'use client' +import type { FC } from 'react' +import React from 'react' +import { Group } from '../../../base/icons/src/vender/other' +import { LoadingPlaceholder } from '@/app/components/plugins/card/base/placeholder' +import Checkbox from '@/app/components/base/checkbox' +import { RiCloseLine } from '@remixicon/react' +import { useTranslation } from 'react-i18next' + +const LoadingError: FC = () => { + const { t } = useTranslation() + return ( +
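The Installed step above now routes the close button through `handleClose`: close the modal first, then revalidate model providers only when the plugin that was just installed is a model provider. A sketch of that pattern (types and the `'model'` literal stand in for the real `PluginType` enum):

```ts
// Close handler from the install flow: refresh is conditional on category.
type InstalledPayload = { category?: string } | null

const makeCloseHandler = (
  onCancel: () => void,
  updateModelProviders: () => void,
  payload: InstalledPayload,
) => () => {
  onCancel()
  if (payload?.category === 'model')
    updateModelProviders()
}
```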
+ +
+
+
+
+ +
+
+ +
+
+
+
+ {t('plugin.installModal.pluginLoadError')} +
+
+ {t('plugin.installModal.pluginLoadErrorDesc')} +
+
+
+ +
+
+ ) +} +export default React.memo(LoadingError) diff --git a/web/app/components/plugins/install-plugin/install-bundle/item/loading.tsx b/web/app/components/plugins/install-plugin/base/loading.tsx similarity index 91% rename from web/app/components/plugins/install-plugin/install-bundle/item/loading.tsx rename to web/app/components/plugins/install-plugin/base/loading.tsx index 5e33363ecf..52cccc2cd0 100644 --- a/web/app/components/plugins/install-plugin/install-bundle/item/loading.tsx +++ b/web/app/components/plugins/install-plugin/base/loading.tsx @@ -1,6 +1,6 @@ 'use client' import React from 'react' -import Placeholder from '../../../card/base/placeholder' +import Placeholder from '../../card/base/placeholder' import Checkbox from '@/app/components/base/checkbox' const Loading = () => { diff --git a/web/app/components/plugins/install-plugin/install-bundle/item/github-item.tsx b/web/app/components/plugins/install-plugin/install-bundle/item/github-item.tsx index cfbe05ca5d..8440b488b2 100644 --- a/web/app/components/plugins/install-plugin/install-bundle/item/github-item.tsx +++ b/web/app/components/plugins/install-plugin/install-bundle/item/github-item.tsx @@ -4,7 +4,7 @@ import React, { useEffect } from 'react' import type { GitHubItemAndMarketPlaceDependency, Plugin } from '../../../types' import { pluginManifestToCardPluginProps } from '../../utils' import { useUploadGitHub } from '@/service/use-plugins' -import Loading from './loading' +import Loading from '../../base/loading' import LoadedItem from './loaded-item' type Props = { @@ -25,8 +25,8 @@ const Item: FC = ({ const info = dependency.value const { data, error } = useUploadGitHub({ repo: info.repo!, - version: info.version!, - package: info.package!, + version: info.release! || info.version!, + package: info.packages! 
|| info.package!, }) const [payload, setPayload] = React.useState(null) useEffect(() => { diff --git a/web/app/components/plugins/install-plugin/install-bundle/item/marketplace-item.tsx b/web/app/components/plugins/install-plugin/install-bundle/item/marketplace-item.tsx index 836869df63..f7de4d09bc 100644 --- a/web/app/components/plugins/install-plugin/install-bundle/item/marketplace-item.tsx +++ b/web/app/components/plugins/install-plugin/install-bundle/item/marketplace-item.tsx @@ -2,7 +2,7 @@ import type { FC } from 'react' import React from 'react' import type { Plugin } from '../../../types' -import Loading from './loading' +import Loading from '../../base/loading' import LoadedItem from './loaded-item' type Props = { diff --git a/web/app/components/plugins/install-plugin/install-bundle/item/package-item.tsx b/web/app/components/plugins/install-plugin/install-bundle/item/package-item.tsx index bcdc72a1ce..b649aada8f 100644 --- a/web/app/components/plugins/install-plugin/install-bundle/item/package-item.tsx +++ b/web/app/components/plugins/install-plugin/install-bundle/item/package-item.tsx @@ -5,6 +5,7 @@ import type { Plugin } from '../../../types' import type { PackageDependency } from '../../../types' import { pluginManifestToCardPluginProps } from '../../utils' import LoadedItem from './loaded-item' +import LoadingError from '../../base/loading-error' type Props = { checked: boolean @@ -17,6 +18,9 @@ const PackageItem: FC = ({ checked, onCheckedChange, }) => { + if (!payload.value?.manifest) + return + const plugin = pluginManifestToCardPluginProps(payload.value.manifest) return ( = ({ onSelect, onLoadedAllPlugin, }) => { - const { isLoading: isFetchingMarketplaceData, data: marketplaceRes } = useFetchPluginsInMarketPlaceByIds(allPlugins.filter(d => d.type === 'marketplace').map(d => d.value.plugin_unique_identifier!)) + const { isLoading: isFetchingMarketplaceDataFromDSL, data: marketplaceFromDSLRes } = useFetchPluginsInMarketPlaceByIds(allPlugins.filter(d => d.type === 'marketplace').map(d => (d as GitHubItemAndMarketPlaceDependency).value.plugin_unique_identifier!)) + const { isLoading: isFetchingMarketplaceDataFromLocal, data: marketplaceResFromLocalRes } = useFetchPluginsInMarketPlaceByInfo(allPlugins.filter(d => d.type === 'marketplace').map(d => (d as GitHubItemAndMarketPlaceDependency).value!)) + const [plugins, setPlugins, getPlugins] = useGetState<(Plugin | undefined)[]>((() => { + const hasLocalPackage = allPlugins.some(d => d.type === 'package') + if (!hasLocalPackage) + return [] - const [plugins, setPlugins, getPlugins] = useGetState([]) + const _plugins = allPlugins.map((d) => { + if (d.type === 'package') { + return { + ...(d as any).value.manifest, + plugin_id: (d as any).value.unique_identifier, + } + } + + return undefined + }) + return _plugins + })()) const [errorIndexes, setErrorIndexes] = useState([]) @@ -53,21 +70,50 @@ const InstallByDSLList: FC = ({ }, [allPlugins]) useEffect(() => { - if (!isFetchingMarketplaceData && marketplaceRes?.data.plugins && marketplaceRes?.data.plugins.length > 0) { - const payloads = marketplaceRes?.data.plugins - + if (!isFetchingMarketplaceDataFromDSL && marketplaceFromDSLRes?.data.plugins) { + const payloads = marketplaceFromDSLRes?.data.plugins + const failedIndex: number[] = [] const nextPlugins = produce(getPlugins(), (draft) => { marketPlaceInDSLIndex.forEach((index, i) => { - draft[index] = payloads[i] + if (payloads[i]) + draft[index] = payloads[i] + else + failedIndex.push(index) }) }) setPlugins(nextPlugins) - // 
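The fallback chain above exists because, per the comments added to `GitHubItemAndMarketPlaceDependency` later in this diff, dependencies coming from an app DSL carry `version`/`package` while local bundle files carry the equivalent `release`/`packages`. A sketch of the normalization, assuming one field of each pair is present:

```ts
// Normalizing the two shapes of a GitHub plugin dependency.
type GitHubDependencyValue = {
  repo?: string
  version?: string  // from app DSL
  package?: string  // from app DSL
  release?: string  // from local bundle, same meaning as version
  packages?: string // from local bundle, same meaning as package
}

const normalizeGitHubDependency = (v: GitHubDependencyValue) => ({
  repo: v.repo!,
  version: v.release || v.version!,
  package: v.packages || v.package!,
})

console.log(normalizeGitHubDependency({ repo: 'org/repo', release: 'v1.2.0', packages: 'plugin.difypkg' }))
```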
marketplaceRes?.data.plugins + if (failedIndex.length > 0) + setErrorIndexes([...errorIndexes, ...failedIndex]) } // eslint-disable-next-line react-hooks/exhaustive-deps - }, [isFetchingMarketplaceData]) + }, [isFetchingMarketplaceDataFromDSL]) - const isLoadedAllData = allPlugins.length === plugins.length && plugins.every(p => !!p) + useEffect(() => { + if (!isFetchingMarketplaceDataFromLocal && marketplaceResFromLocalRes?.data.list) { + const payloads = marketplaceResFromLocalRes?.data.list + const failedIndex: number[] = [] + const nextPlugins = produce(getPlugins(), (draft) => { + marketPlaceInDSLIndex.forEach((index, i) => { + if (payloads[i]) { + const item = payloads[i] + draft[index] = { + ...item.plugin, + plugin_id: item.version.unique_identifier, + } + } + else { + failedIndex.push(index) + } + }) + }) + setPlugins(nextPlugins) + if (failedIndex.length > 0) + setErrorIndexes([...errorIndexes, ...failedIndex]) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [isFetchingMarketplaceDataFromLocal]) + + const isLoadedAllData = (plugins.filter(p => !!p).length + errorIndexes.length) === allPlugins.length useEffect(() => { if (isLoadedAllData) onLoadedAllPlugin() @@ -76,7 +122,7 @@ const InstallByDSLList: FC = ({ const handleSelect = useCallback((index: number) => { return () => { - onSelect(plugins[index], index) + onSelect(plugins[index]!, index) } }, [onSelect, plugins]) return ( @@ -84,7 +130,7 @@ const InstallByDSLList: FC = ({ {allPlugins.map((d, index) => { if (errorIndexes.includes(index)) { return ( -
error
+ ) } if (d.type === 'github') { diff --git a/web/app/components/plugins/install-plugin/install-bundle/steps/install.tsx b/web/app/components/plugins/install-plugin/install-bundle/steps/install.tsx index 9c361d4e4b..389cb3d9ca 100644 --- a/web/app/components/plugins/install-plugin/install-bundle/steps/install.tsx +++ b/web/app/components/plugins/install-plugin/install-bundle/steps/install.tsx @@ -5,7 +5,7 @@ import type { Dependency, InstallStatusResponse, Plugin } from '../../../types' import Button from '@/app/components/base/button' import { RiLoader2Line } from '@remixicon/react' import { useTranslation } from 'react-i18next' -import InstallByDSLList from './install-multi' +import InstallMulti from './install-multi' import { useInstallFromMarketplaceAndGitHub } from '@/service/use-plugins' import { useInvalidateInstalledPluginList } from '@/service/use-plugins' const i18nPrefix = 'plugin.installModal' @@ -58,7 +58,10 @@ const Install: FC = ({ }, }) const handleInstall = () => { - installFromMarketplaceAndGitHub(allPlugins.filter((_d, index) => selectedIndexes.includes(index))) + installFromMarketplaceAndGitHub({ + payload: allPlugins.filter((_d, index) => selectedIndexes.includes(index)), + plugin: selectedPlugins, + }) } return ( <> @@ -67,7 +70,7 @@ const Install: FC = ({
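The reworked `isLoadedAllData` computed above no longer waits for every dependency to resolve to a plugin: loading is considered finished once each entry has either resolved or been recorded in `errorIndexes`. A sketch of the check:

```ts
// Completion check from install-multi: resolved plugins plus failed
// indexes must account for every requested dependency.
const isLoadedAll = <T>(plugins: (T | undefined)[], errorIndexes: number[], total: number): boolean =>
  plugins.filter(p => !!p).length + errorIndexes.length === total

console.log(isLoadedAll([{}, undefined, {}], [1], 3)) // true: two loaded, one failed
```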

{t(`${i18nPrefix}.${selectedPluginsNum > 1 ? 'readyToInstallPackages' : 'readyToInstallPackage'}`, { num: selectedPluginsNum })}

- = ({ const [uniqueIdentifier, setUniqueIdentifier] = useState(null) const [manifest, setManifest] = useState(null) const [errorMsg, setErrorMsg] = useState(null) - const isBundle = file.name.endsWith('.bundle') + const isBundle = file.name.endsWith('.difybndl') const [dependencies, setDependencies] = useState([]) const getTitle = useCallback(() => { diff --git a/web/app/components/plugins/install-plugin/install-from-marketplace/index.tsx b/web/app/components/plugins/install-plugin/install-from-marketplace/index.tsx index b721a84454..ad5b596b73 100644 --- a/web/app/components/plugins/install-plugin/install-from-marketplace/index.tsx +++ b/web/app/components/plugins/install-plugin/install-from-marketplace/index.tsx @@ -52,6 +52,7 @@ const InstallFromMarketplace: React.FC = ({ diff --git a/web/app/components/plugins/marketplace/empty/index.tsx b/web/app/components/plugins/marketplace/empty/index.tsx index 25f8efc504..32b706a291 100644 --- a/web/app/components/plugins/marketplace/empty/index.tsx +++ b/web/app/components/plugins/marketplace/empty/index.tsx @@ -4,12 +4,22 @@ import { Group } from '@/app/components/base/icons/src/vender/other' import Line from './line' import cn from '@/utils/classnames' -const Empty = () => { +type Props = { + text?: string + lightCard?: boolean + className?: string +} + +const Empty = ({ + text, + lightCard, + className, +}: Props) => { const { t } = useTranslation() return (
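The `.bundle` extension is renamed to `.difybndl` here, matching the updated `SUPPORT_INSTALL_LOCAL_FILE_EXTENSIONS` allow-list in web/config/index.ts later in this diff. A sketch tying the two together (helper names are illustrative):

```ts
// Local plugin files: .difypkg for single packages, .difybndl for bundles.
const SUPPORT_INSTALL_LOCAL_FILE_EXTENSIONS = '.difypkg,.difybndl'

const isSupportedLocalFile = (name: string): boolean =>
  SUPPORT_INSTALL_LOCAL_FILE_EXTENSIONS.split(',').some(ext => name.endsWith(ext))

const isBundle = (name: string): boolean => name.endsWith('.difybndl')

console.log(isSupportedLocalFile('my-plugin.difypkg')) // true
console.log(isBundle('my-bundle.difybndl'))            // true
console.log(isSupportedLocalFile('my-plugin.bundle'))  // false after the rename
```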
{ Array.from({ length: 16 }).map((_, index) => ( @@ -19,6 +29,7 @@ const Empty = () => { 'mr-3 mb-3 h-[144px] w-[calc((100%-36px)/4)] rounded-xl bg-background-section-burn', index % 4 === 3 && 'mr-0', index > 11 && 'mb-0', + lightCard && 'bg-background-default-lighter', )} >
@@ -28,7 +39,7 @@ const Empty = () => { className='absolute inset-0 bg-marketplace-plugin-empty z-[1]' >
-
+
@@ -36,7 +47,7 @@ const Empty = () => {
- {t('plugin.marketplace.noPluginFound')} + {text || t('plugin.marketplace.noPluginFound')}
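With the new optional props, the marketplace `Empty` state becomes reusable outside the marketplace: callers may override the label and lighten the placeholder tiles. A sketch of the fallback, with `t` standing in for react-i18next's translate function:

```ts
// Caller-supplied text wins; otherwise the marketplace default copy.
const emptyLabel = (text: string | undefined, t: (key: string) => string): string =>
  text || t('plugin.marketplace.noPluginFound')
```

The tools list later in this diff reuses it as `<Empty lightCard text={t('tools.noTools')} className='px-12' />`, where `lightCard` only switches the placeholder tiles to the lighter background token.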
diff --git a/web/app/components/plugins/marketplace/list/card-wrapper.tsx b/web/app/components/plugins/marketplace/list/card-wrapper.tsx index 364a1b2b58..06541fe6d6 100644 --- a/web/app/components/plugins/marketplace/list/card-wrapper.tsx +++ b/web/app/components/plugins/marketplace/list/card-wrapper.tsx @@ -48,19 +48,19 @@ const CardWrapper = ({
- + +
) } @@ -94,27 +94,6 @@ const CardWrapper = ({ /> } /> - { - showInstallButton && ( -
- - -
- ) - } ) } diff --git a/web/app/components/plugins/plugin-detail-panel/detail-header.tsx b/web/app/components/plugins/plugin-detail-panel/detail-header.tsx index 5e1b0770f7..0127c0059f 100644 --- a/web/app/components/plugins/plugin-detail-panel/detail-header.tsx +++ b/web/app/components/plugins/plugin-detail-panel/detail-header.tsx @@ -9,7 +9,7 @@ import { RiVerifiedBadgeLine, } from '@remixicon/react' import type { PluginDetail } from '../types' -import { PluginSource } from '../types' +import { PluginSource, PluginType } from '../types' import Description from '../card/base/description' import Icon from '../card/base/card-icon' import Title from '../card/base/title' @@ -30,6 +30,7 @@ import { Github } from '@/app/components/base/icons/src/public/common' import { uninstallPlugin } from '@/service/plugins' import { useGetLanguage } from '@/context/i18n' import { useModalContext } from '@/context/modal-context' +import { useProviderContext } from '@/context/provider-context' import { API_PREFIX, MARKETPLACE_URL_PREFIX } from '@/config' import cn from '@/utils/classnames' @@ -38,7 +39,7 @@ const i18nPrefix = 'plugin.action' type Props = { detail: PluginDetail onHide: () => void - onUpdate: () => void + onUpdate: (isDelete?: boolean) => void } const DetailHeader = ({ @@ -50,6 +51,7 @@ const DetailHeader = ({ const locale = useGetLanguage() const { checkForUpdates, fetchReleases } = useGitHubReleases() const { setShowUpdatePluginModal } = useModalContext() + const { refreshModelProviders } = useProviderContext() const { installation_id, @@ -61,7 +63,7 @@ const DetailHeader = ({ meta, plugin_id, } = detail - const { author, name, label, description, icon, verified } = detail.declaration + const { author, category, name, label, description, icon, verified } = detail.declaration const isFromGitHub = source === PluginSource.github const isFromMarketplace = source === PluginSource.marketplace @@ -77,6 +79,14 @@ const DetailHeader = ({ return false }, [isFromMarketplace, latest_version, version]) + const detailUrl = useMemo(() => { + if (isFromGitHub) + return `https://github.com/${meta!.repo}` + if (isFromMarketplace) + return `${MARKETPLACE_URL_PREFIX}/plugins/${author}/${name}` + return '' + }, [author, isFromGitHub, isFromMarketplace, meta, name]) + const [isShowUpdateModal, { setTrue: showUpdateModal, setFalse: hideUpdateModal, @@ -139,9 +149,11 @@ const DetailHeader = ({ hideDeleting() if (res.success) { hideDeleteConfirm() - onUpdate() + onUpdate(true) + if (category === PluginType.model) + refreshModelProviders() } - }, [hideDeleteConfirm, hideDeleting, installation_id, showDeleting, onUpdate]) + }, [showDeleting, installation_id, hideDeleting, hideDeleteConfirm, onUpdate, category, refreshModelProviders]) // #plugin TODO# used in apps // const usedInApps = 3 @@ -225,7 +237,7 @@ const DetailHeader = ({ onInfo={showPluginInfo} onCheckVersion={handleUpdate} onRemove={showDeleteConfirm} - detailUrl={`${MARKETPLACE_URL_PREFIX}/plugin/${author}/${name}`} + detailUrl={detailUrl} /> diff --git a/web/app/components/plugins/plugin-detail-panel/index.tsx b/web/app/components/plugins/plugin-detail-panel/index.tsx index da8c23a93f..cda554099b 100644 --- a/web/app/components/plugins/plugin-detail-panel/index.tsx +++ b/web/app/components/plugins/plugin-detail-panel/index.tsx @@ -21,6 +21,12 @@ const PluginDetailPanel: FC = ({ const handleHide = () => setCurrentPluginDetail(undefined) + const handleUpdate = (isDelete = false) => { + if (isDelete) + handleHide() + onUpdate() + } + if (!pluginDetail) return 
null @@ -39,7 +45,7 @@ const PluginDetailPanel: FC = ({
{!!pluginDetail.declaration.tool && } diff --git a/web/app/components/plugins/plugin-detail-panel/model-list.tsx b/web/app/components/plugins/plugin-detail-panel/model-list.tsx index 7980920119..7592126867 100644 --- a/web/app/components/plugins/plugin-detail-panel/model-list.tsx +++ b/web/app/components/plugins/plugin-detail-panel/model-list.tsx @@ -21,7 +21,7 @@ const ModelList = () => {
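The `detailUrl` memo in the detail-header change above also fixes the marketplace path (`/plugin/` becomes `/plugins/`) and adds a GitHub target. A sketch of the selection, with an assumed placeholder value for the config constant:

```ts
const MARKETPLACE_URL_PREFIX = 'https://marketplace.dify.ai' // assumed value for this sketch

const detailUrl = (
  source: 'github' | 'marketplace' | string,
  meta: { repo?: string },
  author: string,
  name: string,
): string => {
  if (source === 'github')
    return `https://github.com/${meta.repo}`
  if (source === 'marketplace')
    return `${MARKETPLACE_URL_PREFIX}/plugins/${author}/${name}`
  return ''
}
```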
= ({ className='px-3 py-1.5 rounded-lg text-text-secondary system-md-regular cursor-pointer hover:bg-state-base-hover' >{t('plugin.detailPanel.operation.checkUpdate')}
)} - {source === PluginSource.marketplace && ( + {(source === PluginSource.marketplace || source === PluginSource.github) && ( {t('plugin.detailPanel.operation.viewDetail')} diff --git a/web/app/components/plugins/plugin-item/action.tsx b/web/app/components/plugins/plugin-item/action.tsx index a387727b4f..1bc34c9928 100644 --- a/web/app/components/plugins/plugin-item/action.tsx +++ b/web/app/components/plugins/plugin-item/action.tsx @@ -55,7 +55,7 @@ const Action: FC = ({ const handleFetchNewVersion = async () => { const fetchedReleases = await fetchReleases(author, pluginName) - if (fetchReleases.length === 0) return + if (fetchedReleases.length === 0) return const { needUpdate, toastProps } = checkForUpdates(fetchedReleases, meta!.version) Toast.notify(toastProps) if (needUpdate) { diff --git a/web/app/components/plugins/plugin-item/index.tsx b/web/app/components/plugins/plugin-item/index.tsx index eb833e0781..13c8797358 100644 --- a/web/app/components/plugins/plugin-item/index.tsx +++ b/web/app/components/plugins/plugin-item/index.tsx @@ -12,7 +12,7 @@ import { useTranslation } from 'react-i18next' import { usePluginPageContext } from '../plugin-page/context' import { Github } from '../../base/icons/src/public/common' import Badge from '../../base/badge' -import { type PluginDetail, PluginSource } from '../types' +import { type PluginDetail, PluginSource, PluginType } from '../types' import CornerMark from '../card/base/corner-mark' import Description from '../card/base/description' import OrgInfo from '../card/base/org-info' @@ -23,6 +23,7 @@ import { API_PREFIX, MARKETPLACE_URL_PREFIX } from '@/config' import { useLanguage } from '../../header/account-setting/model-provider-page/hooks' import { useInvalidateInstalledPluginList } from '@/service/use-plugins' import { useCategories } from '../hooks' +import { useProviderContext } from '@/context/provider-context' type Props = { className?: string @@ -39,6 +40,7 @@ const PluginItem: FC = ({ const currentPluginDetail = usePluginPageContext(v => v.currentPluginDetail) const setCurrentPluginDetail = usePluginPageContext(v => v.setCurrentPluginDetail) const invalidateInstalledPluginList = useInvalidateInstalledPluginList() + const { refreshModelProviders } = useProviderContext() const { source, @@ -54,6 +56,12 @@ const PluginItem: FC = ({ const orgName = useMemo(() => { return [PluginSource.github, PluginSource.marketplace].includes(source) ? author : '' }, [source, author]) + + const handleDelete = () => { + invalidateInstalledPluginList() + if (category === PluginType.model) + refreshModelProviders() + } return (
= ({ isShowInfo={source === PluginSource.github} isShowDelete meta={meta} - onDelete={() => { - invalidateInstalledPluginList() - }} + onDelete={handleDelete} />
@@ -136,7 +142,7 @@ const PluginItem: FC = ({ } {source === PluginSource.marketplace && <> - +
{t('plugin.from')} marketplace
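The `handleDelete` above relies on `refreshModelProviders`, which the provider-context change further down obtains by re-exporting SWR's bound `mutate` through context, so install and uninstall flows anywhere in the tree can revalidate the provider list. A hedged sketch of that wiring (the fetcher is assumed; the app uses its own service layer):

```ts
import useSWR from 'swr'

// Assumed fetcher for the sketch only.
const fetchModelProviders = (url: string) => fetch(url).then(res => res.json())

export const useRefreshableModelProviders = () => {
  // SWR's bound mutate revalidates exactly this key when called.
  const { data, mutate: refreshModelProviders } = useSWR('/workspaces/current/model-providers', fetchModelProviders)
  return { modelProviders: data?.data ?? [], refreshModelProviders }
}
```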
diff --git a/web/app/components/plugins/plugin-page/plugin-tasks/index.tsx b/web/app/components/plugins/plugin-page/plugin-tasks/index.tsx index 2d7ae46874..e7fd8ad4ec 100644 --- a/web/app/components/plugins/plugin-page/plugin-tasks/index.tsx +++ b/web/app/components/plugins/plugin-page/plugin-tasks/index.tsx @@ -119,7 +119,7 @@ const PluginTasks = () => {
- +
{t('plugin.task.installedError', { errorLength: errorPlugins.length })} diff --git a/web/app/components/plugins/provider-card.tsx b/web/app/components/plugins/provider-card.tsx index 3f2d889bc0..f0997dc98a 100644 --- a/web/app/components/plugins/provider-card.tsx +++ b/web/app/components/plugins/provider-card.tsx @@ -19,13 +19,11 @@ import { useBoolean } from 'ahooks' type Props = { className?: string payload: Plugin - onSuccess: () => void } const ProviderCard: FC = ({ className, payload, - onSuccess, }) => { const { t } = useTranslation() const [isShowInstallFromMarketplace, { @@ -42,7 +40,7 @@ const ProviderCard: FC = ({
- + <Title title={label[language] || label.en_US} /> {/* <RiVerifiedBadgeLine className="shrink-0 ml-0.5 w-4 h-4 text-text-accent" /> */} </div> <div className='mb-1 flex justify-between items-center h-4'> @@ -54,7 +52,7 @@ const ProviderCard: FC<Props> = ({ </div> </div> </div> - <Description className='mt-3' text={payload.brief[language]} descriptionLineRows={2}></Description> + <Description className='mt-3' text={payload.brief[language] || payload.brief.en_US} descriptionLineRows={2}></Description> <div className='mt-3 flex space-x-0.5'> {payload.tags.map(tag => ( <Badge key={tag.name} text={tag.name} /> @@ -86,10 +84,7 @@ const ProviderCard: FC<Props> = ({ manifest={payload as any} uniqueIdentifier={payload.latest_package_identifier} onClose={hideInstallFromMarketplace} - onSuccess={() => { - onSuccess() - hideInstallFromMarketplace() - }} + onSuccess={() => hideInstallFromMarketplace()} /> ) } diff --git a/web/app/components/plugins/types.ts b/web/app/components/plugins/types.ts index 645ee1a7a2..34cd0c7308 100644 --- a/web/app/components/plugins/types.ts +++ b/web/app/components/plugins/types.ts @@ -310,13 +310,25 @@ export type UninstallPluginResponse = { export type PluginsFromMarketplaceResponse = { plugins: Plugin[] } +export type PluginsFromMarketplaceByInfoResponse = { + list: { + plugin: Plugin + version: { + plugin_name: string + plugin_org: string + unique_identifier: string + } + }[] +} export type GitHubItemAndMarketPlaceDependency = { type: 'github' | 'marketplace' | 'package' value: { repo?: string - version?: string - package?: string + version?: string // from app DSL + package?: string // from app DSL + release?: string // from local package. same to the version + packages?: string // from local package. same to the package github_plugin_unique_identifier?: string marketplace_plugin_unique_identifier?: string plugin_unique_identifier?: string diff --git a/web/app/components/share/text-generation/index.tsx b/web/app/components/share/text-generation/index.tsx index 296bb2bef8..6a7037901f 100644 --- a/web/app/components/share/text-generation/index.tsx +++ b/web/app/components/share/text-generation/index.tsx @@ -391,7 +391,10 @@ const TextGeneration: FC<IMainProps> = ({ const { user_input_form, more_like_this, file_upload, text_to_speech }: any = appParams setVisionConfig({ - ...file_upload.image, + // legacy of image upload compatible + ...file_upload, + transfer_methods: file_upload.allowed_file_upload_methods || file_upload.allowed_upload_methods, + // legacy of image upload compatible image_file_size_limit: appParams?.system_parameters?.image_file_size_limit, }) const prompt_variables = userInputsFormToPromptVariables(user_input_form) diff --git a/web/app/components/tools/provider-list.tsx b/web/app/components/tools/provider-list.tsx index 364c5e00b9..2c0d52b0ba 100644 --- a/web/app/components/tools/provider-list.tsx +++ b/web/app/components/tools/provider-list.tsx @@ -1,5 +1,5 @@ 'use client' -import { useEffect, useMemo, useRef, useState } from 'react' +import { useMemo, useRef, useState } from 'react' import { useTranslation } from 'react-i18next' import type { Collection } from './types' import Marketplace from './marketplace' @@ -9,7 +9,7 @@ import TabSliderNew from '@/app/components/base/tab-slider-new' import LabelFilter from '@/app/components/tools/labels/filter' import Input from '@/app/components/base/input' import ProviderDetail from '@/app/components/tools/provider/detail' -import Empty from '@/app/components/tools/add-tool-modal/empty' +import Empty from 
'@/app/components/plugins/marketplace/empty' import Card from '@/app/components/plugins/card' import CardMoreInfo from '@/app/components/plugins/card/card-more-info' import { useSelector as useAppContextSelector } from '@/context/app-context' @@ -36,7 +36,7 @@ const ProviderList = () => { const handleKeywordsChange = (value: string) => { setKeywords(value) } - const { data: collectionList, refetch } = useAllToolProviders() + const { data: collectionList = [], refetch } = useAllToolProviders() const filteredCollectionList = useMemo(() => { return collectionList.filter((collection) => { if (collection.type !== activeTab) @@ -50,12 +50,6 @@ const ProviderList = () => { }, [activeTab, tagFilterValue, keywords, collectionList]) const [currentProvider, setCurrentProvider] = useState<Collection | undefined>() - useEffect(() => { - if (currentProvider && collectionList.length > 0) { - const newCurrentProvider = collectionList.find(collection => collection.id === currentProvider.id) - setCurrentProvider(newCurrentProvider) - } - }, [collectionList, currentProvider]) return ( <div className='relative flex overflow-hidden bg-gray-100 shrink-0 h-0 grow'> @@ -88,34 +82,38 @@ const ProviderList = () => { /> </div> </div> - <div className={cn( - 'relative grid content-start grid-cols-1 gap-4 px-12 pt-2 pb-4 sm:grid-cols-2 md:grid-cols-3 lg:grid-cols-4 grow shrink-0', - )}> - {filteredCollectionList.map(collection => ( - <div - key={collection.id} - onClick={() => setCurrentProvider(collection)} - > - <Card - className={cn( - 'border-[1.5px] border-transparent cursor-pointer', - currentProvider?.id === collection.id && 'border-components-option-card-option-selected-border', - )} - hideCornerMark - payload={{ - ...collection, - brief: collection.description, - } as any} - footer={ - <CardMoreInfo - tags={collection.labels} - /> - } - /> - </div> - ))} - {!filteredCollectionList.length && <div className='absolute top-1/2 left-1/2 -translate-x-1/2 -translate-y-1/2'><Empty /></div>} - </div> + {filteredCollectionList.length > 0 && ( + <div className={cn( + 'relative grid content-start grid-cols-1 gap-4 px-12 pt-2 pb-4 sm:grid-cols-2 md:grid-cols-3 lg:grid-cols-4 grow shrink-0', + )}> + {filteredCollectionList.map(collection => ( + <div + key={collection.id} + onClick={() => setCurrentProvider(collection)} + > + <Card + className={cn( + 'border-[1.5px] border-transparent cursor-pointer', + currentProvider?.id === collection.id && 'border-components-option-card-option-selected-border', + )} + hideCornerMark + payload={{ + ...collection, + brief: collection.description, + } as any} + footer={ + <CardMoreInfo + tags={collection.labels} + /> + } + /> + </div> + ))} + </div> + )} + {!filteredCollectionList.length && ( + <Empty lightCard text={t('tools.noTools')} className='px-12' /> + )} { enable_marketplace && ( <Marketplace diff --git a/web/app/components/workflow/block-selector/index.tsx b/web/app/components/workflow/block-selector/index.tsx index 6f05ba16fb..2a3cc58467 100644 --- a/web/app/components/workflow/block-selector/index.tsx +++ b/web/app/components/workflow/block-selector/index.tsx @@ -22,6 +22,8 @@ import { PortalToFollowElemTrigger, } from '@/app/components/base/portal-to-follow-elem' import Input from '@/app/components/base/input' +import SearchBox from '@/app/components/plugins/marketplace/search-box' + import { Plus02, } from '@/app/components/base/icons/src/vender/line/general' @@ -60,6 +62,7 @@ const NodeSelector: FC<NodeSelectorProps> = ({ }) => { const { t } = useTranslation() const 
[searchText, setSearchText] = useState('') + const [tags, setTags] = useState<string[]>([]) const [localOpen, setLocalOpen] = useState(false) const open = openFromProps === undefined ? localOpen : openFromProps const handleOpenChange = useCallback((newOpen: boolean) => { @@ -127,21 +130,35 @@ const NodeSelector: FC<NodeSelectorProps> = ({ <PortalToFollowElemContent className='z-[1000]'> <div className={`rounded-lg border-[0.5px] border-gray-200 bg-white shadow-lg ${popupClassName}`}> <div className='px-2 pt-2' onClick={e => e.stopPropagation()}> - <Input - showLeftIcon - showClearIcon - autoFocus - value={searchText} - placeholder={searchPlaceholder} - onChange={e => setSearchText(e.target.value)} - onClear={() => setSearchText('')} - /> + {activeTab === TabsEnum.Blocks && ( + <Input + showLeftIcon + showClearIcon + autoFocus + value={searchText} + placeholder={searchPlaceholder} + onChange={e => setSearchText(e.target.value)} + onClear={() => setSearchText('')} + /> + )} + {activeTab === TabsEnum.Tools && ( + <SearchBox + search={searchText} + onSearchChange={setSearchText} + tags={tags} + onTagsChange={setTags} + size='small' + placeholder={t('plugin.searchTools')!} + /> + )} + </div> <Tabs activeTab={activeTab} onActiveTabChange={handleActiveTabChange} onSelect={handleSelect} searchText={searchText} + tags={tags} availableBlocksTypes={availableBlocksTypes} noBlocks={noBlocks} /> diff --git a/web/app/components/workflow/block-selector/market-place-plugin/action.tsx b/web/app/components/workflow/block-selector/market-place-plugin/action.tsx index d77ea248fe..6f0c08eeca 100644 --- a/web/app/components/workflow/block-selector/market-place-plugin/action.tsx +++ b/web/app/components/workflow/block-selector/market-place-plugin/action.tsx @@ -11,15 +11,20 @@ import { PortalToFollowElemTrigger, } from '@/app/components/base/portal-to-follow-elem' import cn from '@/utils/classnames' +import { MARKETPLACE_URL_PREFIX } from '@/config' type Props = { open: boolean onOpenChange: (v: boolean) => void + author: string + name: string } const OperationDropdown: FC<Props> = ({ open, onOpenChange, + author, + name, }) => { const { t } = useTranslation() const openRef = useRef(open) @@ -47,11 +52,10 @@ const OperationDropdown: FC<Props> = ({ <RiMoreFill className='w-4 h-4 text-components-button-secondary-accent-text' /> </ActionButton> </PortalToFollowElemTrigger> - <PortalToFollowElemContent className='z-50'> + <PortalToFollowElemContent className='z-[9999]'> <div className='w-[112px] p-1 bg-components-panel-bg-blur rounded-xl border-[0.5px] border-components-panel-border shadow-lg'> <div className='px-3 py-1.5 rounded-lg text-text-secondary system-md-regular cursor-pointer hover:bg-state-base-hover'>{t('common.operation.download')}</div> - {/* Wait marketplace */} - {/* <div className='px-3 py-1.5 rounded-lg text-text-secondary system-md-regular cursor-pointer hover:bg-state-base-hover'>{t('common.operation.viewDetail')}</div> */} + <a href={`${MARKETPLACE_URL_PREFIX}/plugins/${author}/${name}`} target='_blank' className='block px-3 py-1.5 rounded-lg text-text-secondary system-md-regular cursor-pointer hover:bg-state-base-hover'>{t('common.operation.viewDetails')}</a> </div> </PortalToFollowElemContent> </PortalToFollowElem> diff --git a/web/app/components/workflow/block-selector/market-place-plugin/item.tsx b/web/app/components/workflow/block-selector/market-place-plugin/item.tsx index d257533d62..7f2ae34083 100644 --- a/web/app/components/workflow/block-selector/market-place-plugin/item.tsx +++ 
b/web/app/components/workflow/block-selector/market-place-plugin/item.tsx @@ -5,10 +5,12 @@ import { useContext } from 'use-context-selector' import { useTranslation } from 'react-i18next' import Action from './action' import type { Plugin } from '@/app/components/plugins/types.ts' +import InstallFromMarketplace from '@/app/components/plugins/install-plugin/install-from-marketplace' import I18n from '@/context/i18n' import cn from '@/utils/classnames' import { formatNumber } from '@/utils/format' +import { useBoolean } from 'ahooks' enum ActionType { install = 'install', @@ -28,6 +30,10 @@ const Item: FC<Props> = ({ const { locale } = useContext(I18n) const getLocalizedText = (obj: Record<string, string> | undefined) => obj?.[locale] || obj?.['en-US'] || obj?.en_US || '' + const [isShowInstallModal, { + setTrue: showInstallModal, + setFalse: hideInstallModal, + }] = useBoolean(false) return ( <div className='group/plugin flex rounded-lg py-1 pr-1 pl-3 hover:bg-state-base-hover'> @@ -47,14 +53,23 @@ const Item: FC<Props> = ({ </div> {/* Action */} <div className={cn(!open ? 'hidden' : 'flex', 'group-hover/plugin:flex items-center space-x-1 h-4 text-components-button-secondary-accent-text system-xs-medium')}> - <div className='px-1.5'>{t('plugin.installAction')}</div> + <div className='px-1.5 cursor-pointer' onClick={showInstallModal}>{t('plugin.installAction')}</div> <Action open={open} onOpenChange={setOpen} + author={payload.org} + name={payload.name} /> </div> + {isShowInstallModal && ( + <InstallFromMarketplace + uniqueIdentifier={payload.latest_package_identifier} + manifest={payload} + onSuccess={hideInstallModal} + onClose={hideInstallModal} + /> + )} </div> - </div> ) } diff --git a/web/app/components/workflow/block-selector/tabs.tsx b/web/app/components/workflow/block-selector/tabs.tsx index 5d44011465..e82c39be8c 100644 --- a/web/app/components/workflow/block-selector/tabs.tsx +++ b/web/app/components/workflow/block-selector/tabs.tsx @@ -13,6 +13,7 @@ export type TabsProps = { activeTab: TabsEnum onActiveTabChange: (activeTab: TabsEnum) => void searchText: string + tags: string[] onSelect: (type: BlockEnum, tool?: ToolDefaultValue) => void availableBlocksTypes?: BlockEnum[] noBlocks?: boolean @@ -20,6 +21,7 @@ export type TabsProps = { const Tabs: FC<TabsProps> = ({ activeTab, onActiveTabChange, + tags, searchText, onSelect, availableBlocksTypes, @@ -68,6 +70,7 @@ const Tabs: FC<TabsProps> = ({ <AllTools searchText={searchText} onSelect={onSelect} + tags={tags} buildInTools={buildInTools} customTools={customTools} workflowTools={workflowTools} diff --git a/web/app/components/workflow/nodes/_base/components/file-upload-setting.tsx b/web/app/components/workflow/nodes/_base/components/file-upload-setting.tsx index 42a7213f80..1974084424 100644 --- a/web/app/components/workflow/nodes/_base/components/file-upload-setting.tsx +++ b/web/app/components/workflow/nodes/_base/components/file-upload-setting.tsx @@ -82,8 +82,6 @@ const FileUploadSetting: FC<Props> = ({ const handleCustomFileTypesChange = useCallback((customFileTypes: string[]) => { const newPayload = produce(payload, (draft) => { draft.allowed_file_extensions = customFileTypes.map((v) => { - if (v.startsWith('.')) // Not start with dot - return v.slice(1) return v }) }) @@ -118,7 +116,7 @@ const FileUploadSetting: FC<Props> = ({ type={SupportUploadFileTypes.custom} selected={allowed_file_types.includes(SupportUploadFileTypes.custom)} onToggle={handleSupportFileTypeChange} - customFileTypes={allowed_file_extensions?.map(item => 
`.${item}`)} + customFileTypes={allowed_file_extensions} onCustomFileTypesChange={handleCustomFileTypesChange} /> </div> diff --git a/web/app/components/workflow/nodes/tool/components/input-var-list.tsx b/web/app/components/workflow/nodes/tool/components/input-var-list.tsx index e47082f4b7..10c534509c 100644 --- a/web/app/components/workflow/nodes/tool/components/input-var-list.tsx +++ b/web/app/components/workflow/nodes/tool/components/input-var-list.tsx @@ -46,6 +46,8 @@ const InputVarList: FC<Props> = ({ const paramType = (type: string) => { if (type === FormTypeEnum.textNumber) return 'Number' + else if (type === FormTypeEnum.file) + return 'File' else if (type === FormTypeEnum.files) return 'Files' else if (type === FormTypeEnum.select) diff --git a/web/app/components/workflow/note-node/index.tsx b/web/app/components/workflow/note-node/index.tsx index ec2bb84f68..6d62b452e4 100644 --- a/web/app/components/workflow/note-node/index.tsx +++ b/web/app/components/workflow/note-node/index.tsx @@ -81,7 +81,6 @@ const NoteNode = ({ nodeData={data} icon={<Icon />} minWidth={240} - maxWidth={640} minHeight={88} /> <div className='shrink-0 h-2 opacity-50 rounded-t-md' style={{ background: THEME_MAP[theme].title }}></div> diff --git a/web/app/components/workflow/style.css b/web/app/components/workflow/style.css index 9ec8586ccc..ca1d24a52e 100644 --- a/web/app/components/workflow/style.css +++ b/web/app/components/workflow/style.css @@ -15,4 +15,8 @@ #workflow-container .react-flow__selection { border: 1px solid #528BFF; background: rgba(21, 94, 239, 0.05); +} + +#workflow-container .react-flow__node-custom-note { + z-index: -1000 !important; } \ No newline at end of file diff --git a/web/app/signin/normalForm.tsx b/web/app/signin/normalForm.tsx index f4f46c68ba..783d8ac507 100644 --- a/web/app/signin/normalForm.tsx +++ b/web/app/signin/normalForm.tsx @@ -2,7 +2,7 @@ import React, { useCallback, useEffect, useState } from 'react' import { useTranslation } from 'react-i18next' import Link from 'next/link' import { useRouter, useSearchParams } from 'next/navigation' -import { RiDoorLockLine } from '@remixicon/react' +import { RiContractLine, RiDoorLockLine, RiErrorWarningFill } from '@remixicon/react' import Loading from '../components/base/loading' import MailAndCodeAuth from './components/mail-and-code-auth' import MailAndPasswordAuth from './components/mail-and-password-auth' @@ -10,7 +10,7 @@ import SocialAuth from './components/social-auth' import SSOAuth from './components/sso-auth' import cn from '@/utils/classnames' import { getSystemFeatures, invitationCheck } from '@/service/common' -import { defaultSystemFeatures } from '@/types/feature' +import { LicenseStatus, defaultSystemFeatures } from '@/types/feature' import Toast from '@/app/components/base/toast' import { IS_CE_EDITION } from '@/config' @@ -83,6 +83,48 @@ const NormalForm = () => { <Loading type='area' /> </div> } + if (systemFeatures.license?.status === LicenseStatus.LOST) { + return <div className='w-full mx-auto mt-8'> + <div className='bg-white'> + <div className="p-4 rounded-lg bg-gradient-to-r from-workflow-workflow-progress-bg-1 to-workflow-workflow-progress-bg-2"> + <div className='flex items-center justify-center w-10 h-10 rounded-xl bg-components-card-bg shadow shadows-shadow-lg mb-2 relative'> + <RiContractLine className='w-5 h-5' /> + <RiErrorWarningFill className='absolute w-4 h-4 text-text-warning-secondary -top-1 -right-1' /> + </div> + <p className='system-sm-medium text-text-primary'>{t('login.licenseLost')}</p> 
+ <p className='system-xs-regular text-text-tertiary mt-1'>{t('login.licenseLostTip')}</p> + </div> + </div> + </div> + } + if (systemFeatures.license?.status === LicenseStatus.EXPIRED) { + return <div className='w-full mx-auto mt-8'> + <div className='bg-white'> + <div className="p-4 rounded-lg bg-gradient-to-r from-workflow-workflow-progress-bg-1 to-workflow-workflow-progress-bg-2"> + <div className='flex items-center justify-center w-10 h-10 rounded-xl bg-components-card-bg shadow shadows-shadow-lg mb-2 relative'> + <RiContractLine className='w-5 h-5' /> + <RiErrorWarningFill className='absolute w-4 h-4 text-text-warning-secondary -top-1 -right-1' /> + </div> + <p className='system-sm-medium text-text-primary'>{t('login.licenseExpired')}</p> + <p className='system-xs-regular text-text-tertiary mt-1'>{t('login.licenseExpiredTip')}</p> + </div> + </div> + </div> + } + if (systemFeatures.license?.status === LicenseStatus.INACTIVE) { + return <div className='w-full mx-auto mt-8'> + <div className='bg-white'> + <div className="p-4 rounded-lg bg-gradient-to-r from-workflow-workflow-progress-bg-1 to-workflow-workflow-progress-bg-2"> + <div className='flex items-center justify-center w-10 h-10 rounded-xl bg-components-card-bg shadow shadows-shadow-lg mb-2 relative'> + <RiContractLine className='w-5 h-5' /> + <RiErrorWarningFill className='absolute w-4 h-4 text-text-warning-secondary -top-1 -right-1' /> + </div> + <p className='system-sm-medium text-text-primary'>{t('login.licenseInactive')}</p> + <p className='system-xs-regular text-text-tertiary mt-1'>{t('login.licenseInactiveTip')}</p> + </div> + </div> + </div> + } return ( <> diff --git a/web/config/index.ts b/web/config/index.ts index 6e8c4a630c..c3f03c1235 100644 --- a/web/config/index.ts +++ b/web/config/index.ts @@ -274,4 +274,4 @@ export const DISABLE_UPLOAD_IMAGE_AS_ICON = process.env.NEXT_PUBLIC_DISABLE_UPLO export const GITHUB_ACCESS_TOKEN = process.env.NEXT_PUBLIC_GITHUB_ACCESS_TOKEN || globalThis.document?.body?.getAttribute('data-public-github-access-token') || '' -export const SUPPORT_INSTALL_LOCAL_FILE_EXTENSIONS = '.difypkg,.bundle' +export const SUPPORT_INSTALL_LOCAL_FILE_EXTENSIONS = '.difypkg,.difybndl' diff --git a/web/context/app-context.tsx b/web/context/app-context.tsx index 78ac1c9848..369fe5af19 100644 --- a/web/context/app-context.tsx +++ b/web/context/app-context.tsx @@ -144,7 +144,7 @@ export const AppContextProvider: FC<AppContextProviderProps> = ({ children }) => theme, setTheme: handleSetTheme, apps: appList.data, - systemFeatures, + systemFeatures: { ...defaultSystemFeatures, ...systemFeatures }, mutateApps, userProfile, mutateUserProfile, diff --git a/web/context/provider-context.tsx b/web/context/provider-context.tsx index 814792ef0e..83039ca7a0 100644 --- a/web/context/provider-context.tsx +++ b/web/context/provider-context.tsx @@ -21,6 +21,7 @@ import { defaultPlan } from '@/app/components/billing/config' type ProviderContextState = { modelProviders: ModelProvider[] + refreshModelProviders: () => void textGenerationModelList: Model[] supportRetrievalMethods: RETRIEVE_METHOD[] isAPIKeySet: boolean @@ -38,6 +39,7 @@ type ProviderContextState = { } const ProviderContext = createContext<ProviderContextState>({ modelProviders: [], + refreshModelProviders: () => { }, textGenerationModelList: [], supportRetrievalMethods: [], isAPIKeySet: true, @@ -79,7 +81,7 @@ type ProviderContextProviderProps = { export const ProviderContextProvider = ({ children, }: ProviderContextProviderProps) => { - const { data: 
providersData } = useSWR('/workspaces/current/model-providers', fetchModelProviders) + const { data: providersData, mutate: refreshModelProviders } = useSWR('/workspaces/current/model-providers', fetchModelProviders) const fetchModelListUrlPrefix = '/workspaces/current/models/model-types/' const { data: textGenerationModelList } = useSWR(`${fetchModelListUrlPrefix}${ModelTypeEnum.textGeneration}`, fetchModelList) const { data: supportRetrievalMethods } = useSWR('/datasets/retrieval-setting', fetchSupportRetrievalMethods) @@ -112,6 +114,7 @@ export const ProviderContextProvider = ({ return ( <ProviderContext.Provider value={{ modelProviders: providersData?.data || [], + refreshModelProviders, textGenerationModelList: textGenerationModelList?.data || [], isAPIKeySet: !!textGenerationModelList?.data.some(model => model.status === ModelStatusEnum.active), supportRetrievalMethods: supportRetrievalMethods?.retrieval_method || [], diff --git a/web/context/query-client.tsx b/web/context/query-client.tsx index 1adb8af653..f85930515c 100644 --- a/web/context/query-client.tsx +++ b/web/context/query-client.tsx @@ -4,7 +4,15 @@ import type { FC, PropsWithChildren } from 'react' import { QueryClient, QueryClientProvider } from '@tanstack/react-query' import { ReactQueryDevtools } from '@tanstack/react-query-devtools' -const client = new QueryClient() +const STALE_TIME = 1000 * 60 * 30 // 30 minutes + +const client = new QueryClient({ + defaultOptions: { + queries: { + staleTime: STALE_TIME, + }, + }, +}) export const TanstackQueryIniter: FC<PropsWithChildren> = (props) => { const { children } = props diff --git a/web/i18n/de-DE/common.ts b/web/i18n/de-DE/common.ts index 0b6d6fa0b2..085371aeb1 100644 --- a/web/i18n/de-DE/common.ts +++ b/web/i18n/de-DE/common.ts @@ -591,6 +591,10 @@ const translation = { uploadFromComputerReadError: 'Lesen der Datei fehlgeschlagen, bitte versuchen Sie es erneut.', fileExtensionNotSupport: 'Dateiendung nicht bedient', }, + license: { + expiring: 'Läuft an einem Tag ab', + expiring_plural: 'Läuft in {{count}} Tagen ab', + }, } export default translation diff --git a/web/i18n/de-DE/login.ts b/web/i18n/de-DE/login.ts index 6736e34914..2e0d51cf85 100644 --- a/web/i18n/de-DE/login.ts +++ b/web/i18n/de-DE/login.ts @@ -99,6 +99,12 @@ const translation = { noLoginMethodTip: 'Wenden Sie sich an den Systemadministrator, um eine Authentifizierungsmethode hinzuzufügen.', usePassword: 'Passwort verwenden', noLoginMethod: 'Authentifizierungsmethode nicht konfiguriert', + licenseExpired: 'Lizenz abgelaufen', + licenseLostTip: 'Fehler beim Verbinden des Dify-Lizenzservers. Wenden Sie sich an Ihren Administrator, um Dify weiterhin zu verwenden.', + licenseInactive: 'Lizenz inaktiv', + licenseInactiveTip: 'Die Dify Enterprise-Lizenz für Ihren Arbeitsbereich ist inaktiv. Wenden Sie sich an Ihren Administrator, um Dify weiterhin zu verwenden.', + licenseExpiredTip: 'Die Dify Enterprise-Lizenz für Ihren Arbeitsbereich ist abgelaufen. 
Wenden Sie sich an Ihren Administrator, um Dify weiterhin zu verwenden.', + licenseLost: 'Lizenz verloren', } export default translation diff --git a/web/i18n/de-DE/workflow.ts b/web/i18n/de-DE/workflow.ts index d05070c308..9ee5289429 100644 --- a/web/i18n/de-DE/workflow.ts +++ b/web/i18n/de-DE/workflow.ts @@ -407,6 +407,7 @@ const translation = { }, type: 'Art', binaryFileVariable: 'Variable der Binärdatei', + extractListPlaceholder: 'Geben Sie den Index des Listeneintrags ein, geben Sie \'/\' ein, fügen Sie die Variable ein', }, code: { inputVars: 'Eingabevariablen', @@ -618,6 +619,7 @@ const translation = { filterConditionKey: 'Bedingungsschlüssel filtern', filterCondition: 'Filter-Bedingung', selectVariableKeyPlaceholder: 'Untervariablenschlüssel auswählen', + extractsCondition: 'Extrahieren des N-Elements', }, }, tracing: { diff --git a/web/i18n/en-US/app-api.ts b/web/i18n/en-US/app-api.ts index 355ff30602..1fba63c977 100644 --- a/web/i18n/en-US/app-api.ts +++ b/web/i18n/en-US/app-api.ts @@ -78,6 +78,7 @@ const translation = { requestBody: 'Request Body', pathParams: 'Path Params', query: 'Query', + toc: 'Contents', }, } diff --git a/web/i18n/en-US/common.ts b/web/i18n/en-US/common.ts index 97b158904b..3f46e32e8d 100644 --- a/web/i18n/en-US/common.ts +++ b/web/i18n/en-US/common.ts @@ -27,6 +27,7 @@ const translation = { lineBreak: 'Line break', sure: 'I\'m sure', download: 'Download', + viewDetails: 'View Details', delete: 'Delete', settings: 'Settings', setup: 'Setup', @@ -602,6 +603,10 @@ const translation = { created: 'Tag created successfully', failed: 'Tag creation failed', }, + license: { + expiring: 'Expiring in one day', + expiring_plural: 'Expiring in {{count}} days', + }, } export default translation diff --git a/web/i18n/en-US/login.ts b/web/i18n/en-US/login.ts index b47d7bd69a..5ff7b80b4e 100644 --- a/web/i18n/en-US/login.ts +++ b/web/i18n/en-US/login.ts @@ -98,6 +98,12 @@ const translation = { back: 'Back', noLoginMethod: 'Authentication method not configured', noLoginMethodTip: 'Please contact the system admin to add an authentication method.', + licenseExpired: 'License Expired', + licenseExpiredTip: 'The Dify Enterprise license for your workspace has expired. Please contact your administrator to continue using Dify.', + licenseLost: 'License Lost', + licenseLostTip: 'Failed to connect Dify license server. Please contact your administrator to continue using Dify.', + licenseInactive: 'License Inactive', + licenseInactiveTip: 'The Dify Enterprise license for your workspace is inactive. 
Please contact your administrator to continue using Dify.', } export default translation diff --git a/web/i18n/en-US/plugin.ts index 0e8e6dfccd..bcbb1648f4 100644 --- a/web/i18n/en-US/plugin.ts +++ b/web/i18n/en-US/plugin.ts @@ -116,6 +116,8 @@ const translation = { cancel: 'Cancel', back: 'Back', next: 'Next', + pluginLoadError: 'Plugin load error', + pluginLoadErrorDesc: 'This plugin will not be installed', }, installFromGitHub: { installPlugin: 'Install plugin from GitHub', diff --git a/web/i18n/en-US/tools.ts index 37250473d6..6bc11fdd2c 100644 --- a/web/i18n/en-US/tools.ts +++ b/web/i18n/en-US/tools.ts @@ -154,6 +154,7 @@ const translation = { placeholder: 'Select a tool...', auth: 'AUTHORIZATION', }, + noTools: 'No tools found', } export default translation diff --git a/web/i18n/es-ES/common.ts index c2cef9afcd..8eba3258c8 100644 --- a/web/i18n/es-ES/common.ts +++ b/web/i18n/es-ES/common.ts @@ -591,6 +591,10 @@ const translation = { pasteFileLinkInputPlaceholder: 'Introduzca la URL...', uploadFromComputerLimit: 'El archivo de carga no puede exceder {{size}}', }, + license: { + expiring: 'Caduca en un día', + expiring_plural: 'Caducando en {{count}} días', + }, } export default translation diff --git a/web/i18n/es-ES/login.ts index 3a6debbe48..bb465ac1be 100644 --- a/web/i18n/es-ES/login.ts +++ b/web/i18n/es-ES/login.ts @@ -99,6 +99,12 @@ const translation = { noLoginMethod: 'Método de autenticación no configurado', setYourAccount: 'Configura tu cuenta', noLoginMethodTip: 'Póngase en contacto con el administrador del sistema para agregar un método de autenticación.', + licenseInactive: 'Licencia inactiva', + licenseInactiveTip: 'La licencia de Dify Enterprise para su espacio de trabajo está inactiva. Póngase en contacto con su administrador para seguir utilizando Dify.', + licenseExpired: 'Licencia caducada', + licenseLost: 'Licencia perdida', + licenseExpiredTip: 'La licencia de Dify Enterprise para su espacio de trabajo ha caducado. Póngase en contacto con su administrador para seguir utilizando Dify.', + licenseLostTip: 'No se pudo conectar el servidor de licencias de Dify.
Póngase en contacto con su administrador para seguir utilizando Dify.', } export default translation diff --git a/web/i18n/es-ES/workflow.ts b/web/i18n/es-ES/workflow.ts index 6c9af49c4d..4b3da57d7c 100644 --- a/web/i18n/es-ES/workflow.ts +++ b/web/i18n/es-ES/workflow.ts @@ -407,6 +407,7 @@ const translation = { }, type: 'Tipo', binaryFileVariable: 'Variable de archivo binario', + extractListPlaceholder: 'Introduzca el índice de elementos de la lista, escriba \'/\' insertar variable', }, code: { inputVars: 'Variables de entrada', @@ -621,6 +622,7 @@ const translation = { filterConditionComparisonOperator: 'Operador de comparación de condiciones de filtro', asc: 'ASC', selectVariableKeyPlaceholder: 'Seleccione la clave de subvariable', + extractsCondition: 'Extraiga el elemento N', }, }, tracing: { diff --git a/web/i18n/fa-IR/common.ts b/web/i18n/fa-IR/common.ts index 9ec9332ce8..969c134be7 100644 --- a/web/i18n/fa-IR/common.ts +++ b/web/i18n/fa-IR/common.ts @@ -591,6 +591,10 @@ const translation = { pasteFileLink: 'پیوند فایل را جایگذاری کنید', uploadFromComputerLimit: 'آپلود فایل نمی تواند از {{size}} تجاوز کند', }, + license: { + expiring_plural: 'انقضا در {{count}} روز', + expiring: 'انقضا در یک روز', + }, } export default translation diff --git a/web/i18n/fa-IR/login.ts b/web/i18n/fa-IR/login.ts index 0f2fe9464a..7394ab325f 100644 --- a/web/i18n/fa-IR/login.ts +++ b/web/i18n/fa-IR/login.ts @@ -99,6 +99,12 @@ const translation = { noLoginMethod: 'روش احراز هویت پیکربندی نشده است', noLoginMethodTip: 'لطفا برای افزودن روش احراز هویت با مدیر سیستم تماس بگیرید.', resetPasswordDesc: 'ایمیلی را که برای ثبت نام در Dify استفاده کرده اید تایپ کنید و ما یک ایمیل بازنشانی رمز عبور برای شما ارسال خواهیم کرد.', + licenseInactive: 'مجوز غیر فعال', + licenseLost: 'مجوز گم شده است', + licenseExpired: 'مجوز منقضی شده است', + licenseExpiredTip: 'مجوز Dify Enterprise برای فضای کاری شما منقضی شده است. لطفا برای ادامه استفاده از Dify با سرپرست خود تماس بگیرید.', + licenseInactiveTip: 'مجوز Dify Enterprise برای فضای کاری شما غیرفعال است. لطفا برای ادامه استفاده از Dify با سرپرست خود تماس بگیرید.', + licenseLostTip: 'اتصال سرور مجوز Dify انجام نشد. 
لطفا برای ادامه استفاده از Dify با سرپرست خود تماس بگیرید.', } export default translation diff --git a/web/i18n/fa-IR/workflow.ts b/web/i18n/fa-IR/workflow.ts index 4b00390663..c23cf8f62e 100644 --- a/web/i18n/fa-IR/workflow.ts +++ b/web/i18n/fa-IR/workflow.ts @@ -407,6 +407,7 @@ const translation = { }, binaryFileVariable: 'متغیر فایل باینری', type: 'نوع', + extractListPlaceholder: 'فهرست آیتم لیست را وارد کنید، متغیر درج \'/\' را تایپ کنید', }, code: { inputVars: 'متغیرهای ورودی', @@ -618,6 +619,7 @@ const translation = { filterConditionComparisonValue: 'مقدار شرایط فیلتر', selectVariableKeyPlaceholder: 'کلید متغیر فرعی را انتخاب کنید', asc: 'صعودی', + extractsCondition: 'مورد N را استخراج کنید', }, }, tracing: { diff --git a/web/i18n/fr-FR/common.ts b/web/i18n/fr-FR/common.ts index f6b5b62b30..62b5a1ca10 100644 --- a/web/i18n/fr-FR/common.ts +++ b/web/i18n/fr-FR/common.ts @@ -591,6 +591,10 @@ const translation = { pasteFileLinkInvalid: 'Lien de fichier non valide', uploadFromComputerLimit: 'Le fichier de téléchargement ne peut pas dépasser {{size}}', }, + license: { + expiring: 'Expirant dans un jour', + expiring_plural: 'Expirant dans {{count}} jours', + }, } export default translation diff --git a/web/i18n/fr-FR/login.ts b/web/i18n/fr-FR/login.ts index 2f59b8afef..a7a633f330 100644 --- a/web/i18n/fr-FR/login.ts +++ b/web/i18n/fr-FR/login.ts @@ -99,6 +99,12 @@ const translation = { noLoginMethodTip: 'Veuillez contacter l’administrateur système pour ajouter une méthode d’authentification.', resetPasswordDesc: 'Tapez l’adresse e-mail que vous avez utilisée pour vous inscrire sur Dify et nous vous enverrons un e-mail de réinitialisation de mot de passe.', usePassword: 'Utiliser le mot de passe', + licenseInactiveTip: 'La licence Dify Enterprise de votre espace de travail est inactive. Veuillez contacter votre administrateur pour continuer à utiliser Dify.', + licenseLostTip: 'Échec de la connexion au serveur de licences Dify. Veuillez contacter votre administrateur pour continuer à utiliser Dify.', + licenseExpired: 'Licence expirée', + licenseLost: 'Licence perdue', + licenseExpiredTip: 'La licence Dify Enterprise de votre espace de travail a expiré. 
Veuillez contacter votre administrateur pour continuer à utiliser Dify.', + licenseInactive: 'Licence inactive', } export default translation diff --git a/web/i18n/fr-FR/workflow.ts b/web/i18n/fr-FR/workflow.ts index e736e2cb07..dd41c8f9d1 100644 --- a/web/i18n/fr-FR/workflow.ts +++ b/web/i18n/fr-FR/workflow.ts @@ -407,6 +407,7 @@ const translation = { }, binaryFileVariable: 'Variable de fichier binaire', type: 'Type', + extractListPlaceholder: 'Entrez l’index de l’élément de liste, tapez \'/\' insérer la variable', }, code: { inputVars: 'Variables de saisie', @@ -618,6 +619,7 @@ const translation = { limit: 'Haut N', orderBy: 'Trier par', filterConditionKey: 'Clé de condition de filtre', + extractsCondition: 'Extraire l’élément N', }, }, tracing: { diff --git a/web/i18n/hi-IN/common.ts b/web/i18n/hi-IN/common.ts index 80deee1f94..569ee0a1af 100644 --- a/web/i18n/hi-IN/common.ts +++ b/web/i18n/hi-IN/common.ts @@ -613,6 +613,10 @@ const translation = { fileExtensionNotSupport: 'फ़ाइल एक्सटेंशन समर्थित नहीं है', uploadFromComputer: 'स्थानीय अपलोड', }, + license: { + expiring: 'एक दिन में समाप्त हो रहा है', + expiring_plural: '{{count}} दिनों में समाप्त हो रहा है', + }, } export default translation diff --git a/web/i18n/hi-IN/login.ts b/web/i18n/hi-IN/login.ts index e3ad60d572..0be8cbc3ab 100644 --- a/web/i18n/hi-IN/login.ts +++ b/web/i18n/hi-IN/login.ts @@ -104,6 +104,12 @@ const translation = { resetPasswordDesc: 'वह ईमेल टाइप करें जिसका उपयोग आपने Dify पर साइन अप करने के लिए किया था और हम आपको एक पासवर्ड रीसेट ईमेल भेजेंगे।', withSSO: 'एसएसओ के साथ जारी रखें', back: 'पीछे', + licenseInactive: 'लाइसेंस निष्क्रिय', + licenseExpired: 'लाइसेंस की समय सीमा समाप्त हो गई', + licenseLost: 'लाइसेंस खो गया', + licenseLostTip: 'Dify लायसेंस सर्वर से कनेक्ट करने में विफल. 
Dify का उपयोग जारी रखने के लिए कृपया अपने व्यवस्थापक से संपर्क करें.', + licenseInactiveTip: 'आपके कार्यस्थल के लिए डिफाई एंटरप्राइज लाइसेंस निष्क्रिय है। कृपया डिफाई का उपयोग जारी रखने के लिए अपने प्रशासक से संपर्क करें।', + licenseExpiredTip: 'आपके कार्यस्थल के लिए डिफाई एंटरप्राइज लाइसेंस समाप्त हो गया है। कृपया डिफाई का उपयोग जारी रखने के लिए अपने प्रशासक से संपर्क करें।', } export default translation diff --git a/web/i18n/hi-IN/workflow.ts b/web/i18n/hi-IN/workflow.ts index 4112643488..70ddf1145f 100644 --- a/web/i18n/hi-IN/workflow.ts +++ b/web/i18n/hi-IN/workflow.ts @@ -420,6 +420,7 @@ const translation = { }, type: 'प्रकार', binaryFileVariable: 'बाइनरी फ़ाइल चर', + extractListPlaceholder: 'सूची आइटम इंडेक्स दर्ज करें, \'/\' इन्सर्ट वेरिएबल टाइप करें', }, code: { inputVars: 'इनपुट वेरिएबल्स', @@ -638,6 +639,7 @@ const translation = { filterConditionComparisonOperator: 'फ़िल्टर शर्त तुलन ऑपरेटर', selectVariableKeyPlaceholder: 'उप चर कुंजी का चयन करें', inputVar: 'इनपुट वेरिएबल', + extractsCondition: 'N आइटम निकालें', }, }, tracing: { diff --git a/web/i18n/it-IT/common.ts b/web/i18n/it-IT/common.ts index 46586179ec..971c292852 100644 --- a/web/i18n/it-IT/common.ts +++ b/web/i18n/it-IT/common.ts @@ -622,6 +622,10 @@ const translation = { pasteFileLink: 'Incolla il collegamento del file', uploadFromComputerReadError: 'Lettura del file non riuscita, riprovare.', }, + license: { + expiring_plural: 'Scadenza tra {{count}} giorni', + expiring: 'Scadenza in un giorno', + }, } export default translation diff --git a/web/i18n/it-IT/login.ts b/web/i18n/it-IT/login.ts index eb547ec661..350424259e 100644 --- a/web/i18n/it-IT/login.ts +++ b/web/i18n/it-IT/login.ts @@ -109,6 +109,12 @@ const translation = { resetPasswordDesc: 'Digita l\'e-mail che hai utilizzato per registrarti su Dify e ti invieremo un\'e-mail per reimpostare la password.', noLoginMethodTip: 'Contatta l\'amministratore di sistema per aggiungere un metodo di autenticazione.', enterYourName: 'Inserisci il tuo nome utente', + licenseLostTip: 'Impossibile connettersi al server licenze Dify. Contatta il tuo amministratore per continuare a utilizzare Dify.', + licenseExpired: 'Licenza scaduta', + licenseLost: 'Licenza persa', + licenseExpiredTip: 'La licenza Dify Enterprise per la tua area di lavoro è scaduta. Contatta il tuo amministratore per continuare a utilizzare Dify.', + licenseInactiveTip: 'La licenza Dify Enterprise per la tua area di lavoro è inattiva. 
Contatta il tuo amministratore per continuare a utilizzare Dify.', + licenseInactive: 'Licenza inattiva', } export default translation diff --git a/web/i18n/it-IT/workflow.ts b/web/i18n/it-IT/workflow.ts index 756fb665af..db617425e1 100644 --- a/web/i18n/it-IT/workflow.ts +++ b/web/i18n/it-IT/workflow.ts @@ -424,6 +424,7 @@ const translation = { }, binaryFileVariable: 'Variabile file binario', type: 'Digitare', + extractListPlaceholder: 'Inserisci l\'indice delle voci dell\'elenco, digita \'/\' inserisci la variabile', }, code: { inputVars: 'Variabili di Input', @@ -645,6 +646,7 @@ const translation = { desc: 'DESC', filterConditionComparisonValue: 'Valore della condizione di filtro', orderBy: 'Ordina per', + extractsCondition: 'Estrai l\'elemento N', }, }, tracing: { diff --git a/web/i18n/ja-JP/common.ts b/web/i18n/ja-JP/common.ts index 19f502c928..20182d8980 100644 --- a/web/i18n/ja-JP/common.ts +++ b/web/i18n/ja-JP/common.ts @@ -591,6 +591,10 @@ const translation = { fileExtensionNotSupport: 'ファイル拡張子はサポートされていません', pasteFileLinkInvalid: '無効なファイルリンク', }, + license: { + expiring_plural: '有効期限 {{count}} 日', + expiring: '1日で有効期限が切れます', + }, } export default translation diff --git a/web/i18n/ja-JP/login.ts b/web/i18n/ja-JP/login.ts index 178c2617ae..fe4510686b 100644 --- a/web/i18n/ja-JP/login.ts +++ b/web/i18n/ja-JP/login.ts @@ -99,6 +99,12 @@ const translation = { sendVerificationCode: '確認コードの送信', enterYourName: 'ユーザー名を入力してください', resetPasswordDesc: 'Difyへのサインアップに使用したメールアドレスを入力すると、パスワードリセットメールが送信されます。', + licenseLost: 'ライセンスを失った', + licenseExpiredTip: 'ワークスペースの Dify Enterprise ライセンスの有効期限が切れています。Difyを引き続き使用するには、管理者に連絡してください。', + licenseInactive: 'ライセンスが非アクティブです', + licenseInactiveTip: 'ワークスペースの Dify Enterprise ライセンスが非アクティブです。Difyを引き続き使用するには、管理者に連絡してください。', + licenseExpired: 'ライセンスの有効期限が切れています', + licenseLostTip: 'Difyライセンスサーバーへの接続に失敗しました。続けてDifyを使用するために管理者に連絡してください。', } export default translation diff --git a/web/i18n/ja-JP/workflow.ts b/web/i18n/ja-JP/workflow.ts index a82ba71e48..95e667de4a 100644 --- a/web/i18n/ja-JP/workflow.ts +++ b/web/i18n/ja-JP/workflow.ts @@ -407,6 +407,7 @@ const translation = { }, type: 'タイプ', binaryFileVariable: 'バイナリファイル変数', + extractListPlaceholder: 'リスト項目のインデックスを入力し、変数を挿入 \'/\' と入力します', }, code: { inputVars: '入力変数', @@ -619,6 +620,7 @@ const translation = { filterConditionComparisonOperator: 'フィルター条件を比較オペレーター', inputVar: '入力変数', desc: 'DESC', + extractsCondition: 'N個のアイテムを抽出します', }, }, tracing: { diff --git a/web/i18n/ko-KR/common.ts b/web/i18n/ko-KR/common.ts index 43e7402bd4..a5a697b2e5 100644 --- a/web/i18n/ko-KR/common.ts +++ b/web/i18n/ko-KR/common.ts @@ -587,6 +587,10 @@ const translation = { uploadFromComputerLimit: '업로드 파일은 {{size}}를 초과할 수 없습니다.', uploadFromComputerUploadError: '파일 업로드에 실패했습니다. 다시 업로드하십시오.', }, + license: { + expiring_plural: '{{count}}일 후에 만료', + expiring: '하루 후에 만료', + }, } export default translation diff --git a/web/i18n/ko-KR/login.ts b/web/i18n/ko-KR/login.ts index a338ce5ed7..05a60c7b68 100644 --- a/web/i18n/ko-KR/login.ts +++ b/web/i18n/ko-KR/login.ts @@ -99,6 +99,12 @@ const translation = { enterYourName: '사용자 이름을 입력해 주세요', noLoginMethodTip: '인증 방법을 추가하려면 시스템 관리자에게 문의하십시오.', resetPasswordDesc: 'Dify에 가입할 때 사용한 이메일을 입력하면 비밀번호 재설정 이메일을 보내드립니다.', + licenseInactiveTip: '작업 영역에 대한 Dify Enterprise 라이선스가 비활성 상태입니다. Dify를 계속 사용하려면 관리자에게 문의하십시오.', + licenseLost: '라이센스 분실', + licenseLostTip: 'Dify 라이선스 서버에 연결하지 못했습니다. 
Dify를 계속 사용하려면 관리자에게 문의하십시오.', + licenseInactive: '라이선스 비활성', + licenseExpired: '라이센스가 만료되었습니다.', + licenseExpiredTip: '작업 영역에 대한 Dify Enterprise 라이선스가 만료되었습니다. Dify를 계속 사용하려면 관리자에게 문의하십시오.', } export default translation diff --git a/web/i18n/ko-KR/workflow.ts b/web/i18n/ko-KR/workflow.ts index 589831401c..158d407e2a 100644 --- a/web/i18n/ko-KR/workflow.ts +++ b/web/i18n/ko-KR/workflow.ts @@ -407,6 +407,7 @@ const translation = { }, type: '형', binaryFileVariable: '바이너리 파일 변수', + extractListPlaceholder: '목록 항목 인덱스 입력, \'/\' 변수 삽입', }, code: { inputVars: '입력 변수', @@ -618,6 +619,7 @@ const translation = { orderBy: '정렬 기준', selectVariableKeyPlaceholder: '하위 변수 키 선택', filterConditionComparisonOperator: '필터 조건 비교 연산자', + extractsCondition: 'N 항목을 추출합니다.', }, }, tracing: { diff --git a/web/i18n/pl-PL/common.ts b/web/i18n/pl-PL/common.ts index 0a0f7adb99..3dff3f5c27 100644 --- a/web/i18n/pl-PL/common.ts +++ b/web/i18n/pl-PL/common.ts @@ -609,6 +609,10 @@ const translation = { fileExtensionNotSupport: 'Rozszerzenie pliku nie jest obsługiwane', uploadFromComputer: 'Przesyłanie lokalne', }, + license: { + expiring_plural: 'Wygasa za {{count}} dni', + expiring: 'Wygasa w ciągu jednego dnia', + }, } export default translation diff --git a/web/i18n/pl-PL/login.ts b/web/i18n/pl-PL/login.ts index 78f11f3a26..99719fe71a 100644 --- a/web/i18n/pl-PL/login.ts +++ b/web/i18n/pl-PL/login.ts @@ -104,6 +104,12 @@ const translation = { or: 'LUB', noLoginMethodTip: 'Skontaktuj się z administratorem systemu, aby dodać metodę uwierzytelniania.', noLoginMethod: 'Nie skonfigurowano metody uwierzytelniania', + licenseLost: 'Utrata licencji', + licenseExpired: 'Licencja wygasła', + licenseInactive: 'Licencja nieaktywna', + licenseExpiredTip: 'Licencja Dify Enterprise dla Twojego obszaru roboczego wygasła. Skontaktuj się z administratorem, aby kontynuować korzystanie z Dify.', + licenseLostTip: 'Nie udało się nawiązać połączenia z serwerem licencji Dify. Skontaktuj się z administratorem, aby kontynuować korzystanie z Dify.', + licenseInactiveTip: 'Licencja Dify Enterprise dla Twojego obszaru roboczego jest nieaktywna. 
Skontaktuj się z administratorem, aby kontynuować korzystanie z Dify.', } export default translation diff --git a/web/i18n/pl-PL/workflow.ts b/web/i18n/pl-PL/workflow.ts index f118f7945c..021cfd2534 100644 --- a/web/i18n/pl-PL/workflow.ts +++ b/web/i18n/pl-PL/workflow.ts @@ -407,6 +407,7 @@ const translation = { }, type: 'Typ', binaryFileVariable: 'Binarna zmienna pliku', + extractListPlaceholder: 'Wprowadź indeks elementu listy, wpisz "/" wstaw zmienną', }, code: { inputVars: 'Zmienne wejściowe', @@ -618,6 +619,7 @@ const translation = { filterCondition: 'Stan filtra', filterConditionComparisonValue: 'Wartość warunku filtru', selectVariableKeyPlaceholder: 'Wybierz klucz zmiennej podrzędnej', + extractsCondition: 'Wyodrębnij element N', }, }, tracing: { diff --git a/web/i18n/pt-BR/common.ts b/web/i18n/pt-BR/common.ts index 431db81d24..d51b24e34d 100644 --- a/web/i18n/pt-BR/common.ts +++ b/web/i18n/pt-BR/common.ts @@ -591,6 +591,10 @@ const translation = { uploadFromComputerLimit: 'Carregar arquivo não pode exceder {{size}}', uploadFromComputerUploadError: 'Falha no upload do arquivo, faça o upload novamente.', }, + license: { + expiring: 'Expirando em um dia', + expiring_plural: 'Expirando em {{count}} dias', + }, } export default translation diff --git a/web/i18n/pt-BR/login.ts b/web/i18n/pt-BR/login.ts index 6c08de45b0..7af5181bb9 100644 --- a/web/i18n/pt-BR/login.ts +++ b/web/i18n/pt-BR/login.ts @@ -99,6 +99,12 @@ const translation = { resetPasswordDesc: 'Digite o e-mail que você usou para se inscrever no Dify e enviaremos um e-mail de redefinição de senha.', sendVerificationCode: 'Enviar código de verificação', usePassword: 'Usar senha', + licenseInactiveTip: 'A licença do Dify Enterprise para seu espaço de trabalho está inativa. Entre em contato com o administrador para continuar usando o Dify.', + licenseLostTip: 'Falha ao conectar o servidor de licenças Dify. Entre em contato com o administrador para continuar usando o Dify.', + licenseExpired: 'Licença expirada', + licenseLost: 'Licença perdida', + licenseInactive: 'Licença inativa', + licenseExpiredTip: 'A licença do Dify Enterprise para seu espaço de trabalho expirou. 
Entre em contato com o administrador para continuar usando o Dify.', } export default translation diff --git a/web/i18n/pt-BR/workflow.ts b/web/i18n/pt-BR/workflow.ts index 44afda5cd4..de3f10ff6e 100644 --- a/web/i18n/pt-BR/workflow.ts +++ b/web/i18n/pt-BR/workflow.ts @@ -407,6 +407,7 @@ const translation = { }, type: 'Tipo', binaryFileVariable: 'Variável de arquivo binário', + extractListPlaceholder: 'Insira o índice do item da lista, digite \'/\' inserir variável', }, code: { inputVars: 'Variáveis de entrada', @@ -618,6 +619,7 @@ const translation = { filterConditionKey: 'Chave de condição do filtro', filterConditionComparisonOperator: 'Operador de comparação de condição de filtro', filterConditionComparisonValue: 'Valor da condição do filtro', + extractsCondition: 'Extraia o item N', }, }, tracing: { diff --git a/web/i18n/ro-RO/common.ts b/web/i18n/ro-RO/common.ts index 89b965db63..0b7baf37ab 100644 --- a/web/i18n/ro-RO/common.ts +++ b/web/i18n/ro-RO/common.ts @@ -591,6 +591,10 @@ const translation = { uploadFromComputerLimit: 'Încărcarea fișierului nu poate depăși {{size}}', pasteFileLink: 'Lipiți linkul fișierului', }, + license: { + expiring: 'Expiră într-o zi', + expiring_plural: 'Expiră în {{count}} zile', + }, } export default translation diff --git a/web/i18n/ro-RO/login.ts b/web/i18n/ro-RO/login.ts index a60e367ea7..12878d46c0 100644 --- a/web/i18n/ro-RO/login.ts +++ b/web/i18n/ro-RO/login.ts @@ -99,6 +99,12 @@ const translation = { or: 'SAU', resetPasswordDesc: 'Tastați e-mailul pe care l-ați folosit pentru a vă înscrie pe Dify și vă vom trimite un e-mail de resetare a parolei.', changePasswordBtn: 'Setați o parolă', + licenseLostTip: 'Nu s-a reușit conectarea serverului de licențe Dify. Contactați administratorul pentru a continua să utilizați Dify.', + licenseInactive: 'Licență inactivă', + licenseInactiveTip: 'Licența Dify Enterprise pentru spațiul de lucru este inactivă. Contactați administratorul pentru a continua să utilizați Dify.', + licenseExpired: 'Licență expirată', + licenseLost: 'Licență pierdută', + licenseExpiredTip: 'Licența Dify Enterprise pentru spațiul de lucru a expirat. 
Contactați administratorul pentru a continua să utilizați Dify.', } export default translation diff --git a/web/i18n/ro-RO/workflow.ts b/web/i18n/ro-RO/workflow.ts index d8cd84f730..276ebefa90 100644 --- a/web/i18n/ro-RO/workflow.ts +++ b/web/i18n/ro-RO/workflow.ts @@ -407,6 +407,7 @@ const translation = { }, type: 'Tip', binaryFileVariable: 'Variabilă de fișier binar', + extractListPlaceholder: 'Introduceți indexul elementelor din listă, tastați "/" inserați variabila', }, code: { inputVars: 'Variabile de intrare', @@ -618,6 +619,7 @@ const translation = { limit: 'N de sus', filterConditionComparisonValue: 'Valoare Stare filtrare', asc: 'ASC', + extractsCondition: 'Extrageți elementul N', }, }, tracing: { diff --git a/web/i18n/ru-RU/common.ts b/web/i18n/ru-RU/common.ts index f383b53479..8df7c1ae55 100644 --- a/web/i18n/ru-RU/common.ts +++ b/web/i18n/ru-RU/common.ts @@ -591,6 +591,10 @@ const translation = { uploadFromComputerLimit: 'Файл загрузки не может превышать {{size}}', uploadFromComputerUploadError: 'Загрузка файла не удалась, пожалуйста, загрузите еще раз.', }, + license: { + expiring: 'Срок действия истекает за один день', + expiring_plural: 'Срок действия истекает через {{count}} дней', + }, } export default translation diff --git a/web/i18n/ru-RU/login.ts b/web/i18n/ru-RU/login.ts index 7aba7c4cdd..5c46cb7ff9 100644 --- a/web/i18n/ru-RU/login.ts +++ b/web/i18n/ru-RU/login.ts @@ -99,6 +99,12 @@ const translation = { noLoginMethodTip: 'Обратитесь к системному администратору, чтобы добавить метод аутентификации.', resetPasswordDesc: 'Введите адрес электронной почты, который вы использовали для регистрации в Dify, и мы отправим вам электронное письмо для сброса пароля.', or: 'ИЛИ', + licenseInactive: 'Лицензия неактивна', + licenseLostTip: 'Не удалось подключить сервер лицензий Dify. Обратитесь к своему администратору, чтобы продолжить использование Dify.', + licenseExpired: 'Срок действия лицензии истек', + licenseLost: 'Утеряна лицензия', + licenseInactiveTip: 'Лицензия Dify Enterprise для рабочего пространства неактивна. Обратитесь к своему администратору, чтобы продолжить использование Dify.', + licenseExpiredTip: 'Срок действия лицензии Dify Enterprise для рабочего пространства истек. 
Обратитесь к своему администратору, чтобы продолжить использование Dify.', } export default translation diff --git a/web/i18n/ru-RU/workflow.ts b/web/i18n/ru-RU/workflow.ts index c822f8c3e5..2fdb6a5f3b 100644 --- a/web/i18n/ru-RU/workflow.ts +++ b/web/i18n/ru-RU/workflow.ts @@ -407,6 +407,7 @@ const translation = { }, type: 'Тип', binaryFileVariable: 'Переменная двоичного файла', + extractListPlaceholder: 'Введите индекс элемента списка, введите \'/\' вставьте переменную', }, code: { inputVars: 'Входные переменные', @@ -618,6 +619,7 @@ const translation = { filterConditionKey: 'Ключ условия фильтра', selectVariableKeyPlaceholder: 'Выбор ключа подпеременной', filterConditionComparisonValue: 'Значение условия фильтра', + extractsCondition: 'Извлечение элемента N', }, }, tracing: { diff --git a/web/i18n/sl-SI/common.ts b/web/i18n/sl-SI/common.ts index c780fbb6c2..2bd148397d 100644 --- a/web/i18n/sl-SI/common.ts +++ b/web/i18n/sl-SI/common.ts @@ -38,6 +38,10 @@ const translation = { duplicate: 'Podvoji', rename: 'Preimenuj', audioSourceUnavailable: 'Zvočni vir ni na voljo', + copyImage: 'Kopiraj sliko', + openInNewTab: 'Odpri v novem zavihku', + zoomOut: 'Pomanjšanje', + zoomIn: 'Povečava', }, errorMsg: { fieldRequired: '{{field}} je obvezno', @@ -576,5 +580,220 @@ const translation = { failed: 'Ustvarjanje oznake ni uspelo', }, }, + dataSource: { + notion: { + selector: { + pageSelected: 'Izbrane strani', + addPages: 'Dodajanje strani', + searchPages: 'Iskanje strani ...', + noSearchResult: 'Ni rezultatov iskanja', + preview: 'PREDOGLED', + }, + connected: 'Povezani', + remove: 'Odstrani', + addWorkspace: 'Dodajanje delovnega prostora', + connectedWorkspace: 'Povezani delovni prostor', + description: 'Uporaba storitve Notion kot vira podatkov za znanje.', + disconnected: 'Odklopi', + pagesAuthorized: 'Dovoljene strani', + title: 'Notion', + changeAuthorizedPages: 'Spreminjanje pooblaščenih strani', + sync: 'Sinhroniziranje', + }, + website: { + active: 'Dejaven', + configuredCrawlers: 'Konfigurirani pajki', + title: 'Spletna stran', + inactive: 'Neaktiven', + description: 'Uvozite vsebino s spletnih mest s spletnim pajkom.', + with: 'S', + }, + add: 'Dodajanje vira podatkov', + connect: 'Povezati', + configure: 'Konfigurirati', + }, + plugin: { + serpapi: { + apiKeyPlaceholder: 'Vnesite ključ API', + apiKey: 'API ključ', + keyFrom: 'Pridobite svoj ključ SerpAPI na strani računa SerpAPI', + }, + }, + apiBasedExtension: { + selector: { + placeholder: 'Prosimo, izberite razširitev API-ja', + manage: 'Upravljanje razširitve API', + title: 'Razširitev API-ja', + }, + modal: { + name: { + placeholder: 'Prosimo, vnesite ime', + title: 'Ime', + }, + apiEndpoint: { + title: 'Končna točka API-ja', + placeholder: 'Prosimo, vnesite končno točko API-ja', + }, + apiKey: { + lengthError: 'Dolžina ključa API ne sme biti manjša od 5 znakov', + title: 'Ključ API-ja', + placeholder: 'Prosimo, vnesite API-ključ', + }, + editTitle: 'Uredi razširitev API-ja', + title: 'Dodajanje razširitve API-ja', + }, + type: 'Vrsta', + link: 'Preberite, kako razvijete lastno razširitev API-ja.', + title: 'Razširitve API zagotavljajo centralizirano upravljanje API, kar poenostavlja konfiguracijo za enostavno uporabo v aplikacijah Dify.', + linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension', + add: 'Dodajanje razširitve API-ja', + }, + about: { + updateNow: 'Posodobi zdaj', + nowAvailable: 'Dify {{version}} je zdaj na voljo.', + latestAvailable: 'Dify {{version}} je najnovejša različica, ki je na voljo.', + changeLog: 
'Dnevnik sprememb', + }, + appMenus: { + apiAccess: 'Dostop do API-ja', + logs: 'Dnevniki', + logAndAnn: 'Dnevniki & Ann.', + promptEng: 'Orkester', + overview: 'Spremljanje', + }, + environment: { + development: 'RAZVOJ', + testing: 'PREIZKUŠANJE', + }, + appModes: { + completionApp: 'Generator besedila', + chatApp: 'Aplikacija za klepet', + }, + datasetMenus: { + documents: 'Dokumentov', + settings: 'Nastavitve', + hitTesting: 'Testiranje pridobivanja', + emptyTip: 'Znanje ni bilo povezano, prosimo, pojdite na aplikacijo ali vtičnik, da dokončate združenje.', + viewDoc: 'Oglejte si dokumentacijo', + relatedApp: 'Povezane aplikacije', + }, + voiceInput: { + notAllow: 'Mikrofon ni pooblaščen', + speaking: 'Spregovorite zdaj ...', + converting: 'Pretvorba v besedilo ...', + }, + modelName: { + 'claude-2': 'Claude-2', + 'gpt-4-32k': 'GPT-4-32K', + 'text-embedding-ada-002': 'Text-Embedding-Ada-002', + 'gpt-4': 'GPT-4', + 'whisper-1': 'Whisper-1', + 'claude-instant-1': 'Claude-Instant', + 'text-davinci-003': 'Text-Davinci-003', + 'gpt-3.5-turbo-16k': 'GPT-3.5-Turbo-16K', + 'gpt-3.5-turbo': 'GPT-3.5-Turbo', + }, + chat: { + citation: { + vectorHash: 'Vektorska razpršitev:', + hitScore: 'Rezultat pridobivanja:', + linkToDataset: 'Povezava do znanja', + hitCount: 'Število pridobivanja:', + characters: 'Znakov:', + title: 'CITATI', + }, + conversationNameCanNotEmpty: 'Zahtevano ime pogovora', + inputPlaceholder: 'Pogovorite se z botom', + renameConversation: 'Preimenovanje pogovora', + conversationName: 'Ime pogovora', + conversationNamePlaceholder: 'Prosimo, vnesite ime pogovora', + }, + promptEditor: { + context: { + item: { + desc: 'Vstavljanje predloge konteksta', + title: 'Kontekstu', + }, + modal: { + footer: 'Kontekste lahko upravljate v spodnjem razdelku Kontekst.', + title: '{{num}} Znanje v kontekstu', + add: 'Dodajanje konteksta', + }, + }, + history: { + item: { + desc: 'Vstavljanje predloge zgodovinskega sporočila', + title: 'Zgodovina pogovorov', + }, + modal: { + title: 'PRIMER', + edit: 'Urejanje imen vlog v pogovoru', + assistant: 'Zdravo! 
Kako vam lahko pomagam danes?', + user: 'Zdravo', + }, + }, + variable: { + item: { + desc: 'Vstavljanje spremenljivk in zunanjih orodij', + title: 'Spremenljivke in zunanja orodja', + }, + outputToolDisabledItem: { + title: 'Spremenljivke', + desc: 'Vstavljanje spremenljivk', + }, + modal: { + addTool: 'Novo orodje', + add: 'Nova spremenljivka', + }, + }, + query: { + item: { + title: 'Poizvedba', + desc: 'Vstavljanje predloge uporabniške poizvedbe', + }, + }, + existed: 'Že obstaja v pozivu', + placeholder: 'Tukaj vnesite svojo pozivno besedo, vnesite \'{\' za vstavljanje spremenljivke, vnesite \'/\', da vstavite blok vsebine', + }, + imageUploader: { + pasteImageLinkInvalid: 'Neveljavna povezava do slike', + uploadFromComputerLimit: 'Nalaganje slik ne sme presegati {{size}} MB', + uploadFromComputerUploadError: 'Nalaganje slike ni uspelo, naložite ga znova.', + pasteImageLink: 'Prilepi povezavo do slike', + imageUpload: 'Nalaganje slik', + uploadFromComputer: 'Naloži iz računalnika', + pasteImageLinkInputPlaceholder: 'Tukaj prilepi povezavo do slike', + uploadFromComputerReadError: 'Branje slik ni uspelo, poskusite znova.', + }, + fileUploader: { + fileExtensionNotSupport: 'Datotečna pripona ni podprta', + pasteFileLinkInvalid: 'Neveljavna povezava do datoteke', + pasteFileLink: 'Prilepi povezavo do datoteke', + pasteFileLinkInputPlaceholder: 'Vnesite URL ...', + uploadFromComputerUploadError: 'Nalaganje datoteke ni uspelo, naložite ga znova.', + uploadFromComputer: 'Lokalno nalaganje', + uploadFromComputerLimit: 'Nalaganje {{type}} ne sme presegati {{size}}', + uploadFromComputerReadError: 'Branje datoteke ni uspelo, poskusite znova.', + }, + tag: { + addTag: 'Dodajanje oznak', + delete: 'Brisanje oznake', + manageTags: 'Upravljanje oznak', + addNew: 'Dodajanje nove oznake', + placeholder: 'Vse oznake', + failed: 'Ustvarjanje oznake ni uspelo', + editTag: 'Urejanje oznak', + created: 'Oznaka je bila uspešno ustvarjena', + noTagYet: 'Še ni oznak', + create: 'Ustvariti', + deleteTip: 'Oznaka se uporablja, jo izbrišite?', + noTag: 'Brez oznak', + selectorPlaceholder: 'Vnesite za iskanje ali ustvarjanje', + }, + license: { + expiring_plural: 'Poteče v {{count}} dneh', + expiring: 'Poteče v enem dnevu', + }, } + export default translation diff --git a/web/i18n/sl-SI/dataset-creation.ts b/web/i18n/sl-SI/dataset-creation.ts index 1b24313045..402066ad40 100644 --- a/web/i18n/sl-SI/dataset-creation.ts +++ b/web/i18n/sl-SI/dataset-creation.ts @@ -152,6 +152,7 @@ const translation = { indexSettingTip: 'Če želite spremeniti način indeksiranja in model vdelave, pojdite na ', retrievalSettingTip: 'Če želite spremeniti nastavitve iskanja, pojdite na ', datasetSettingLink: 'nastavitve Znanja.', + maxLengthCheck: 'Največja dolžina kosa mora biti manjša od 4000', }, stepThree: { creationTitle: '🎉 Znanje ustvarjeno', diff --git a/web/i18n/sl-SI/login.ts b/web/i18n/sl-SI/login.ts index 13dfea984d..70350021bc 100644 --- a/web/i18n/sl-SI/login.ts +++ b/web/i18n/sl-SI/login.ts @@ -55,6 +55,7 @@ const translation = { passwordEmpty: 'Geslo je obvezno', passwordLengthInValid: 'Geslo mora vsebovati vsaj 8 znakov', passwordInvalid: 'Geslo mora vsebovati črke in številke, dolžina pa mora biti več kot 8 znakov', + registrationNotAllowed: 'Računa ni mogoče najti. 
Za registracijo se obrnite na skrbnika sistema.', }, license: { tip: 'Preden začnete z Dify Community Edition, preberite GitHub', @@ -70,6 +71,40 @@ const translation = { activated: 'Prijavite se zdaj', adminInitPassword: 'Geslo za inicializacijo administratorja', validate: 'Potrdi', + checkCode: { + emptyCode: 'Koda je obvezna', + verificationCodePlaceholder: 'Vnesite 6-mestno kodo', + resend: 'Poslati', + verificationCode: 'Koda za preverjanje', + tips: 'Kodo za preverjanje pošljemo na <strong>{{email}}</strong>', + verify: 'Preveriti', + validTime: 'Upoštevajte, da je koda veljavna 5 minut', + checkYourEmail: 'Preverjanje e-pošte', + didNotReceiveCode: 'Niste prejeli kode?', + invalidCode: 'Neveljavna koda', + useAnotherMethod: 'Uporabite drug način', + }, + useVerificationCode: 'Uporaba kode za preverjanje', + licenseInactive: 'Licenca je neaktivna', + changePasswordBtn: 'Nastavitev gesla', + licenseExpired: 'Licenca je potekla', + resetPassword: 'Ponastavi geslo', + back: 'Nazaj', + backToLogin: 'Nazaj na prijavo', + enterYourName: 'Prosimo, vnesite svoje uporabniško ime', + licenseLost: 'Izgubljena licenca', + licenseExpiredTip: 'Licenca za Dify Enterprise za vaš delovni prostor je potekla. Če želite še naprej uporabljati Dify, se obrnite na skrbnika.', + usePassword: 'Uporaba gesla', + sendVerificationCode: 'Pošlji kodo za preverjanje', + resetPasswordDesc: 'Vnesite e-poštni naslov, ki ste ga uporabili za prijavo na Dify, in poslali vam bomo e-poštno sporočilo za ponastavitev gesla.', + setYourAccount: 'Nastavitev računa', + noLoginMethodTip: 'Obrnite se na skrbnika sistema, da dodate način preverjanja pristnosti.', + or: 'ALI', + noLoginMethod: 'Način preverjanja pristnosti ni konfiguriran', + continueWithCode: 'Nadaljujte s kodo', + withSSO: 'Nadaljujte z enotno prijavo', + licenseLostTip: 'Povezava z licenčnim strežnikom Dify ni uspela. Če želite še naprej uporabljati Dify, se obrnite na skrbnika.', + licenseInactiveTip: 'Licenca Dify Enterprise za vaš delovni prostor je neaktivna. Če želite še naprej uporabljati Dify, se obrnite na skrbnika.', } export default translation diff --git a/web/i18n/sl-SI/workflow.ts b/web/i18n/sl-SI/workflow.ts index 767139b741..e5141f06fb 100644 --- a/web/i18n/sl-SI/workflow.ts +++ b/web/i18n/sl-SI/workflow.ts @@ -95,6 +95,10 @@ const translation = { addParallelNode: 'Dodaj vzporedno vozlišče', parallel: 'VZPOREDNO', branch: 'VEJA', + fileUploadTip: 'Funkcije nalaganja slik so nadgrajene na nalaganje datotek.', + featuresDocLink: 'Izvedi več', + featuresDescription: 'Izboljšajte uporabniško izkušnjo spletne aplikacije', + ImageUploadLegacyTip: 'Zdaj lahko ustvarite spremenljivke vrste datoteke v začetnem obrazcu. 
V prihodnje ne bomo več podpirali funkcije nalaganja slik.', }, env: { envPanelTitle: 'Spremenljivke okolja', @@ -554,6 +558,506 @@ const translation = { tracing: { stopBy: 'Ustavljeno s strani {{user}}', }, + chatVariable: { + modal: { + type: 'Vrsta', + objectValue: 'Privzeta vrednost', + description: 'Opis', + editTitle: 'Urejanje spremenljivke pogovora', + namePlaceholder: 'Ime spremenljivke', + valuePlaceholder: 'Privzeta vrednost, pustite prazno, da ni nastavljeno', + title: 'Dodajanje spremenljivke pogovora', + editInJSON: 'Urejanje v JSON', + value: 'Privzeta vrednost', + oneByOne: 'Dodajanje enega za drugim', + objectKey: 'Ključ', + objectType: 'Vrsta', + arrayValue: 'Vrednost', + name: 'Ime', + descriptionPlaceholder: 'Opis spremenljivke', + editInForm: 'Uredi v obrazcu', + addArrayValue: 'Dodajanje vrednosti', + }, + storedContent: 'Shranjena vsebina', + updatedAt: 'Posodobljeno na', + panelTitle: 'Spremenljivke pogovora', + button: 'Dodajanje spremenljivke', + panelDescription: 'Spremenljivke pogovora se uporabljajo za shranjevanje interaktivnih informacij, ki si jih mora LLM zapomniti, vključno z zgodovino pogovorov, naloženimi datotekami, uporabniškimi nastavitvami. So branje in pisanje.', + docLink: 'Če želite izvedeti več, obiščite naše dokumente.', + }, + changeHistory: { + nodeChange: 'Blokiranje spremenjeno', + placeholder: 'Ničesar še niste spremenili', + nodeDescriptionChange: 'Opis bloka je bil spremenjen', + nodePaste: 'Blokiranje lepljenja', + noteDelete: 'Opomba izbrisana', + nodeDragStop: 'Blok premaknjen', + nodeConnect: 'Blok povezan', + sessionStart: 'Začetek seje', + nodeDelete: 'Blokiraj izbrisane', + stepBackward_other: '{{count}} stopi nazaj', + hint: 'Namig', + noteAdd: 'Opomba dodana', + clearHistory: 'Počisti zgodovino', + stepForward_one: '{{count}} korak naprej', + stepBackward_one: '{{count}} korak nazaj', + nodeAdd: 'Blokiranje dodano', + noteChange: 'Opomba spremenjena', + hintText: 'Dejanjem urejanja se sledi v zgodovini sprememb, ki je shranjena v napravi za čas trajanja te seje. 
Ta zgodovina bo izbrisana, ko zapustite urejevalnik.', + stepForward_other: '{{count}} koraki naprej', + edgeDelete: 'Blok je prekinjen.', + nodeTitleChange: 'Naslov bloka spremenjen', + nodeResize: 'Spremeni velikost bloka', + title: 'Zgodovina sprememb', + currentState: 'Trenutno stanje', + }, + errorMsg: { + fields: { + code: 'Koda', + variableValue: 'Vrednost spremenljivke', + visionVariable: 'Spremenljivka vida', + model: 'Model', + rerankModel: 'Ponovno razvrsti model', + variable: 'Ime spremenljivke', + }, + invalidJson: '{{field}} je neveljaven JSON', + invalidVariable: 'Neveljavna spremenljivka', + authRequired: 'Dovoljenje je potrebno', + fieldRequired: '{{field}} je obvezno', + rerankModelRequired: 'Preden vklopite Rerank Model, preverite, ali je bil model uspešno konfiguriran v nastavitvah.', + }, + singleRun: { + startRun: 'Začni zagnati', + running: 'Tek', + testRunIteration: 'Ponovitev preskusnega zagona', + iteration: 'Ponovitev', + back: 'Nazaj', + testRun: 'Preskusni zagon', + }, + tabs: { + 'blocks': 'Bloki', + 'workflowTool': 'Potek dela', + 'transform': 'Preoblikovanje', + 'question-understand': 'Vprašanje razumeti', + 'builtInTool': 'Vgrajeno', + 'allTool': 'Ves', + 'tools': 'Orodja', + 'logic': 'Logika', + 'searchBlock': 'Iskalni blok', + 'noResult': 'Ni najdenega ujemanja', + 'customTool': 'Običaj', + 'utilities': 'Utilities', + 'searchTool': 'Orodje za iskanje', + }, + blocks: { + 'variable-aggregator': 'Spremenljivi agregator', + 'code': 'Koda', + 'parameter-extractor': 'Ekstraktor parametrov', + 'llm': 'LLM', + 'knowledge-retrieval': 'Pridobivanje znanja', + 'answer': 'Odgovoriti', + 'end': 'Konec', + 'document-extractor': 'Ekstraktor dokumentov', + 'assigner': 'Dodeljevalnik spremenljivke', + 'iteration-start': 'Začetek ponovitve', + 'template-transform': 'Predloga', + 'iteration': 'Ponovitev', + 'start': 'Začetek', + 'if-else': 'IF/ELSE', + 'list-operator': 'Operater seznama', + 'http-request': 'Zahteva HTTP', + 'variable-assigner': 'Spremenljivi agregator', + 'question-classifier': 'Klasifikator vprašanj', + }, + blocksAbout: { + 'document-extractor': 'Uporablja se za razčlenjevanje naloženih dokumentov v besedilno vsebino, ki je zlahka razumljiva LLM.', + 'list-operator': 'Uporablja se za filtriranje ali razvrščanje vsebine matrike.', + 'template-transform': 'Pretvorite podatke v niz s sintakso predloge Jinja', + 'question-classifier': 'Določite pogoje razvrščanja uporabniških vprašanj, LLM lahko določi, kako poteka pogovor na podlagi opisa klasifikacije', + 'start': 'Določanje začetnih parametrov za zagon poteka dela', + 'if-else': 'Omogoča razdelitev poteka dela na dve veji glede na pogoje if/else', + 'knowledge-retrieval': 'Omogoča poizvedovanje po besedilni vsebini, ki je povezana z uporabniškimi vprašanji iz zbirke znanja', + 'variable-assigner': 'Združite spremenljivke z več vejami v eno spremenljivko za poenoteno konfiguracijo nadaljnjih vozlišč.', + 'code': 'Izvedite kodo Python ali NodeJS za izvajanje logike po meri', + 'answer': 'Določanje vsebine odgovora v pogovoru v klepetu', + 'iteration': 'Izvedite več korakov na predmetu seznama, dokler niso prikazani vsi rezultati.', + 'http-request': 'Dovoli pošiljanje zahtev strežnika prek protokola HTTP', + 'end': 'Določanje končne in končne vrste poteka dela', + 'variable-aggregator': 'Združite spremenljivke z več vejami v eno spremenljivko za poenoteno konfiguracijo nadaljnjih vozlišč.', + 'parameter-extractor': 'Uporabite LLM za pridobivanje strukturiranih parametrov iz naravnega jezika za klicanje 
orodij ali zahteve HTTP.', + 'assigner': 'Vozlišče za dodeljevanje spremenljivk se uporablja za dodeljevanje vrednosti zapisljivim spremenljivkam (kot so spremenljivke pogovora).', + 'llm': 'Sklicevanje na velike jezikovne modele za odgovarjanje na vprašanja ali obdelavo naravnega jezika', + }, + operator: { + zoomOut: 'Pomanjšanje', + zoomTo100: 'Povečava na 100 %', + zoomToFit: 'Povečaj, da se prilega', + zoomIn: 'Povečava', + zoomTo50: 'Povečava na 50%', + }, + panel: { + helpLink: 'Povezava za pomoč', + organizeBlocks: 'Organiziranje blokov', + optional: '(neobvezno)', + nextStep: 'Naslednji korak', + checklist: 'Kontrolni seznam', + runThisStep: 'Zaženite ta korak', + about: 'Približno', + selectNextStep: 'Izberite Naslednji blok', + changeBlock: 'Spremeni blok', + createdBy: 'Ustvaril', + checklistTip: 'Pred objavo se prepričajte, da so vse težave odpravljene', + userInputField: 'Uporabniško polje za vnos', + checklistResolved: 'Vse težave so odpravljene', + addNextStep: 'Dodajanje naslednjega bloka v ta potek dela', + change: 'Spremeniti', + }, + nodes: { + common: { + memory: { + conversationRoleName: 'Ime vloge pogovora', + memoryTip: 'Nastavitve pomnilnika klepeta', + assistant: 'Predpona pomočnika', + user: 'Uporabniška predpona', + memory: 'Spomin', + windowSize: 'Velikost okna', + }, + memories: { + tip: 'Pomnilnik klepeta', + title: 'Spomine', + builtIn: 'Vgrajeno', + }, + outputVars: 'Izhodne spremenljivke', + insertVarTip: 'Vstavi spremenljivko', + }, + start: { + outputVars: { + memories: { + content: 'Vsebina sporočila', + des: 'Zgodovina pogovorov', + type: 'Vrsta sporočila', + }, + query: 'Uporabniški vnos', + files: 'Seznam datotek', + }, + required: 'Zahteva', + inputField: 'Vnosno polje', + noVarTip: 'Nastavitev vhodov, ki jih je mogoče uporabiti v poteku dela', + builtInVar: 'Vgrajene spremenljivke', + }, + end: { + output: { + variable: 'izhodna spremenljivka', + type: 'Vrsta izhoda', + }, + type: { + 'structured': 'Strukturiran', + 'plain-text': 'Navadno besedilo', + 'none': 'Nobena', + }, + outputs: 'Izhodov', + }, + answer: { + answer: 'Odgovoriti', + outputVars: 'Izhodne spremenljivke', + }, + llm: { + roleDescription: { + assistant: 'Odgovori modela na podlagi sporočil uporabnikov', + system: 'Podajte navodila na visoki ravni za pogovor', + user: 'Navedite navodila, poizvedbe ali kakršen koli besedilni vnos v model', + }, + resolution: { + low: 'Nizek', + high: 'Visok', + name: 'Resolucija', + }, + outputVars: { + usage: 'Informacije o uporabi modela', + output: 'Ustvarjanje vsebine', + }, + singleRun: { + variable: 'Spremenljivka', + }, + notSetContextInPromptTip: 'Če želite omogočiti funkcijo konteksta, izpolnite kontekstno spremenljivko v PROMPT.', + sysQueryInUser: 'sys.query v sporočilu uporabnika je obvezen', + model: 'model', + files: 'Datoteke', + addMessage: 'Dodaj sporočilo', + context: 'Kontekstu', + variables: 'Spremenljivke', + prompt: 'Uren', + vision: 'vid', + contextTooltip: 'Znanje lahko uvozite kot kontekst', + }, + knowledgeRetrieval: { + outputVars: { + title: 'Segmentirani naslov', + url: 'Segmentirani URL', + output: 'Pridobivanje segmentiranih podatkov', + icon: 'Segmentirana ikona', + metadata: 'Drugi metapodatki', + content: 'Segmentirana vsebina', + }, + queryVariable: 'Spremenljivka poizvedbe', + knowledge: 'Znanje', + }, + http: { + outputVars: { + headers: 'JSON seznama glav odgovorov', + body: 'Vsebina odgovora', + files: 'Seznam datotek', + statusCode: 'Koda stanja odgovora', + }, + authorization: { + 'authorization': 
'Dovoljenje', + 'header': 'Glava', + 'bearer': 'Nosilec', + 'api-key-title': 'API ključ', + 'basic': 'Osnoven', + 'no-auth': 'Nobena', + 'custom': 'Običaj', + 'authorizationType': 'Vrsta dovoljenja', + 'auth-type': 'Vrsta preverjanja pristnosti', + 'api-key': 'Ključ API-ja', + }, + timeout: { + readPlaceholder: 'Vnos časovne omejitve branja v sekundah', + writePlaceholder: 'Vnesite časovno omejitev pisanja v sekundah', + writeLabel: 'Časovna omejitev pisanja', + connectLabel: 'Časovna omejitev povezave', + title: 'Timeout', + readLabel: 'Časovna omejitev branja', + connectPlaceholder: 'Vnos časovne omejitve povezave v sekundah', + }, + value: 'Vrednost', + key: 'Ključ', + notStartWithHttp: 'API se mora začeti z http:// ali https://', + body: 'Telo', + type: 'Vrsta', + inputVars: 'Vhodne spremenljivke', + bulkEdit: 'Urejanje v velikem obsegu', + insertVarPlaceholder: 'vnesite "/" za vstavljanje spremenljivke', + api: 'API', + keyValueEdit: 'Urejanje ključ-vrednost', + binaryFileVariable: 'Spremenljivka binarne datoteke', + headers: 'Glave', + apiPlaceholder: 'Vnesite URL, vnesite \'/\' vstavi spremenljivko', + extractListPlaceholder: 'Vnesite indeks elementa seznama, vnesite \'/\' vstavi spremenljivko', + params: 'Params', + }, + code: { + inputVars: 'Vhodne spremenljivke', + outputVars: 'Izhodne spremenljivke', + searchDependencies: 'Odvisnosti iskanja', + advancedDependenciesTip: 'Tukaj dodajte nekaj vnaprej naloženih odvisnosti, ki trajajo dlje časa ali niso privzeto vgrajene', + advancedDependencies: 'Napredne odvisnosti', + }, + templateTransform: { + outputVars: { + output: 'Preoblikovana vsebina', + }, + code: 'Koda', + inputVars: 'Vhodne spremenljivke', + codeSupportTip: 'Podpira samo Jinja2', + }, + ifElse: { + comparisonOperator: { + 'all of': 'vse', + 'is not': 'ni', + 'not empty': 'ni prazen', + 'start with': 'Začnite z', + 'is': 'Je', + 'null': 'je nična', + 'not exists': 'ne obstaja', + 'contains': 'Vsebuje', + 'empty': 'je prazen', + 'exists': 'Obstaja', + 'in': 'v', + 'not contains': 'ne vsebuje', + 'end with': 'Končaj z', + 'not in': 'ni v', + 'not null': 'ni nična', + }, + optionName: { + video: 'Video', + doc: 'Doc', + audio: 'Avdio', + image: 'Podoba', + url: 'Spletni naslov', + localUpload: 'Lokalno nalaganje', + }, + and: 'in', + else: 'Drugega', + enterValue: 'Vnesite vrednost', + elseDescription: 'Uporablja se za določanje logike, ki jo je treba izvesti, ko pogoj if ni izpolnjen.', + addCondition: 'Dodajanje pogoja', + if: 'Če', + select: 'Izbrati', + selectVariable: 'Izberite spremenljivko ...', + conditionNotSetup: 'Pogoj NI nastavljen', + addSubVariable: 'Podspremenljivka', + notSetVariable: 'Prosimo, najprej nastavite spremenljivko', + operator: 'Operaterja', + or: 'ali', + }, + variableAssigner: { + type: { + string: 'Niz', + object: 'Predmet', + array: 'Matrika', + number: 'Številka', + }, + outputVars: { + varDescribe: '{{groupName}} izhod', + }, + addGroup: 'Dodajanje skupine', + outputType: 'Vrsta izhoda', + title: 'Dodeljevanje spremenljivk', + noVarTip: 'Seštevanje spremenljivk, ki jih je treba dodeliti', + aggregationGroupTip: 'Če omogočite to funkcijo, lahko združevalnik spremenljivk združi več naborov spremenljivk.', + aggregationGroup: 'Združevalna skupina', + varNotSet: 'Spremenljivka ni nastavljena', + setAssignVariable: 'Nastavitev spremenljivke dodelitve', + }, + assigner: { + 'writeMode': 'Način pisanja', + 'plus': 'Plus', + 'variable': 'Spremenljivka', + 'clear': 'Jasen', + 'append': 'Dodaj', + 'assignedVariable': 'Dodeljena spremenljivka', + 
'setVariable': 'Nastavi spremenljivko', + 'over-write': 'Prepisati', + 'writeModeTip': 'Način dodajanja: Na voljo samo za spremenljivke polja.', + }, + tool: { + outputVars: { + files: { + transfer_method: 'Način prenosa. Vrednost je remote_url ali local_file', + upload_file_id: 'Naloži ID datoteke', + type: 'Vrsta podpore. Zdaj podpiramo samo sliko', + url: 'URL slike', + title: 'Datoteke, ustvarjene z orodjem', + }, + json: 'JSON, ustvarjen z orodjem', + text: 'Vsebina, ustvarjena z orodjem', + }, + inputVars: 'Vhodne spremenljivke', + toAuthorize: 'Za odobritev', + }, + questionClassifiers: { + outputVars: { + className: 'Ime razreda', + }, + instruction: 'Navodilo', + classNamePlaceholder: 'Napišite ime svojega razreda', + addClass: 'Dodajanje razreda', + instructionPlaceholder: 'Napišite navodila', + topicName: 'Ime teme', + topicPlaceholder: 'Napišite ime teme', + class: 'Razred', + advancedSetting: 'Napredne nastavitve', + model: 'model', + inputVars: 'Vhodne spremenljivke', + instructionTip: 'Vnesite dodatna navodila, ki bodo klasifikatorju vprašanj pomagala bolje razumeti, kako kategorizirati vprašanja.', + }, + parameterExtractor: { + addExtractParameterContent: { + description: 'Opis', + typePlaceholder: 'Vrsta parametra izvlečka', + requiredContent: 'Zahtevano se uporablja samo kot referenca za sklepanje modela in ne za obvezno validacijo izhodnega parametra.', + required: 'Zahteva', + type: 'Vrsta', + namePlaceholder: 'Izvleček imena parametra', + descriptionPlaceholder: 'Opis parametra izvlečka', + name: 'Ime', + }, + isSuccess: 'Je uspeh.Pri uspehu je vrednost 1, pri neuspehu je vrednost 0.', + addExtractParameter: 'Dodajanje parametra izvlečka', + importFromTool: 'Uvoz iz orodij', + reasoningModeTip: 'Izberete lahko ustrezen način sklepanja glede na sposobnost modela, da se odzove na navodila za klicanje funkcij ali pozive.', + inputVar: 'Vhodna spremenljivka', + advancedSetting: 'Napredne nastavitve', + errorReason: 'Razlog za napako', + reasoningMode: 'Način sklepanja', + instruction: 'Navodilo', + instructionTip: 'Vnesite dodatna navodila, ki bodo ekstraktorju parametrov pomagala razumeti, kako izvleči parametre.', + extractParametersNotSet: 'Izvleček parametrov ni nastavljen', + extractParameters: 'Izvleček parametrov', + }, + iteration: { + ErrorMethod: { + continueOnError: 'Nadaljuj ob napaki', + removeAbnormalOutput: 'Odstranite nenormalen izhod', + operationTerminated: 'Prekinjena', + }, + output: 'Izhodne spremenljivke', + parallelMode: 'Vzporedni način', + MaxParallelismTitle: 'Največji vzporednost', + errorResponseMethod: 'Način odziva na napako', + parallelModeEnableDesc: 'V vzporednem načinu opravila v iteracijah podpirajo vzporedno izvajanje. 
To lahko konfigurirate na plošči z lastnostmi na desni.', + error_one: '{{count}} Napaka', + comma: ',', + parallelModeUpper: 'VZPOREDNI NAČIN', + parallelModeEnableTitle: 'Vzporedni način omogočen', + currentIteration: 'Trenutna ponovitev', + error_other: '{{count}} Napake', + input: 'Vhodni', + deleteTitle: 'Izbrisati iteracijsko vozlišče?', + parallelPanelDesc: 'V vzporednem načinu opravila v iteraciji podpirajo vzporedno izvajanje.', + deleteDesc: 'Če izbrišete iteracijsko vozlišče, boste izbrisali vsa podrejena vozlišča', + iteration_other: '{{count}} Ponovitev', + answerNodeWarningDesc: 'Opozorilo vzporednega načina: Vozlišča za odgovore, dodelitve spremenljivk pogovora in trajne operacije branja / pisanja v iteracijah lahko povzročijo izjeme.', + MaxParallelismDesc: 'Največja vzporednost se uporablja za nadzor števila nalog, ki se izvajajo hkrati v eni ponovitvi.', + iteration_one: '{{count}} Ponovitev', + }, + note: { + editor: { + medium: 'Srednja', + openLink: 'Odprt', + showAuthor: 'Pokaži avtorja', + bold: 'Smel', + strikethrough: 'Prečrtano', + large: 'Velik', + link: 'Povezava', + enterUrl: 'Vnesite URL ...', + small: 'Majhen', + italic: 'Ležeče', + invalidUrl: 'Neveljaven URL', + unlink: 'Prekini povezavo', + placeholder: 'Napišite svojo opombo ...', + bulletList: 'Seznam oznak', + }, + addNote: 'Dodaj opombo', + }, + docExtractor: { + outputVars: { + text: 'Izvlečeno besedilo', + }, + inputVar: 'Vhodna spremenljivka', + learnMore: 'Izvedi več', + supportFileTypes: 'Podporne vrste datotek: {{types}}.', + }, + listFilter: { + outputVars: { + result: 'Rezultat filtriranja', + first_record: 'Prvi zapis', + last_record: 'Zadnji zapis', + }, + extractsCondition: 'Ekstrahiranje elementa N', + selectVariableKeyPlaceholder: 'Izberite ključ podspremenljivke', + asc: 'ASC', + orderBy: 'Naročite po', + filterCondition: 'Pogoj filtra', + filterConditionKey: 'Ključ pogoja filtra', + desc: 'DESC', + limit: 'Vrh N', + filterConditionComparisonOperator: 'Operator za primerjavo pogojev filtra', + inputVar: 'Vhodna spremenljivka', + filterConditionComparisonValue: 'Vrednost pogoja filtra', + }, + }, } export default translation diff --git a/web/i18n/tr-TR/common.ts b/web/i18n/tr-TR/common.ts index 0438a637ce..3a80684560 100644 --- a/web/i18n/tr-TR/common.ts +++ b/web/i18n/tr-TR/common.ts @@ -591,6 +591,10 @@ const translation = { pasteFileLinkInvalid: 'Geçersiz dosya bağlantısı', fileExtensionNotSupport: 'Dosya uzantısı desteklenmiyor', }, + license: { + expiring_plural: '{{count}} gün içinde sona eriyor', + expiring: 'Bir günde sona eriyor', + }, } export default translation diff --git a/web/i18n/tr-TR/login.ts b/web/i18n/tr-TR/login.ts index b6727082a6..e742548dc5 100644 --- a/web/i18n/tr-TR/login.ts +++ b/web/i18n/tr-TR/login.ts @@ -99,6 +99,12 @@ const translation = { noLoginMethodTip: 'Bir kimlik doğrulama yöntemi eklemek için lütfen sistem yöneticisine başvurun.', sendVerificationCode: 'Doğrulama Kodu Gönder', back: 'Geri', + licenseExpiredTip: 'Çalışma alanınız için Dify Enterprise lisansının süresi doldu. Dify\'ı kullanmaya devam etmek için lütfen yöneticinizle iletişime geçin.', + licenseLostTip: 'Dify lisans sunucusuna bağlanılamadı. Dify\'ı kullanmaya devam etmek için lütfen yöneticinizle iletişime geçin.', + licenseInactiveTip: 'Çalışma alanınız için Dify Enterprise lisansı etkin değil. 
Dify\'ı kullanmaya devam etmek için lütfen yöneticinizle iletişime geçin.', + licenseExpired: 'Lisansın Süresi Doldu', + licenseLost: 'Lisans Kaybedildi', + licenseInactive: 'Lisans Etkin Değil', } export default translation diff --git a/web/i18n/tr-TR/workflow.ts b/web/i18n/tr-TR/workflow.ts index e6e25f6d0e..50dc390d54 100644 --- a/web/i18n/tr-TR/workflow.ts +++ b/web/i18n/tr-TR/workflow.ts @@ -408,6 +408,7 @@ const translation = { }, type: 'Tür', binaryFileVariable: 'İkili Dosya Değişkeni', + extractListPlaceholder: 'Liste öğesi dizinini girin, \'/\' yazın değişken ekle', }, code: { inputVars: 'Giriş Değişkenleri', @@ -619,6 +620,7 @@ const translation = { filterConditionComparisonValue: 'Filtre Koşulu değeri', selectVariableKeyPlaceholder: 'Alt değişken anahtarını seçin', desc: 'DESC', + extractsCondition: 'N öğesini ayıklayın', }, }, tracing: { diff --git a/web/i18n/uk-UA/common.ts b/web/i18n/uk-UA/common.ts index c3d3ef23b3..e3e4a39722 100644 --- a/web/i18n/uk-UA/common.ts +++ b/web/i18n/uk-UA/common.ts @@ -592,6 +592,10 @@ const translation = { uploadFromComputerReadError: 'Не вдалося прочитати файл, будь ласка, спробуйте ще раз.', uploadFromComputerUploadError: 'Не вдалося завантажити файл, будь ласка, завантажте ще раз.', }, + license: { + expiring: 'Термін дії закінчується за один день', + expiring_plural: 'Термін дії закінчується за {{count}} днів', + }, } export default translation diff --git a/web/i18n/uk-UA/login.ts b/web/i18n/uk-UA/login.ts index cdb0b79d8e..e6d1d15dd5 100644 --- a/web/i18n/uk-UA/login.ts +++ b/web/i18n/uk-UA/login.ts @@ -99,6 +99,12 @@ const translation = { noLoginMethodTip: 'Будь ласка, зверніться до адміністратора системи, щоб додати метод автентифікації.', resetPasswordDesc: 'Введіть адресу електронної пошти, яку ви використовували для реєстрації на Dify, і ми надішлемо вам електронний лист для скидання пароля.', resetPassword: 'Скинути пароль', + licenseLostTip: 'Не вдалося підключити сервер ліцензій Dify. Будь ласка, зверніться до свого адміністратора, щоб продовжити користуватися Dify.', + licenseExpired: 'Термін дії ліцензії минув', + licenseInactive: 'Ліцензія неактивна', + licenseLost: 'Ліцензію втрачено', + licenseInactiveTip: 'Ліцензія Dify Enterprise для вашої робочої області неактивна. Будь ласка, зверніться до свого адміністратора, щоб продовжити користуватися Dify.', + licenseExpiredTip: 'Термін дії ліцензії Dify Enterprise для вашого робочого простору закінчився. 
Будь ласка, зверніться до свого адміністратора, щоб продовжити користуватися Dify.', } export default translation diff --git a/web/i18n/uk-UA/workflow.ts b/web/i18n/uk-UA/workflow.ts index 663b5e4c13..6958e72ce2 100644 --- a/web/i18n/uk-UA/workflow.ts +++ b/web/i18n/uk-UA/workflow.ts @@ -407,6 +407,7 @@ const translation = { }, type: 'Тип', binaryFileVariable: 'Змінна двійкового файлу', + extractListPlaceholder: 'Введіть індекс елемента списку, введіть \'/\' вставити змінну', }, code: { inputVars: 'Вхідні змінні', @@ -618,6 +619,7 @@ const translation = { orderBy: 'Замовити по', filterConditionComparisonOperator: 'Оператор порівняння умов фільтра', filterConditionComparisonValue: 'Значення умови фільтра', + extractsCondition: 'Витягніть елемент N', }, }, tracing: { diff --git a/web/i18n/vi-VN/common.ts b/web/i18n/vi-VN/common.ts index 45282ad5d2..8a25cd52f2 100644 --- a/web/i18n/vi-VN/common.ts +++ b/web/i18n/vi-VN/common.ts @@ -591,6 +591,10 @@ const translation = { uploadFromComputerUploadError: 'Tải lên tệp không thành công, vui lòng tải lên lại.', uploadFromComputerReadError: 'Đọc tệp không thành công, vui lòng thử lại.', }, + license: { + expiring_plural: 'Hết hạn sau {{count}} ngày', + expiring: 'Hết hạn trong một ngày', + }, } export default translation diff --git a/web/i18n/vi-VN/login.ts b/web/i18n/vi-VN/login.ts index a07c1bf807..ab4ab68f48 100644 --- a/web/i18n/vi-VN/login.ts +++ b/web/i18n/vi-VN/login.ts @@ -99,6 +99,12 @@ const translation = { usePassword: 'Sử dụng mật khẩu', resetPassword: 'Đặt lại mật khẩu', sendVerificationCode: 'Gửi mã xác minh', + licenseInactive: 'Giấy phép không hoạt động', + licenseLost: 'Mất giấy phép', + licenseInactiveTip: 'Giấy phép Dify Enterprise cho không gian làm việc của bạn không hoạt động. Vui lòng liên hệ với quản trị viên của bạn để tiếp tục sử dụng Dify.', + licenseExpired: 'Giấy phép đã hết hạn', + licenseExpiredTip: 'Giấy phép Dify Enterprise cho không gian làm việc của bạn đã hết hạn. Vui lòng liên hệ với quản trị viên của bạn để tiếp tục sử dụng Dify.', + licenseLostTip: 'Không thể kết nối máy chủ cấp phép Dify. 
Vui lòng liên hệ với quản trị viên của bạn để tiếp tục sử dụng Dify.',
 }

 export default translation
diff --git a/web/i18n/vi-VN/workflow.ts b/web/i18n/vi-VN/workflow.ts
index 1176fdd2b5..b90c89cb20 100644
--- a/web/i18n/vi-VN/workflow.ts
+++ b/web/i18n/vi-VN/workflow.ts
@@ -407,6 +407,7 @@ const translation = {
     },
     binaryFileVariable: 'Biến tệp nhị phân',
     type: 'Kiểu',
+    extractListPlaceholder: 'Nhập chỉ mục của mục trong danh sách, nhập \'/\' để chèn biến',
   },
   code: {
     inputVars: 'Biến đầu vào',
@@ -618,6 +619,7 @@ const translation = {
       filterCondition: 'Điều kiện lọc',
       asc: 'ASC',
       filterConditionComparisonOperator: 'Toán tử so sánh điều kiện bộ lọc',
+      extractsCondition: 'Trích xuất mục thứ N',
     },
   },
   tracing: {
diff --git a/web/i18n/zh-Hans/app-api.ts b/web/i18n/zh-Hans/app-api.ts
index a0defdab62..f59d9065a6 100644
--- a/web/i18n/zh-Hans/app-api.ts
+++ b/web/i18n/zh-Hans/app-api.ts
@@ -78,6 +78,7 @@ const translation = {
     requestBody: 'Request Body',
     pathParams: 'Path Params',
     query: 'Query',
+    toc: '目录',
   },
 }
diff --git a/web/i18n/zh-Hans/common.ts b/web/i18n/zh-Hans/common.ts
index ab87085652..2c43a2986b 100644
--- a/web/i18n/zh-Hans/common.ts
+++ b/web/i18n/zh-Hans/common.ts
@@ -27,6 +27,7 @@ const translation = {
     lineBreak: '换行',
     sure: '我确定',
     download: '下载',
+    viewDetails: '查看详情',
     delete: '删除',
     settings: '设置',
     setup: '设置',
@@ -602,6 +603,10 @@ const translation = {
     created: '标签创建成功',
     failed: '标签创建失败',
   },
+  license: {
+    expiring: '许可证还有 1 天到期',
+    expiring_plural: '许可证还有 {{count}} 天到期',
+  },
 }

 export default translation
diff --git a/web/i18n/zh-Hans/login.ts b/web/i18n/zh-Hans/login.ts
index 40697701da..7f64c954b1 100644
--- a/web/i18n/zh-Hans/login.ts
+++ b/web/i18n/zh-Hans/login.ts
@@ -99,6 +99,12 @@ const translation = {
   back: '返回',
   noLoginMethod: '未配置身份认证方式',
   noLoginMethodTip: '请联系系统管理员添加身份认证方式',
+  licenseExpired: '许可证已过期',
+  licenseExpiredTip: '您所在空间的 Dify Enterprise 许可证已过期,请联系管理员以继续使用 Dify。',
+  licenseLost: '许可证丢失',
+  licenseLostTip: '无法连接 Dify 许可证服务器,请联系管理员以继续使用 Dify。',
+  licenseInactive: '许可证未激活',
+  licenseInactiveTip: '您所在空间的 Dify Enterprise 许可证尚未激活,请联系管理员以继续使用 Dify。',
 }

 export default translation
diff --git a/web/i18n/zh-Hans/plugin.ts b/web/i18n/zh-Hans/plugin.ts
index c1ad4e0d67..b293f99f8f 100644
--- a/web/i18n/zh-Hans/plugin.ts
+++ b/web/i18n/zh-Hans/plugin.ts
@@ -116,6 +116,8 @@ const translation = {
     cancel: '取消',
     back: '返回',
     next: '下一步',
+    pluginLoadError: '插件加载错误',
+    pluginLoadErrorDesc: '此插件将不会被安装',
   },
   installFromGitHub: {
     installPlugin: '从 GitHub 安装插件',
diff --git a/web/i18n/zh-Hans/tools.ts b/web/i18n/zh-Hans/tools.ts
index f3ec76aa97..1da5430c37 100644
--- a/web/i18n/zh-Hans/tools.ts
+++ b/web/i18n/zh-Hans/tools.ts
@@ -153,6 +153,7 @@ const translation = {
     label: '工具',
     placeholder: '选择一个工具...',
   },
+  noTools: '没有工具',
 }

 export default translation
diff --git a/web/i18n/zh-Hant/common.ts b/web/i18n/zh-Hant/common.ts
index 184331d6d5..f801a64d10 100644
--- a/web/i18n/zh-Hant/common.ts
+++ b/web/i18n/zh-Hant/common.ts
@@ -591,6 +591,10 @@ const translation = {
     fileExtensionNotSupport: '不支援檔擴展名',
     uploadFromComputerLimit: '上傳文件不能超過 {{size}}',
   },
+  license: {
+    expiring: '許可證將在 1 天內過期',
+    expiring_plural: '許可證將在 {{count}} 天後過期',
+  },
 }

 export default translation
diff --git a/web/i18n/zh-Hant/login.ts b/web/i18n/zh-Hant/login.ts
index a35346e71a..6f2b834118 100644
--- a/web/i18n/zh-Hant/login.ts
+++ b/web/i18n/zh-Hant/login.ts
@@ -99,6 +99,12 @@ const translation = {
   back: '返回',
   resetPasswordDesc: '輸入您用於註冊 Dify 的電子郵件,我們將向您發送一封密碼重置電子郵件。',
   usePassword: '使用密碼',
+  licenseExpiredTip: '您的工作區的 Dify Enterprise 許可證已過期。請聯繫您的管理員以繼續使用 Dify。',
+  licenseExpired: '許可證已過期',
+  licenseLost: '許可證丟失',
+  licenseInactive: '許可證尚未啟用',
+  licenseInactiveTip: '您的工作區的 Dify Enterprise 許可證尚未啟用。請聯繫您的管理員以繼續使用 Dify。',
+  licenseLostTip: '無法連接 Dify 許可證伺服器。請聯繫您的管理員以繼續使用 Dify。',
 }

 export default translation
diff --git a/web/i18n/zh-Hant/workflow.ts b/web/i18n/zh-Hant/workflow.ts
index f3fbfdedc2..3cd7ad2499 100644
--- a/web/i18n/zh-Hant/workflow.ts
+++ b/web/i18n/zh-Hant/workflow.ts
@@ -407,6 +407,7 @@ const translation = {
     },
     type: '類型',
     binaryFileVariable: '二進位檔變數',
+    extractListPlaceholder: '輸入清單項索引,鍵入 『/』 插入變數',
   },
   code: {
     inputVars: '輸入變量',
@@ -618,6 +619,7 @@ const translation = {
       selectVariableKeyPlaceholder: 'Select sub variable key (選擇子變數鍵)',
       filterConditionComparisonOperator: 'Filter Condition Comparison 運算符',
       filterConditionKey: '篩選條件鍵',
+      extractsCondition: '提取第 N 項',
     },
   },
   tracing: {
diff --git a/web/package.json b/web/package.json
index ca8652f812..684754f72b 100644
--- a/web/package.json
+++ b/web/package.json
@@ -1,12 +1,12 @@
 {
   "name": "dify-web",
-  "version": "0.11.1",
+  "version": "0.11.2",
   "private": true,
   "engines": {
     "node": ">=18.17.0"
   },
   "scripts": {
-    "dev": "next dev",
+    "dev": "NODE_OPTIONS='--inspect' next dev",
     "build": "next build",
     "start": "cp -r .next/static .next/standalone/.next/static && cp -r public .next/standalone/public && cross-env PORT=$npm_config_port HOSTNAME=$npm_config_host node .next/standalone/server.js",
     "lint": "next lint",
@@ -44,9 +44,8 @@
     "@sentry/utils": "^7.54.0",
     "@svgdotjs/svg.js": "^3.2.4",
     "@tailwindcss/typography": "^0.5.15",
-    "@tanstack/react-query": "^5.59.20",
-    "@tanstack/react-query-devtools": "^5.59.20",
-    "@types/hast": "^3.0.4",
+    "@tanstack/react-query": "^5.60.5",
+    "@tanstack/react-query-devtools": "^5.60.5",
     "ahooks": "^3.8.1",
     "class-variance-authority": "^0.7.0",
     "classnames": "^2.5.1",
diff --git a/web/service/base.ts b/web/service/base.ts
index 4519c5f006..38ae5094e0 100644
--- a/web/service/base.ts
+++ b/web/service/base.ts
@@ -18,7 +18,8 @@ import type {
 } from '@/types/workflow'
 import { removeAccessToken } from '@/app/components/share/utils'
 import type { FetchOptionType, ResponseError } from './fetch'
-import { ContentType, base, baseOptions, getPublicToken } from './fetch'
+import { ContentType, base, baseOptions, getAccessToken } from './fetch'
+import { asyncRunSafe } from '@/utils'

 const TIME_OUT = 100000

 export type IOnDataMoreInfo = {
@@ -253,14 +254,7 @@ const baseFetch = base

 export const upload = (options: any, isPublicAPI?: boolean, url?: string, searchParams?: string): Promise<any> => {
   const urlPrefix = isPublicAPI ? PUBLIC_API_PREFIX : API_PREFIX
-  let token = ''
-  if (isPublicAPI) {
-    token = getPublicToken()
-  }
-  else {
-    const accessToken = localStorage.getItem('console_token') || ''
-    token = accessToken
-  }
+  const token = getAccessToken(isPublicAPI)
   const defaultOptions = {
     method: 'POST',
     url: (url ? `${urlPrefix}${url}` : `${urlPrefix}/files/upload`) + (searchParams || ''),
@@ -351,6 +345,9 @@ export const ssePost = (
   if (body)
     options.body = JSON.stringify(body)

+  const accessToken = getAccessToken(isPublicAPI)
+  options.headers!.set('Authorization', `Bearer ${accessToken}`)
+
 globalThis.fetch(urlWithPrefix, options as RequestInit)
     .then((res) => {
       if (!/^(2|3)\d{2}$/.test(String(res.status))) {
@@ -398,54 +395,78 @@
 }

 // base request
-export const request = <T>(url: string, options = {}, otherOptions: IOtherOptions = {}) => {
-  return new Promise<T>((resolve, reject) => {
-    baseFetch<T>(url, options, otherOptions).then(resolve).catch((errResp) => {
-      if (errResp?.status === 401) {
-        return refreshAccessTokenOrRelogin(TIME_OUT).then(() => {
-          baseFetch<T>(url, options, otherOptions).then(resolve).catch(reject)
-        }).catch(() => {
-          const {
-            isPublicAPI = false,
-            silent,
-          } = otherOptions
-          const bodyJson = errResp.json()
-          if (isPublicAPI) {
-            return bodyJson.then((data: ResponseError) => {
-              if (data.code === 'web_sso_auth_required')
-                requiredWebSSOLogin()
-
-              if (data.code === 'unauthorized') {
-                removeAccessToken()
-                globalThis.location.reload()
-              }
-
-              return Promise.reject(data)
-            })
-          }
-          const loginUrl = `${globalThis.location.origin}/signin`
-          bodyJson.then((data: ResponseError) => {
-            if (data.code === 'init_validate_failed' && IS_CE_EDITION && !silent)
-              Toast.notify({ type: 'error', message: data.message, duration: 4000 })
-            else if (data.code === 'not_init_validated' && IS_CE_EDITION)
-              globalThis.location.href = `${globalThis.location.origin}/init`
-            else if (data.code === 'not_setup' && IS_CE_EDITION)
-              globalThis.location.href = `${globalThis.location.origin}/install`
-            else if (location.pathname !== '/signin' || !IS_CE_EDITION)
-              globalThis.location.href = loginUrl
-            else if (!silent)
-              Toast.notify({ type: 'error', message: data.message })
-          }).catch(() => {
-            // Handle any other errors
-            globalThis.location.href = loginUrl
-          })
-        })
-      }
-      else {
-        reject(errResp)
-      }
-    })
-  })
+export const request = async<T>(url: string, options = {}, otherOptions?: IOtherOptions) => {
+  try {
+    const otherOptionsForBaseFetch = otherOptions || {}
+    const [err, resp] = await asyncRunSafe<T>(baseFetch(url, options, otherOptionsForBaseFetch))
+    if (err === null)
+      return resp
+    const errResp: Response = err as any
+    if (errResp.status === 401) {
+      const [parseErr, errRespData] = await asyncRunSafe<ResponseError>(errResp.json())
+      const loginUrl = `${globalThis.location.origin}/signin`
+      if (parseErr) {
+        globalThis.location.href = loginUrl
+        return Promise.reject(err)
+      }
+      // special code
+      const { code, message } = errRespData
+      // webapp sso
+      if (code === 'web_sso_auth_required') {
+        requiredWebSSOLogin()
+        return Promise.reject(err)
+      }
+      if (code === 'unauthorized_and_force_logout') {
+        localStorage.removeItem('console_token')
+        localStorage.removeItem('refresh_token')
+        globalThis.location.reload()
+        return Promise.reject(err)
+      }
+      const {
+        isPublicAPI = false,
+        silent,
+      } = otherOptionsForBaseFetch
+      if (isPublicAPI && code === 'unauthorized') {
+        removeAccessToken()
+        globalThis.location.reload()
+        return Promise.reject(err)
+      }
+      if (code === 'init_validate_failed' && IS_CE_EDITION && !silent) {
+        Toast.notify({ type: 'error', message, duration: 4000 })
+        return Promise.reject(err)
+      }
+      if (code === 'not_init_validated' && IS_CE_EDITION) {
+        globalThis.location.href = `${globalThis.location.origin}/init`
+        return Promise.reject(err)
+      }
+      if (code === 'not_setup' && IS_CE_EDITION) {
+        globalThis.location.href = `${globalThis.location.origin}/install`
+        return Promise.reject(err)
+      }
+
+      // refresh token
+      const [refreshErr] = await asyncRunSafe(refreshAccessTokenOrRelogin(TIME_OUT))
+      if (refreshErr === null)
+        return baseFetch<T>(url, options, otherOptionsForBaseFetch)
+      if (location.pathname !== '/signin' || !IS_CE_EDITION) {
+        globalThis.location.href = loginUrl
+        return Promise.reject(err)
+      }
+      if (!silent) {
+        Toast.notify({ type: 'error', message })
+        return Promise.reject(err)
+      }
+      globalThis.location.href = loginUrl
+      return Promise.reject(err)
+    }
+    else {
+      return Promise.reject(err)
+    }
+  }
+  catch (error) {
+    console.error(error)
+    return Promise.reject(error)
+  }
 }

 // request methods
diff --git a/web/service/fetch.ts b/web/service/fetch.ts
index 0ca804c01e..666a3e2336 100644
--- a/web/service/fetch.ts
+++ b/web/service/fetch.ts
@@ -72,18 +72,36 @@ export const getPublicToken = () => {
   try {
     accessTokenJson = JSON.parse(accessToken)
   }
-  catch {}
+  catch { }
   token = accessTokenJson[sharedToken]
   return token || ''
 }

+export function getAccessToken(isPublicAPI?: boolean) {
+  if (isPublicAPI) {
+    const sharedToken = globalThis.location.pathname.split('/').slice(-1)[0]
+    const accessToken = localStorage.getItem('token') || JSON.stringify({ [sharedToken]: '' })
+    let accessTokenJson = { [sharedToken]: '' }
+    try {
+      accessTokenJson = JSON.parse(accessToken)
+    }
+    catch (e) {
+
+    }
+    return accessTokenJson[sharedToken]
+  }
+  else {
+    return localStorage.getItem('console_token') || ''
+  }
+}
+
 const beforeRequestPublicAuthorization: BeforeRequestHook = (request) => {
-  const token = getPublicToken()
+  const token = getAccessToken(true)
   request.headers.set('Authorization', `Bearer ${token}`)
 }

 const beforeRequestAuthorization: BeforeRequestHook = (request) => {
-  const accessToken = localStorage.getItem('console_token') || ''
+  const accessToken = getAccessToken()
   request.headers.set('Authorization', `Bearer ${accessToken}`)
 }

@@ -175,7 +193,7 @@ async function base<T>(url: string, options: FetchOptionType = {}, otherOptions:
   const contentType = res.headers.get('content-type')
   if (
     contentType
-    && [ContentType.download, ContentType.audio].includes(contentType)
+    && [ContentType.download, ContentType.audio].includes(contentType)
   )
     return await res.blob() as T
diff --git a/web/service/use-plugins.ts b/web/service/use-plugins.ts
index 8c45f35301..0c05290e0f 100644
--- a/web/service/use-plugins.ts
+++ b/web/service/use-plugins.ts
@@ -2,10 +2,14 @@ import { useCallback, useState } from 'react'
 import type {
   DebugInfo as DebugInfoTypes,
   Dependency,
+  GitHubItemAndMarketPlaceDependency,
   InstallPackageResponse,
   InstalledPluginListResponse,
+  PackageDependency,
   Permissions,
+  Plugin,
   PluginTask,
+  PluginsFromMarketplaceByInfoResponse,
   PluginsFromMarketplaceResponse,
   VersionListResponse,
   uploadGitHubResponse,
@@ -115,25 +119,53 @@ export const useInstallFromMarketplaceAndGitHub = ({
   onSuccess?: (res: { success: boolean }[]) => void
 }) => {
   return useMutation({
-    mutationFn: (payload: Dependency[]) => {
-      return Promise.all(payload.map(async (item) => {
+    mutationFn: (data: {
+      payload: Dependency[],
+      plugin: Plugin[],
+    }) => {
+      const { payload, plugin } = data
+      return Promise.all(payload.map(async (item, i) => {
         try {
           if (item.type === 'github') {
+            const data = item as GitHubItemAndMarketPlaceDependency
+            let pluginId = ''
+            // Plugins from a local bundle do not carry data.value.github_plugin_unique_identifier
+            if (!data.value.github_plugin_unique_identifier) {
+              const { unique_identifier } = await post<uploadGitHubResponse>('/workspaces/current/plugin/upload/github', {
+                body: {
+                  repo: data.value.repo!,
+                  version: data.value.release! || data.value.version!,
+                  package: data.value.packages! || data.value.package!,
+                },
+              })
+              pluginId = unique_identifier
+            }
             await post<InstallPackageResponse>('/workspaces/current/plugin/install/github', {
               body: {
-                repo: item.value.repo!,
-                version: item.value.version!,
-                package: item.value.package!,
-                plugin_unique_identifier: item.value.github_plugin_unique_identifier!,
+                repo: data.value.repo!,
+                version: data.value.release! || data.value.version!,
+                package: data.value.packages! || data.value.package!,
+                plugin_unique_identifier: data.value.github_plugin_unique_identifier! || pluginId,
+              },
+            })
+          }
+          if (item.type === 'marketplace') {
+            const data = item as GitHubItemAndMarketPlaceDependency
+
+            await post<InstallPackageResponse>('/workspaces/current/plugin/install/marketplace', {
+              body: {
+                plugin_unique_identifiers: [data.value.plugin_unique_identifier! || plugin[i]?.plugin_id],
+              },
+            })
+          }
+          if (item.type === 'package') {
+            const data = item as PackageDependency
+            await post<InstallPackageResponse>('/workspaces/current/plugin/install/pkg', {
+              body: {
+                plugin_unique_identifiers: [data.value.unique_identifier],
               },
             })
-            return ({ success: true })
           }
-          await post<InstallPackageResponse>('/workspaces/current/plugin/install/marketplace', {
-            body: {
-              plugin_unique_identifiers: [item.value.plugin_unique_identifier!],
-            },
-          })
           return ({ success: true })
         }
         // eslint-disable-next-line unused-imports/no-unused-vars
@@ -217,6 +249,25 @@ export const useFetchPluginsInMarketPlaceByIds = (unique_identifiers: string[])
         unique_identifiers,
       },
     }),
+    enabled: unique_identifiers?.filter(i => !!i).length > 0,
+    retry: 0,
+  })
+}
+
+export const useFetchPluginsInMarketPlaceByInfo = (infos: Record<string, any>[]) => {
+  return useQuery({
+    queryKey: [NAME_SPACE, 'fetchPluginsInMarketPlaceByInfo', infos],
+    queryFn: () => postMarketplace<{ data: PluginsFromMarketplaceByInfoResponse }>('/plugins/versions/batch', {
+      body: {
+        plugin_tuples: infos.map(info => ({
+          org: info.organization,
+          name: info.plugin,
+          version: info.version,
+        })),
+      },
+    }),
+    enabled: infos?.filter(i => !!i).length > 0,
+    retry: 0,
   })
 }
diff --git a/web/service/use-tools.ts b/web/service/use-tools.ts
index 36b8c3d120..337f0bbfb8 100644
--- a/web/service/use-tools.ts
+++ b/web/service/use-tools.ts
@@ -18,7 +18,6 @@ export const useAllToolProviders = () => {
   return useQuery({
     queryKey: useAllToolProvidersKey,
     queryFn: () => get<Collection[]>('/workspaces/current/tool-providers'),
-    initialData: [],
   })
 }
diff --git a/web/types/feature.ts b/web/types/feature.ts
index b60ef0c52a..662405c1dd 100644
--- a/web/types/feature.ts
+++ b/web/types/feature.ts
@@ -4,6 +4,20 @@ export enum SSOProtocol {
   OAuth2 = 'oauth2',
 }

+export enum LicenseStatus {
+  NONE = 'none',
+  INACTIVE = 'inactive',
+  ACTIVE = 'active',
+  EXPIRING = 'expiring',
+  EXPIRED = 'expired',
+  LOST = 'lost',
+}
+
+type License = {
+  status: LicenseStatus
+  expired_at: string | null
+}
+
 export type SystemFeatures = {
   sso_enforced_for_signin: boolean
   sso_enforced_for_signin_protocol: SSOProtocol | ''
@@ -16,6 +30,7 @@ export type SystemFeatures = {
   enable_social_oauth_login: boolean
   is_allow_create_workspace: boolean
   is_allow_register: boolean
+  license: License
 }

 export const defaultSystemFeatures: SystemFeatures = {
@@ -30,4 +45,8 @@ export const defaultSystemFeatures: SystemFeatures = {
   enable_social_oauth_login: false,
   is_allow_create_workspace: false,
   is_allow_register: false,
+  license: {
+    status: LicenseStatus.NONE,
+    expired_at: '',
+  },
 }
diff --git a/web/utils/context.ts b/web/utils/context.ts
new file mode 100644
index 0000000000..8829a679ce
--- /dev/null
+++ b/web/utils/context.ts
@@ -0,0 +1,45 @@
+import { type Context, type Provider, createContext, useContext } from 'react'
+import * as selector from 'use-context-selector'
+
+const createCreateCtxFunction = (
+  useContextImpl: typeof useContext,
+  createContextImpl: typeof createContext) => {
+  return function<T>({ name, defaultValue }: CreateCtxOptions<T> = {}): CreateCtxReturn<T> {
+    const emptySymbol = Symbol(`empty ${name}`)
+    // @ts-expect-error it's ok here
+    const context = createContextImpl<T>(defaultValue ?? emptySymbol)
+    const useContextValue = () => {
+      const ctx = useContextImpl(context)
+      if (ctx === emptySymbol)
+        throw new Error(`No ${name ?? 'related'} context found.`)
+
+      return ctx
+    }
+    const result = [context.Provider, useContextValue, context] as CreateCtxReturn<T>
+    result.context = context
+    result.provider = context.Provider
+    result.useContextValue = useContextValue
+    return result
+  }
+}
+
+type CreateCtxOptions<T> = {
+  defaultValue?: T
+  name?: string
+}
+
+type CreateCtxReturn<T> = [Provider<T>, () => T, Context<T>] & {
+  context: Context<T>
+  provider: Provider<T>
+  useContextValue: () => T
+}
+
+// example
+// const [AppProvider, useApp, AppContext] = createCtx<AppContextValue>()
+
+export const createCtx = createCreateCtxFunction(useContext, createContext)
+
+export const createSelectorCtx = createCreateCtxFunction(
+  selector.useContext,
+  selector.createContext as typeof createContext,
+)
diff --git a/web/utils/index.ts b/web/utils/index.ts
index b8b499ae32..4342a08519 100644
--- a/web/utils/index.ts
+++ b/web/utils/index.ts
@@ -8,10 +8,8 @@ export async function asyncRunSafe<T = any>(fn: Promise<T>): Promise<[Error] | [
   try {
     return [null, await fn]
   }
-  catch (e) {
-    if (e instanceof Error)
-      return [e]
-    return [new Error('unknown error')]
+  catch (e: any) {
+    return [e || new Error('unknown error')]
   }
 }
diff --git a/web/yarn.lock b/web/yarn.lock
index 88a57dba70..7a2ba6cc90 100644
--- a/web/yarn.lock
+++ b/web/yarn.lock
@@ -3108,6 +3108,30 @@
     lodash.merge "^4.6.2"
     postcss-selector-parser "6.0.10"

+"@tanstack/query-core@5.60.5":
+  version "5.60.5"
+  resolved "https://registry.yarnpkg.com/@tanstack/query-core/-/query-core-5.60.5.tgz#37b7c5ab7e6894cea9ef341299a7a3febc2ea361"
+  integrity sha512-jiS1aC3XI3BJp83ZiTuDLerTmn9P3U95r6p+6/SNauLJaYxfIC4dMuWygwnBHIZxjn2zJqEpj3nysmPieoxfPQ==
+
+"@tanstack/query-devtools@5.59.20":
+  version "5.59.20"
+  resolved "https://registry.yarnpkg.com/@tanstack/query-devtools/-/query-devtools-5.59.20.tgz#a827ac682ec1268fc9c99e7b6eb739f35b5606aa"
+  integrity sha512-vxhuQ+8VV4YWQSFxQLsuM+dnEKRY7VeRzpNabFXdhEwsBYLrjXlF1pM38A8WyKNLqZy8JjyRO8oP4Wd/oKHwuQ==
+
+"@tanstack/react-query-devtools@^5.60.5":
+  version "5.60.5"
+  resolved "https://registry.yarnpkg.com/@tanstack/react-query-devtools/-/react-query-devtools-5.60.5.tgz#fe398b4896a292fbe835d3fd4799e929de94c25a"
+  integrity sha512-lzANl0ih3CNKBGUoXhhkAAHI1Y4Yqs9Jf3iuTUsGiPpmF0RWXTeYFaQxc+h1PhJz3VwYrIYCwmPoNts0mSjSuA==
+  dependencies:
+    "@tanstack/query-devtools" "5.59.20"
+
+"@tanstack/react-query@^5.60.5":
+  version "5.60.5"
+  resolved "https://registry.yarnpkg.com/@tanstack/react-query/-/react-query-5.60.5.tgz#3194c390f7eff20542b321c3042880dc3f1a81e2"
+  integrity sha512-M77bOsPwj1wYE56gk7iJvxGAr4IC12NWdIDhT+Eo8ldkWRHMvIR8I/rufIvT1OXoV/bl7EECwuRuMlxxWtvW2Q==
+  dependencies:
+    "@tanstack/query-core" "5.60.5"
+
 "@testing-library/dom@10.4.0":
   version "10.4.0"
   resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-10.4.0.tgz#82a9d9462f11d240ecadbf406607c6ceeeff43a8"
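Usage sketch for the new license.expiring / license.expiring_plural keys added to web/i18n/zh-Hans/common.ts and web/i18n/zh-Hant/common.ts above: the _plural suffix and the {{count}} placeholder follow the i18next v3 plural convention these locale files use, so i18next picks the singular key when count is 1 and the _plural key otherwise. The exact key prefix below ('common.license.expiring') is an assumption; it depends on how the app registers its translation namespaces.

import i18next from 'i18next'

// count === 1 selects 'expiring'; any other count selects 'expiring_plural',
// with {{count}} interpolated into the chosen string.
const banner = i18next.t('common.license.expiring', { count: 3 })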
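The rewritten request() in web/service/base.ts drives its error handling through asyncRunSafe's [error, value] tuples instead of nested .then/.catch chains. A self-contained sketch of that pattern, with asyncRunSafe reproduced from web/utils/index.ts (its return type completed from the function body, since the hunk header truncates it) and an illustrative URL:

async function asyncRunSafe<T = any>(fn: Promise<T>): Promise<[Error] | [null, T]> {
  try {
    // Resolution: error slot is null, value slot carries the result.
    return [null, await fn]
  }
  catch (e: any) {
    // Rejection: the error becomes the single tuple element.
    return [e || new Error('unknown error')]
  }
}

async function demo() {
  const [err, res] = await asyncRunSafe(globalThis.fetch('https://example.com/ping'))
  if (err === null)
    console.log('status:', (res as Response).status)
  else
    console.error('request failed:', err)
}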
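The license field added to SystemFeatures in web/types/feature.ts is what the new licenseExpired / licenseLost / licenseInactive strings key off. A hypothetical gating helper, purely illustrative; the PR's actual wiring lives in the sign-in screens and is not shown in this diff:

import { LicenseStatus, type SystemFeatures } from '@/types/feature'

// Hypothetical: true when the workspace should be blocked from signing in.
function licenseBlocksLogin(features: SystemFeatures): boolean {
  const { status } = features.license
  return status === LicenseStatus.EXPIRED
    || status === LicenseStatus.LOST
    || status === LicenseStatus.INACTIVE
}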
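web/utils/context.ts is a new helper, and its inline comment already hints at the tuple form. A slightly fuller usage sketch, relying only on what the file exports (the context name and value type are illustrative):

import { createCtx } from '@/utils/context'

type CounterValue = { count: number }

// The return value works both as a tuple and via named properties
// (result.provider, result.useContextValue, result.context).
const [CounterProvider, useCounter] = createCtx<CounterValue>({ name: 'counter' })

// In a component tree:
//   <CounterProvider value={{ count: 1 }}> ... </CounterProvider>
// useCounter() under that provider returns { count: 1 }; with no provider
// and no defaultValue it throws "No counter context found." instead of
// silently handing back undefined. createSelectorCtx behaves the same way
// but is backed by use-context-selector for finer-grained re-renders.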