diff --git a/api/.env.example b/api/.env.example
index 5751605b48..f8a2812563 100644
--- a/api/.env.example
+++ b/api/.env.example
@@ -42,6 +42,11 @@ REDIS_SENTINEL_USERNAME=
REDIS_SENTINEL_PASSWORD=
REDIS_SENTINEL_SOCKET_TIMEOUT=0.1
+# Redis Cluster configuration.
+REDIS_USE_CLUSTERS=false
+REDIS_CLUSTERS=
+REDIS_CLUSTERS_PASSWORD=
+
# PostgreSQL database configuration
DB_USERNAME=postgres
DB_PASSWORD=difyai123456
@@ -234,6 +239,10 @@ ANALYTICDB_ACCOUNT=testaccount
ANALYTICDB_PASSWORD=testpassword
ANALYTICDB_NAMESPACE=dify
ANALYTICDB_NAMESPACE_PASSWORD=difypassword
+ANALYTICDB_HOST=gp-test.aliyuncs.com
+ANALYTICDB_PORT=5432
+ANALYTICDB_MIN_CONNECTION=1
+ANALYTICDB_MAX_CONNECTION=5
# OpenSearch configuration
OPENSEARCH_HOST=127.0.0.1
diff --git a/api/commands.py b/api/commands.py
index 10122ceb3d..23787f38bf 100644
--- a/api/commands.py
+++ b/api/commands.py
@@ -589,7 +589,7 @@ def upgrade_db():
click.echo(click.style("Database migration successful!", fg="green"))
except Exception as e:
- logging.exception(f"Database migration failed: {e}")
+ logging.exception("Failed to execute database migration")
finally:
lock.release()
else:
@@ -633,7 +633,7 @@ where sites.id is null limit 1000"""
except Exception as e:
failed_app_ids.append(app_id)
click.echo(click.style("Failed to fix missing site for app {}".format(app_id), fg="red"))
- logging.exception(f"Fix app related site missing issue failed, error: {e}")
+ logging.exception(f"Failed to fix app related site missing issue, app_id: {app_id}")
continue
if not processed_count:
diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py
index f368a19469..99f86be12e 100644
--- a/api/configs/feature/__init__.py
+++ b/api/configs/feature/__init__.py
@@ -616,6 +616,11 @@ class DataSetConfig(BaseSettings):
default=False,
)
+ PLAN_SANDBOX_CLEAN_MESSAGE_DAY_SETTING: PositiveInt = Field(
+ description="Interval in days for message cleanup operations - plan: sandbox",
+ default=30,
+ )
+
class WorkspaceConfig(BaseSettings):
"""
diff --git a/api/configs/middleware/cache/redis_config.py b/api/configs/middleware/cache/redis_config.py
index 26b9b1347c..2e98c31ec3 100644
--- a/api/configs/middleware/cache/redis_config.py
+++ b/api/configs/middleware/cache/redis_config.py
@@ -68,3 +68,18 @@ class RedisConfig(BaseSettings):
description="Socket timeout in seconds for Redis Sentinel connections",
default=0.1,
)
+
+ REDIS_USE_CLUSTERS: bool = Field(
+ description="Enable Redis Clusters mode for high availability",
+ default=False,
+ )
+
+ REDIS_CLUSTERS: Optional[str] = Field(
+ description="Comma-separated list of Redis Clusters nodes (host:port)",
+ default=None,
+ )
+
+ REDIS_CLUSTERS_PASSWORD: Optional[str] = Field(
+ description="Password for Redis Clusters authentication (if required)",
+ default=None,
+ )
diff --git a/api/configs/middleware/vdb/analyticdb_config.py b/api/configs/middleware/vdb/analyticdb_config.py
index 247a8ea555..53cfaae43e 100644
--- a/api/configs/middleware/vdb/analyticdb_config.py
+++ b/api/configs/middleware/vdb/analyticdb_config.py
@@ -1,6 +1,6 @@
from typing import Optional
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, PositiveInt
class AnalyticdbConfig(BaseModel):
@@ -40,3 +40,11 @@ class AnalyticdbConfig(BaseModel):
description="The password for accessing the specified namespace within the AnalyticDB instance"
" (if namespace feature is enabled).",
)
+ ANALYTICDB_HOST: Optional[str] = Field(
+ default=None, description="The host of the AnalyticDB instance you want to connect to."
+ )
+ ANALYTICDB_PORT: PositiveInt = Field(
+ default=5432, description="The port of the AnalyticDB instance you want to connect to."
+ )
+ ANALYTICDB_MIN_CONNECTION: PositiveInt = Field(default=1, description="Min connection of the AnalyticDB database.")
+ ANALYTICDB_MAX_CONNECTION: PositiveInt = Field(default=5, description="Max connection of the AnalyticDB database.")
diff --git a/api/configs/packaging/__init__.py b/api/configs/packaging/__init__.py
index 65065efbc0..1f2b8224e8 100644
--- a/api/configs/packaging/__init__.py
+++ b/api/configs/packaging/__init__.py
@@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):
CURRENT_VERSION: str = Field(
description="Dify version",
- default="0.11.1",
+ default="0.11.2",
)
COMMIT_SHA: str = Field(
diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py
index 36338cbd8a..5a4cd7684f 100644
--- a/api/controllers/console/app/app.py
+++ b/api/controllers/console/app/app.py
@@ -9,6 +9,7 @@ from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import (
account_initialization_required,
cloud_edition_billing_resource_check,
+ enterprise_license_required,
setup_required,
)
from core.ops.ops_trace_manager import OpsTraceManager
@@ -28,6 +29,7 @@ class AppListApi(Resource):
@setup_required
@login_required
@account_initialization_required
+ @enterprise_license_required
def get(self):
"""Get app list"""
@@ -149,6 +151,7 @@ class AppApi(Resource):
@setup_required
@login_required
@account_initialization_required
+ @enterprise_license_required
@get_app_model
@marshal_with(app_detail_fields_with_site)
def get(self, app_model):
diff --git a/api/controllers/console/app/audio.py b/api/controllers/console/app/audio.py
index 112446613f..695b8890e3 100644
--- a/api/controllers/console/app/audio.py
+++ b/api/controllers/console/app/audio.py
@@ -70,7 +70,7 @@ class ChatMessageAudioApi(Resource):
except ValueError as e:
raise e
except Exception as e:
- logging.exception(f"internal server error, {str(e)}.")
+ logging.exception("Failed to handle post request to ChatMessageAudioApi")
raise InternalServerError()
@@ -128,7 +128,7 @@ class ChatMessageTextApi(Resource):
except ValueError as e:
raise e
except Exception as e:
- logging.exception(f"internal server error, {str(e)}.")
+ logging.exception("Failed to handle post request to ChatMessageTextApi")
raise InternalServerError()
@@ -170,7 +170,7 @@ class TextModesApi(Resource):
except ValueError as e:
raise e
except Exception as e:
- logging.exception(f"internal server error, {str(e)}.")
+ logging.exception("Failed to handle get request to TextModesApi")
raise InternalServerError()
diff --git a/api/controllers/console/auth/forgot_password.py b/api/controllers/console/auth/forgot_password.py
index 735edae5f6..fb32bb2b60 100644
--- a/api/controllers/console/auth/forgot_password.py
+++ b/api/controllers/console/auth/forgot_password.py
@@ -12,7 +12,7 @@ from controllers.console.auth.error import (
InvalidTokenError,
PasswordMismatchError,
)
-from controllers.console.error import EmailSendIpLimitError, NotAllowedRegister
+from controllers.console.error import AccountNotFound, EmailSendIpLimitError
from controllers.console.wraps import setup_required
from events.tenant_event import tenant_was_created
from extensions.ext_database import db
@@ -48,7 +48,7 @@ class ForgotPasswordSendEmailApi(Resource):
token = AccountService.send_reset_password_email(email=args["email"], language=language)
return {"result": "fail", "data": token, "code": "account_not_found"}
else:
- raise NotAllowedRegister()
+ raise AccountNotFound()
else:
token = AccountService.send_reset_password_email(account=account, email=args["email"], language=language)
diff --git a/api/controllers/console/auth/login.py b/api/controllers/console/auth/login.py
index e2e8f84920..f4463ce9cb 100644
--- a/api/controllers/console/auth/login.py
+++ b/api/controllers/console/auth/login.py
@@ -16,9 +16,9 @@ from controllers.console.auth.error import (
)
from controllers.console.error import (
AccountBannedError,
+ AccountNotFound,
EmailSendIpLimitError,
NotAllowedCreateWorkspace,
- NotAllowedRegister,
)
from controllers.console.wraps import setup_required
from events.tenant_event import tenant_was_created
@@ -76,7 +76,7 @@ class LoginApi(Resource):
token = AccountService.send_reset_password_email(email=args["email"], language=language)
return {"result": "fail", "data": token, "code": "account_not_found"}
else:
- raise NotAllowedRegister()
+ raise AccountNotFound()
# SELF_HOSTED only have one workspace
tenants = TenantService.get_join_tenants(account)
if len(tenants) == 0:
@@ -119,7 +119,7 @@ class ResetPasswordSendEmailApi(Resource):
if FeatureService.get_system_features().is_allow_register:
token = AccountService.send_reset_password_email(email=args["email"], language=language)
else:
- raise NotAllowedRegister()
+ raise AccountNotFound()
else:
token = AccountService.send_reset_password_email(account=account, language=language)
@@ -148,7 +148,7 @@ class EmailCodeLoginSendEmailApi(Resource):
if FeatureService.get_system_features().is_allow_register:
token = AccountService.send_email_code_login_email(email=args["email"], language=language)
else:
- raise NotAllowedRegister()
+ raise AccountNotFound()
else:
token = AccountService.send_email_code_login_email(account=account, language=language)
diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py
index 82163a32ee..95d4013e3a 100644
--- a/api/controllers/console/datasets/datasets.py
+++ b/api/controllers/console/datasets/datasets.py
@@ -10,7 +10,7 @@ from controllers.console import api
from controllers.console.apikey import api_key_fields, api_key_list
from controllers.console.app.error import ProviderNotInitializeError
from controllers.console.datasets.error import DatasetInUseError, DatasetNameDuplicateError, IndexingEstimateError
-from controllers.console.wraps import account_initialization_required, setup_required
+from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required
from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
from core.indexing_runner import IndexingRunner
from core.model_runtime.entities.model_entities import ModelType
@@ -44,6 +44,7 @@ class DatasetListApi(Resource):
@setup_required
@login_required
@account_initialization_required
+ @enterprise_license_required
def get(self):
page = request.args.get("page", default=1, type=int)
limit = request.args.get("limit", default=20, type=int)
diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py
index 60848039c5..f38408525a 100644
--- a/api/controllers/console/datasets/datasets_document.py
+++ b/api/controllers/console/datasets/datasets_document.py
@@ -948,7 +948,7 @@ class DocumentRetryApi(DocumentResource):
raise DocumentAlreadyFinishedError()
retry_documents.append(document)
except Exception as e:
- logging.exception(f"Document {document_id} retry failed: {str(e)}")
+ logging.exception(f"Failed to retry document, document id: {document_id}")
continue
# retry document
DocumentService.retry_document(dataset_id, retry_documents)
diff --git a/api/controllers/console/error.py b/api/controllers/console/error.py
index e0630ca66c..1b4e6deae6 100644
--- a/api/controllers/console/error.py
+++ b/api/controllers/console/error.py
@@ -52,8 +52,8 @@ class AccountBannedError(BaseHTTPException):
code = 400
-class NotAllowedRegister(BaseHTTPException):
- error_code = "unauthorized"
+class AccountNotFound(BaseHTTPException):
+ error_code = "account_not_found"
description = "Account not found."
code = 400
@@ -86,3 +86,9 @@ class NoFileUploadedError(BaseHTTPException):
error_code = "no_file_uploaded"
description = "Please upload your file."
code = 400
+
+
+class UnauthorizedAndForceLogout(BaseHTTPException):
+ error_code = "unauthorized_and_force_logout"
+ description = "Unauthorized and force logout."
+ code = 401
diff --git a/api/controllers/console/remote_files.py b/api/controllers/console/remote_files.py
index 9b899bef64..fac1341b39 100644
--- a/api/controllers/console/remote_files.py
+++ b/api/controllers/console/remote_files.py
@@ -45,7 +45,7 @@ class RemoteFileUploadApi(Resource):
resp = ssrf_proxy.head(url=url)
if resp.status_code != httpx.codes.OK:
- resp = ssrf_proxy.get(url=url, timeout=3)
+ resp = ssrf_proxy.get(url=url, timeout=3, follow_redirects=True)
resp.raise_for_status()
file_info = helpers.guess_file_info_from_response(resp)
diff --git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py
index aabc417759..750f65168f 100644
--- a/api/controllers/console/workspace/account.py
+++ b/api/controllers/console/workspace/account.py
@@ -14,7 +14,7 @@ from controllers.console.workspace.error import (
InvalidInvitationCodeError,
RepeatPasswordNotMatchError,
)
-from controllers.console.wraps import account_initialization_required, setup_required
+from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required
from extensions.ext_database import db
from fields.member_fields import account_fields
from libs.helper import TimestampField, timezone
@@ -79,6 +79,7 @@ class AccountProfileApi(Resource):
@login_required
@account_initialization_required
@marshal_with(account_fields)
+ @enterprise_license_required
def get(self):
return current_user
diff --git a/api/controllers/console/workspace/members.py b/api/controllers/console/workspace/members.py
index 8f694c65e0..38ed2316a5 100644
--- a/api/controllers/console/workspace/members.py
+++ b/api/controllers/console/workspace/members.py
@@ -1,3 +1,5 @@
+from urllib import parse
+
from flask_login import current_user
from flask_restful import Resource, abort, marshal_with, reqparse
@@ -57,11 +59,12 @@ class MemberInviteEmailApi(Resource):
token = RegisterService.invite_new_member(
inviter.current_tenant, invitee_email, interface_language, role=invitee_role, inviter=inviter
)
+ encoded_invitee_email = parse.quote(invitee_email)
invitation_results.append(
{
"status": "success",
"email": invitee_email,
- "url": f"{console_web_url}/activate?email={invitee_email}&token={token}",
+ "url": f"{console_web_url}/activate?email={encoded_invitee_email}&token={token}",
}
)
except AccountAlreadyInTenantError:
diff --git a/api/controllers/console/workspace/models.py b/api/controllers/console/workspace/models.py
index 57443cc3b3..f804285f00 100644
--- a/api/controllers/console/workspace/models.py
+++ b/api/controllers/console/workspace/models.py
@@ -72,7 +72,10 @@ class DefaultModelApi(Resource):
model=model_setting["model"],
)
except Exception as ex:
- logging.exception(f"{model_setting['model_type']} save error: {ex}")
+ logging.exception(
+ f"Failed to update default model, model type: {model_setting['model_type']},"
+ f" model:{model_setting.get('model')}"
+ )
raise ex
return {"result": "success"}
@@ -156,7 +159,10 @@ class ModelProviderModelApi(Resource):
credentials=args["credentials"],
)
except CredentialsValidateFailedError as ex:
- logging.exception(f"save model credentials error: {ex}")
+ logging.exception(
+ f"Failed to save model credentials, tenant_id: {tenant_id},"
+ f" model: {args.get('model')}, model_type: {args.get('model_type')}"
+ )
raise ValueError(str(ex))
return {"result": "success"}, 200
diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py
index daadb85d84..9ecda2126d 100644
--- a/api/controllers/console/workspace/tool_providers.py
+++ b/api/controllers/console/workspace/tool_providers.py
@@ -7,7 +7,7 @@ from werkzeug.exceptions import Forbidden
from configs import dify_config
from controllers.console import api
-from controllers.console.wraps import account_initialization_required, setup_required
+from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required
from core.model_runtime.utils.encoders import jsonable_encoder
from libs.helper import alphanumeric, uuid_value
from libs.login import login_required
@@ -549,6 +549,7 @@ class ToolLabelsApi(Resource):
@setup_required
@login_required
@account_initialization_required
+ @enterprise_license_required
def get(self):
return jsonable_encoder(ToolLabelsService.list_tool_labels())
diff --git a/api/controllers/console/wraps.py b/api/controllers/console/wraps.py
index 9f294cb93c..d0df296c24 100644
--- a/api/controllers/console/wraps.py
+++ b/api/controllers/console/wraps.py
@@ -8,10 +8,10 @@ from flask_login import current_user
from configs import dify_config
from controllers.console.workspace.error import AccountNotInitializedError
from models.model import DifySetup
-from services.feature_service import FeatureService
+from services.feature_service import FeatureService, LicenseStatus
from services.operation_service import OperationService
-from .error import NotInitValidateError, NotSetupError
+from .error import NotInitValidateError, NotSetupError, UnauthorizedAndForceLogout
def account_initialization_required(view):
@@ -142,3 +142,15 @@ def setup_required(view):
return view(*args, **kwargs)
return decorated
+
+
+def enterprise_license_required(view):
+ @wraps(view)
+ def decorated(*args, **kwargs):
+ settings = FeatureService.get_system_features()
+ if settings.license.status in [LicenseStatus.INACTIVE, LicenseStatus.EXPIRED, LicenseStatus.LOST]:
+ raise UnauthorizedAndForceLogout("Your license is invalid. Please contact your administrator.")
+
+ return view(*args, **kwargs)
+
+ return decorated
diff --git a/api/controllers/web/audio.py b/api/controllers/web/audio.py
index 23550efe2e..e8521307ad 100644
--- a/api/controllers/web/audio.py
+++ b/api/controllers/web/audio.py
@@ -59,7 +59,7 @@ class AudioApi(WebApiResource):
except ValueError as e:
raise e
except Exception as e:
- logging.exception(f"internal server error: {str(e)}")
+ logging.exception("Failed to handle post request to AudioApi")
raise InternalServerError()
@@ -117,7 +117,7 @@ class TextApi(WebApiResource):
except ValueError as e:
raise e
except Exception as e:
- logging.exception(f"internal server error: {str(e)}")
+ logging.exception("Failed to handle post request to TextApi")
raise InternalServerError()
diff --git a/api/core/app/app_config/features/file_upload/manager.py b/api/core/app/app_config/features/file_upload/manager.py
index 2043ea0e41..0dc4efc47a 100644
--- a/api/core/app/app_config/features/file_upload/manager.py
+++ b/api/core/app/app_config/features/file_upload/manager.py
@@ -16,9 +16,7 @@ class FileUploadConfigManager:
file_upload_dict = config.get("file_upload")
if file_upload_dict:
if file_upload_dict.get("enabled"):
- transform_methods = file_upload_dict.get("allowed_file_upload_methods") or file_upload_dict.get(
- "allowed_upload_methods", []
- )
+ transform_methods = file_upload_dict.get("allowed_file_upload_methods", [])
data = {
"image_config": {
"number_limits": file_upload_dict["number_limits"],
diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py
index 0b88345061..00e5a74732 100644
--- a/api/core/app/apps/advanced_chat/app_generator.py
+++ b/api/core/app/apps/advanced_chat/app_generator.py
@@ -362,5 +362,5 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
if e.args[0] == "I/O operation on closed file.": # ignore this error
raise GenerateTaskStoppedError()
else:
- logger.exception(e)
+ logger.exception(f"Failed to process generate task pipeline, conversation_id: {conversation.id}")
raise e
diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py
index 1d4c0ea0fa..e1798957b9 100644
--- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py
+++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py
@@ -242,7 +242,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc
start_listener_time = time.time()
yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id)
except Exception as e:
- logger.exception(e)
+ logger.exception(f"Failed to listen audio message, task_id: {task_id}")
break
if tts_publisher:
yield MessageAudioEndStreamResponse(audio="", task_id=task_id)
diff --git a/api/core/app/apps/base_app_generator.py b/api/core/app/apps/base_app_generator.py
index 6e6da95401..2c78d95778 100644
--- a/api/core/app/apps/base_app_generator.py
+++ b/api/core/app/apps/base_app_generator.py
@@ -33,8 +33,8 @@ class BaseAppGenerator:
tenant_id=app_config.tenant_id,
config=FileUploadConfig(
allowed_file_types=entity_dictionary[k].allowed_file_types,
- allowed_extensions=entity_dictionary[k].allowed_file_extensions,
- allowed_upload_methods=entity_dictionary[k].allowed_file_upload_methods,
+ allowed_file_extensions=entity_dictionary[k].allowed_file_extensions,
+ allowed_file_upload_methods=entity_dictionary[k].allowed_file_upload_methods,
),
)
for k, v in user_inputs.items()
@@ -47,8 +47,8 @@ class BaseAppGenerator:
tenant_id=app_config.tenant_id,
config=FileUploadConfig(
allowed_file_types=entity_dictionary[k].allowed_file_types,
- allowed_extensions=entity_dictionary[k].allowed_file_extensions,
- allowed_upload_methods=entity_dictionary[k].allowed_file_upload_methods,
+ allowed_file_extensions=entity_dictionary[k].allowed_file_extensions,
+ allowed_file_upload_methods=entity_dictionary[k].allowed_file_upload_methods,
),
)
for k, v in user_inputs.items()
@@ -91,6 +91,9 @@ class BaseAppGenerator:
)
if variable_entity.type == VariableEntityType.NUMBER and isinstance(value, str):
+ # handle empty string case
+ if not value.strip():
+ return None
# may raise ValueError if user_input_value is not a valid number
try:
if "." in value:
diff --git a/api/core/app/apps/message_based_app_generator.py b/api/core/app/apps/message_based_app_generator.py
index bae64368e3..da206f01e7 100644
--- a/api/core/app/apps/message_based_app_generator.py
+++ b/api/core/app/apps/message_based_app_generator.py
@@ -80,7 +80,7 @@ class MessageBasedAppGenerator(BaseAppGenerator):
if e.args[0] == "I/O operation on closed file.": # ignore this error
raise GenerateTaskStoppedError()
else:
- logger.exception(e)
+ logger.exception(f"Failed to handle response, conversation_id: {conversation.id}")
raise e
def _get_conversation_by_user(
diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py
index a0080ece20..65da39b220 100644
--- a/api/core/app/apps/workflow/app_generator.py
+++ b/api/core/app/apps/workflow/app_generator.py
@@ -298,5 +298,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
if e.args[0] == "I/O operation on closed file.": # ignore this error
raise GenerateTaskStoppedError()
else:
- logger.exception(e)
+ logger.exception(
+ f"Fails to process generate task pipeline, task_id: {application_generate_entity.task_id}"
+ )
raise e
diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py
index aaa4824fe8..9e4921d6a2 100644
--- a/api/core/app/apps/workflow/generate_task_pipeline.py
+++ b/api/core/app/apps/workflow/generate_task_pipeline.py
@@ -216,7 +216,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa
else:
yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id)
except Exception as e:
- logger.exception(e)
+ logger.exception(f"Fails to get audio trunk, task_id: {task_id}")
break
if tts_publisher:
yield MessageAudioEndStreamResponse(audio="", task_id=task_id)
diff --git a/api/core/app/task_pipeline/message_cycle_manage.py b/api/core/app/task_pipeline/message_cycle_manage.py
index 236eebf0b8..e818a090ed 100644
--- a/api/core/app/task_pipeline/message_cycle_manage.py
+++ b/api/core/app/task_pipeline/message_cycle_manage.py
@@ -86,7 +86,7 @@ class MessageCycleManage:
conversation.name = name
except Exception as e:
if dify_config.DEBUG:
- logging.exception(f"generate conversation name failed: {e}")
+ logging.exception(f"generate conversation name failed, conversation_id: {conversation_id}")
pass
db.session.merge(conversation)
diff --git a/api/core/file/models.py b/api/core/file/models.py
index 0142893787..3e7e189c62 100644
--- a/api/core/file/models.py
+++ b/api/core/file/models.py
@@ -28,8 +28,8 @@ class FileUploadConfig(BaseModel):
image_config: Optional[ImageConfig] = None
allowed_file_types: Sequence[FileType] = Field(default_factory=list)
- allowed_extensions: Sequence[str] = Field(default_factory=list)
- allowed_upload_methods: Sequence[FileTransferMethod] = Field(default_factory=list)
+ allowed_file_extensions: Sequence[str] = Field(default_factory=list)
+ allowed_file_upload_methods: Sequence[FileTransferMethod] = Field(default_factory=list)
number_limits: int = 0
diff --git a/api/core/helper/moderation.py b/api/core/helper/moderation.py
index b880590de2..da0fd0031c 100644
--- a/api/core/helper/moderation.py
+++ b/api/core/helper/moderation.py
@@ -41,7 +41,7 @@ def check_moderation(model_config: ModelConfigWithCredentialsEntity, text: str)
if moderation_result is True:
return True
except Exception as ex:
- logger.exception(ex)
+ logger.exception(f"Fails to check moderation, provider_name: {provider_name}")
raise InvokeBadRequestError("Rate limit exceeded, please try again later.")
return False
diff --git a/api/core/helper/module_import_helper.py b/api/core/helper/module_import_helper.py
index e6e1491548..1e2fefce88 100644
--- a/api/core/helper/module_import_helper.py
+++ b/api/core/helper/module_import_helper.py
@@ -29,7 +29,7 @@ def import_module_from_source(*, module_name: str, py_file_path: AnyStr, use_laz
spec.loader.exec_module(module)
return module
except Exception as e:
- logging.exception(f"Failed to load module {module_name} from {py_file_path}: {str(e)}")
+ logging.exception(f"Failed to load module {module_name} from script file '{py_file_path}'")
raise e
diff --git a/api/core/helper/ssrf_proxy.py b/api/core/helper/ssrf_proxy.py
index 64d73c7cd9..2e457dd530 100644
--- a/api/core/helper/ssrf_proxy.py
+++ b/api/core/helper/ssrf_proxy.py
@@ -39,6 +39,7 @@ def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
)
retries = 0
+ stream = kwargs.pop("stream", False)
while retries <= max_retries:
try:
if dify_config.SSRF_PROXY_ALL_URL:
@@ -52,6 +53,8 @@ def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
response = client.request(method=method, url=url, **kwargs)
if response.status_code not in STATUS_FORCELIST:
+ if stream:
+ return response.iter_bytes()
return response
else:
logging.warning(
diff --git a/api/core/indexing_runner.py b/api/core/indexing_runner.py
index e2a94073cf..7db8f54f70 100644
--- a/api/core/indexing_runner.py
+++ b/api/core/indexing_runner.py
@@ -29,6 +29,7 @@ from core.rag.splitter.fixed_text_splitter import (
FixedRecursiveCharacterTextSplitter,
)
from core.rag.splitter.text_splitter import TextSplitter
+from core.tools.utils.text_processing_utils import remove_leading_symbols
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from extensions.ext_storage import storage
@@ -500,11 +501,7 @@ class IndexingRunner:
document_node.metadata["doc_hash"] = hash
# delete Splitter character
page_content = document_node.page_content
- if page_content.startswith(".") or page_content.startswith("。"):
- page_content = page_content[1:]
- else:
- page_content = page_content
- document_node.page_content = page_content
+ document_node.page_content = remove_leading_symbols(page_content)
if document_node.page_content:
split_documents.append(document_node)
@@ -554,7 +551,7 @@ class IndexingRunner:
qa_documents.append(qa_document)
format_documents.extend(qa_documents)
except Exception as e:
- logging.exception(e)
+ logging.exception("Failed to format qa document")
all_qa_documents.extend(format_documents)
diff --git a/api/core/llm_generator/llm_generator.py b/api/core/llm_generator/llm_generator.py
index 9cf9ed75c0..3a92c8d9d2 100644
--- a/api/core/llm_generator/llm_generator.py
+++ b/api/core/llm_generator/llm_generator.py
@@ -102,7 +102,7 @@ class LLMGenerator:
except InvokeError:
questions = []
except Exception as e:
- logging.exception(e)
+ logging.exception("Failed to generate suggested questions after answer")
questions = []
return questions
@@ -148,7 +148,7 @@ class LLMGenerator:
error = str(e)
error_step = "generate rule config"
except Exception as e:
- logging.exception(e)
+ logging.exception(f"Failed to generate rule config, model: {model_config.get('name')}")
rule_config["error"] = str(e)
rule_config["error"] = f"Failed to {error_step}. Error: {error}" if error else ""
@@ -234,7 +234,7 @@ class LLMGenerator:
error_step = "generate conversation opener"
except Exception as e:
- logging.exception(e)
+ logging.exception(f"Failed to generate rule config, model: {model_config.get('name')}")
rule_config["error"] = str(e)
rule_config["error"] = f"Failed to {error_step}. Error: {error}" if error else ""
@@ -286,7 +286,9 @@ class LLMGenerator:
error = str(e)
return {"code": "", "language": code_language, "error": f"Failed to generate code. Error: {error}"}
except Exception as e:
- logging.exception(e)
+ logging.exception(
+ f"Failed to invoke LLM model, model: {model_config.get('name')}, language: {code_language}"
+ )
return {"code": "", "language": code_language, "error": f"An unexpected error occurred: {str(e)}"}
@classmethod
diff --git a/api/core/model_runtime/model_providers/anthropic/llm/llm.py b/api/core/model_runtime/model_providers/anthropic/llm/llm.py
index 3a5a42ba05..4e7faab891 100644
--- a/api/core/model_runtime/model_providers/anthropic/llm/llm.py
+++ b/api/core/model_runtime/model_providers/anthropic/llm/llm.py
@@ -325,14 +325,13 @@ class AnthropicLargeLanguageModel(LargeLanguageModel):
assistant_prompt_message.tool_calls.append(tool_call)
# calculate num tokens
- if response.usage:
- # transform usage
- prompt_tokens = response.usage.input_tokens
- completion_tokens = response.usage.output_tokens
- else:
- # calculate num tokens
- prompt_tokens = self.get_num_tokens(model, credentials, prompt_messages)
- completion_tokens = self.get_num_tokens(model, credentials, [assistant_prompt_message])
+ prompt_tokens = (response.usage and response.usage.input_tokens) or self.get_num_tokens(
+ model, credentials, prompt_messages
+ )
+
+ completion_tokens = (response.usage and response.usage.output_tokens) or self.get_num_tokens(
+ model, credentials, [assistant_prompt_message]
+ )
# transform usage
usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens)
diff --git a/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py b/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py
index 84672520e0..9b75285e40 100644
--- a/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py
+++ b/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py
@@ -103,7 +103,7 @@ class AzureRerankModel(RerankModel):
return RerankResult(model=model, docs=rerank_documents)
except Exception as e:
- logger.exception(f"Exception in Azure rerank: {e}")
+ logger.exception(f"Failed to invoke rerank model, model: {model}")
raise
def validate_credentials(self, model: str, credentials: dict) -> None:
diff --git a/api/core/model_runtime/model_providers/bedrock/llm/llm.py b/api/core/model_runtime/model_providers/bedrock/llm/llm.py
index ff0403ee47..ef4dfaf6f1 100644
--- a/api/core/model_runtime/model_providers/bedrock/llm/llm.py
+++ b/api/core/model_runtime/model_providers/bedrock/llm/llm.py
@@ -2,13 +2,11 @@
import base64
import json
import logging
-import mimetypes
from collections.abc import Generator
from typing import Optional, Union, cast
# 3rd import
import boto3
-import requests
from botocore.config import Config
from botocore.exceptions import (
ClientError,
@@ -439,22 +437,10 @@ class BedrockLargeLanguageModel(LargeLanguageModel):
sub_messages.append(sub_message_dict)
elif message_content.type == PromptMessageContentType.IMAGE:
message_content = cast(ImagePromptMessageContent, message_content)
- if not message_content.data.startswith("data:"):
- # fetch image data from url
- try:
- url = message_content.data
- image_content = requests.get(url).content
- if "?" in url:
- url = url.split("?")[0]
- mime_type, _ = mimetypes.guess_type(url)
- base64_data = base64.b64encode(image_content).decode("utf-8")
- except Exception as ex:
- raise ValueError(f"Failed to fetch image data from url {message_content.data}, {ex}")
- else:
- data_split = message_content.data.split(";base64,")
- mime_type = data_split[0].replace("data:", "")
- base64_data = data_split[1]
- image_content = base64.b64decode(base64_data)
+ data_split = message_content.data.split(";base64,")
+ mime_type = data_split[0].replace("data:", "")
+ base64_data = data_split[1]
+ image_content = base64.b64decode(base64_data)
if mime_type not in {"image/jpeg", "image/png", "image/gif", "image/webp"}:
raise ValueError(
diff --git a/api/core/model_runtime/model_providers/google/llm/_position.yaml b/api/core/model_runtime/model_providers/google/llm/_position.yaml
index 63b9ca3a29..ab3081db38 100644
--- a/api/core/model_runtime/model_providers/google/llm/_position.yaml
+++ b/api/core/model_runtime/model_providers/google/llm/_position.yaml
@@ -11,5 +11,6 @@
- gemini-1.5-flash-exp-0827
- gemini-1.5-flash-8b-exp-0827
- gemini-1.5-flash-8b-exp-0924
+- gemini-exp-1114
- gemini-pro
- gemini-pro-vision
diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-001.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-001.yaml
index 8d8cd24847..2e68fa8e6f 100644
--- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-001.yaml
+++ b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-001.yaml
@@ -24,14 +24,13 @@ parameter_rules:
zh_Hans: 仅从每个后续标记的前 K 个选项中采样。
en_US: Only sample from the top K options for each subsequent token.
required: false
- - name: max_tokens_to_sample
+ - name: max_output_tokens
use_template: max_tokens
- required: true
default: 8192
min: 1
max: 8192
- - name: response_format
- use_template: response_format
+ - name: json_schema
+ use_template: json_schema
pricing:
input: '0.00'
output: '0.00'
diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-002.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-002.yaml
index ae6b85cb23..9f44504e89 100644
--- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-002.yaml
+++ b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-002.yaml
@@ -24,14 +24,13 @@ parameter_rules:
zh_Hans: 仅从每个后续标记的前 K 个选项中采样。
en_US: Only sample from the top K options for each subsequent token.
required: false
- - name: max_tokens_to_sample
+ - name: max_output_tokens
use_template: max_tokens
- required: true
default: 8192
min: 1
max: 8192
- - name: response_format
- use_template: response_format
+ - name: json_schema
+ use_template: json_schema
pricing:
input: '0.00'
output: '0.00'
diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-8b-exp-0827.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-8b-exp-0827.yaml
index bbc697e934..a3da9095e1 100644
--- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-8b-exp-0827.yaml
+++ b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-8b-exp-0827.yaml
@@ -24,14 +24,13 @@ parameter_rules:
zh_Hans: 仅从每个后续标记的前 K 个选项中采样。
en_US: Only sample from the top K options for each subsequent token.
required: false
- - name: max_tokens_to_sample
+ - name: max_output_tokens
use_template: max_tokens
- required: true
default: 8192
min: 1
max: 8192
- - name: response_format
- use_template: response_format
+ - name: json_schema
+ use_template: json_schema
pricing:
input: '0.00'
output: '0.00'
diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-8b-exp-0924.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-8b-exp-0924.yaml
index 890faf8c3f..19373e4993 100644
--- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-8b-exp-0924.yaml
+++ b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-8b-exp-0924.yaml
@@ -24,14 +24,13 @@ parameter_rules:
zh_Hans: 仅从每个后续标记的前 K 个选项中采样。
en_US: Only sample from the top K options for each subsequent token.
required: false
- - name: max_tokens_to_sample
+ - name: max_output_tokens
use_template: max_tokens
- required: true
default: 8192
min: 1
max: 8192
- - name: response_format
- use_template: response_format
+ - name: json_schema
+ use_template: json_schema
pricing:
input: '0.00'
output: '0.00'
diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-exp-0827.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-exp-0827.yaml
index c5695e5dda..ca1f0b39b2 100644
--- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-exp-0827.yaml
+++ b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-exp-0827.yaml
@@ -24,14 +24,13 @@ parameter_rules:
zh_Hans: 仅从每个后续标记的前 K 个选项中采样。
en_US: Only sample from the top K options for each subsequent token.
required: false
- - name: max_tokens_to_sample
+ - name: max_output_tokens
use_template: max_tokens
- required: true
default: 8192
min: 1
max: 8192
- - name: response_format
- use_template: response_format
+ - name: json_schema
+ use_template: json_schema
pricing:
input: '0.00'
output: '0.00'
diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-latest.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-latest.yaml
index d1c264c3a7..24e8c3a74f 100644
--- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-latest.yaml
+++ b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash-latest.yaml
@@ -24,14 +24,13 @@ parameter_rules:
zh_Hans: 仅从每个后续标记的前 K 个选项中采样。
en_US: Only sample from the top K options for each subsequent token.
required: false
- - name: max_tokens_to_sample
+ - name: max_output_tokens
use_template: max_tokens
- required: true
default: 8192
min: 1
max: 8192
- - name: response_format
- use_template: response_format
+ - name: json_schema
+ use_template: json_schema
pricing:
input: '0.00'
output: '0.00'
diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash.yaml
index 6b794e9bee..fa3e814fc3 100644
--- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash.yaml
+++ b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-flash.yaml
@@ -24,14 +24,13 @@ parameter_rules:
zh_Hans: 仅从每个后续标记的前 K 个选项中采样。
en_US: Only sample from the top K options for each subsequent token.
required: false
- - name: max_tokens_to_sample
+ - name: max_output_tokens
use_template: max_tokens
- required: true
default: 8192
min: 1
max: 8192
- - name: response_format
- use_template: response_format
+ - name: json_schema
+ use_template: json_schema
pricing:
input: '0.00'
output: '0.00'
diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-001.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-001.yaml
index 9ac5e3ad1b..da125e6fab 100644
--- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-001.yaml
+++ b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-001.yaml
@@ -24,14 +24,13 @@ parameter_rules:
zh_Hans: 仅从每个后续标记的前 K 个选项中采样。
en_US: Only sample from the top K options for each subsequent token.
required: false
- - name: max_tokens_to_sample
+ - name: max_output_tokens
use_template: max_tokens
- required: true
default: 8192
min: 1
max: 8192
- - name: response_format
- use_template: response_format
+ - name: json_schema
+ use_template: json_schema
pricing:
input: '0.00'
output: '0.00'
diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-002.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-002.yaml
index f1d01d0763..f683e54d3b 100644
--- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-002.yaml
+++ b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-002.yaml
@@ -24,14 +24,13 @@ parameter_rules:
zh_Hans: 仅从每个后续标记的前 K 个选项中采样。
en_US: Only sample from the top K options for each subsequent token.
required: false
- - name: max_tokens_to_sample
+ - name: max_output_tokens
use_template: max_tokens
- required: true
default: 8192
min: 1
max: 8192
- - name: response_format
- use_template: response_format
+ - name: json_schema
+ use_template: json_schema
pricing:
input: '0.00'
output: '0.00'
diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-exp-0801.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-exp-0801.yaml
index 0a918e0d7b..c67c156bdb 100644
--- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-exp-0801.yaml
+++ b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-exp-0801.yaml
@@ -24,14 +24,13 @@ parameter_rules:
zh_Hans: 仅从每个后续标记的前 K 个选项中采样。
en_US: Only sample from the top K options for each subsequent token.
required: false
- - name: max_tokens_to_sample
+ - name: max_output_tokens
use_template: max_tokens
- required: true
default: 8192
min: 1
max: 8192
- - name: response_format
- use_template: response_format
+ - name: json_schema
+ use_template: json_schema
pricing:
input: '0.00'
output: '0.00'
diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-exp-0827.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-exp-0827.yaml
index 7452ce46e7..56059fd799 100644
--- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-exp-0827.yaml
+++ b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-exp-0827.yaml
@@ -24,14 +24,13 @@ parameter_rules:
zh_Hans: 仅从每个后续标记的前 K 个选项中采样。
en_US: Only sample from the top K options for each subsequent token.
required: false
- - name: max_tokens_to_sample
+ - name: max_output_tokens
use_template: max_tokens
- required: true
default: 8192
min: 1
max: 8192
- - name: response_format
- use_template: response_format
+ - name: json_schema
+ use_template: json_schema
pricing:
input: '0.00'
output: '0.00'
diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-latest.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-latest.yaml
index 65c2d97e92..ec376f3186 100644
--- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-latest.yaml
+++ b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro-latest.yaml
@@ -24,14 +24,13 @@ parameter_rules:
zh_Hans: 仅从每个后续标记的前 K 个选项中采样。
en_US: Only sample from the top K options for each subsequent token.
required: false
- - name: max_tokens_to_sample
+ - name: max_output_tokens
use_template: max_tokens
- required: true
default: 8192
min: 1
max: 8192
- - name: response_format
- use_template: response_format
+ - name: json_schema
+ use_template: json_schema
pricing:
input: '0.00'
output: '0.00'
diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro.yaml
index 12620b57b6..8394cdfb56 100644
--- a/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro.yaml
+++ b/api/core/model_runtime/model_providers/google/llm/gemini-1.5-pro.yaml
@@ -24,14 +24,13 @@ parameter_rules:
zh_Hans: 仅从每个后续标记的前 K 个选项中采样。
en_US: Only sample from the top K options for each subsequent token.
required: false
- - name: max_tokens_to_sample
+ - name: max_output_tokens
use_template: max_tokens
- required: true
default: 8192
min: 1
max: 8192
- - name: response_format
- use_template: response_format
+ - name: json_schema
+ use_template: json_schema
pricing:
input: '0.00'
output: '0.00'
diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-exp-1114.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-exp-1114.yaml
new file mode 100644
index 0000000000..f126627689
--- /dev/null
+++ b/api/core/model_runtime/model_providers/google/llm/gemini-exp-1114.yaml
@@ -0,0 +1,38 @@
+model: gemini-exp-1114
+label:
+ en_US: Gemini exp 1114
+model_type: llm
+features:
+ - agent-thought
+ - vision
+ - tool-call
+ - stream-tool-call
+model_properties:
+ mode: chat
+ context_size: 2097152
+parameter_rules:
+ - name: temperature
+ use_template: temperature
+ - name: top_p
+ use_template: top_p
+ - name: top_k
+ label:
+ zh_Hans: 取样数量
+ en_US: Top k
+ type: int
+ help:
+ zh_Hans: 仅从每个后续标记的前 K 个选项中采样。
+ en_US: Only sample from the top K options for each subsequent token.
+ required: false
+ - name: max_output_tokens
+ use_template: max_tokens
+ default: 8192
+ min: 1
+ max: 8192
+ - name: json_schema
+ use_template: json_schema
+pricing:
+ input: '0.00'
+ output: '0.00'
+ unit: '0.000001'
+ currency: USD
diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-pro-vision.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-pro-vision.yaml
index 075e484e46..5b589745d7 100644
--- a/api/core/model_runtime/model_providers/google/llm/gemini-pro-vision.yaml
+++ b/api/core/model_runtime/model_providers/google/llm/gemini-pro-vision.yaml
@@ -32,3 +32,4 @@ pricing:
output: '0.00'
unit: '0.000001'
currency: USD
+deprecated: true
diff --git a/api/core/model_runtime/model_providers/google/llm/gemini-pro.yaml b/api/core/model_runtime/model_providers/google/llm/gemini-pro.yaml
index 4e9f59e7da..f05fec8c5d 100644
--- a/api/core/model_runtime/model_providers/google/llm/gemini-pro.yaml
+++ b/api/core/model_runtime/model_providers/google/llm/gemini-pro.yaml
@@ -36,3 +36,4 @@ pricing:
output: '0.00'
unit: '0.000001'
currency: USD
+deprecated: true
diff --git a/api/core/model_runtime/model_providers/google/llm/llm.py b/api/core/model_runtime/model_providers/google/llm/llm.py
index b1b07a611b..754f056ac1 100644
--- a/api/core/model_runtime/model_providers/google/llm/llm.py
+++ b/api/core/model_runtime/model_providers/google/llm/llm.py
@@ -1,7 +1,6 @@
import base64
import io
import json
-import logging
from collections.abc import Generator
from typing import Optional, Union, cast
@@ -36,17 +35,6 @@ from core.model_runtime.errors.invoke import (
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
-logger = logging.getLogger(__name__)
-
-GEMINI_BLOCK_MODE_PROMPT = """You should always follow the instructions and output a valid {{block}} object.
-The structure of the {{block}} object you can found in the instructions, use {"answer": "$your_answer"} as the default structure
-if you are not sure about the structure.
-
-
-{{instructions}}
-
-""" # noqa: E501
-
class GoogleLargeLanguageModel(LargeLanguageModel):
def _invoke(
@@ -155,7 +143,7 @@ class GoogleLargeLanguageModel(LargeLanguageModel):
try:
ping_message = SystemPromptMessage(content="ping")
- self._generate(model, credentials, [ping_message], {"max_tokens_to_sample": 5})
+ self._generate(model, credentials, [ping_message], {"max_output_tokens": 5})
except Exception as ex:
raise CredentialsValidateFailedError(str(ex))
@@ -184,7 +172,15 @@ class GoogleLargeLanguageModel(LargeLanguageModel):
:return: full response or stream response chunk generator result
"""
config_kwargs = model_parameters.copy()
- config_kwargs["max_output_tokens"] = config_kwargs.pop("max_tokens_to_sample", None)
+ if schema := config_kwargs.pop("json_schema", None):
+ try:
+ schema = json.loads(schema)
+ except:
+ raise exceptions.InvalidArgument("Invalid JSON Schema")
+ if tools:
+                raise exceptions.InvalidArgument("Gemini does not support using tools and JSON Schema at the same time")
+ config_kwargs["response_schema"] = schema
+ config_kwargs["response_mime_type"] = "application/json"
if stop:
config_kwargs["stop_sequences"] = stop
diff --git a/api/core/model_runtime/model_providers/ollama/llm/llm.py b/api/core/model_runtime/model_providers/ollama/llm/llm.py
index a7ea53e0e9..094a674645 100644
--- a/api/core/model_runtime/model_providers/ollama/llm/llm.py
+++ b/api/core/model_runtime/model_providers/ollama/llm/llm.py
@@ -22,6 +22,7 @@ from core.model_runtime.entities.message_entities import (
PromptMessageTool,
SystemPromptMessage,
TextPromptMessageContent,
+ ToolPromptMessage,
UserPromptMessage,
)
from core.model_runtime.entities.model_entities import (
@@ -86,6 +87,7 @@ class OllamaLargeLanguageModel(LargeLanguageModel):
credentials=credentials,
prompt_messages=prompt_messages,
model_parameters=model_parameters,
+ tools=tools,
stop=stop,
stream=stream,
user=user,
@@ -153,6 +155,7 @@ class OllamaLargeLanguageModel(LargeLanguageModel):
credentials: dict,
prompt_messages: list[PromptMessage],
model_parameters: dict,
+ tools: Optional[list[PromptMessageTool]] = None,
stop: Optional[list[str]] = None,
stream: bool = True,
user: Optional[str] = None,
@@ -196,6 +199,8 @@ class OllamaLargeLanguageModel(LargeLanguageModel):
if completion_type is LLMMode.CHAT:
endpoint_url = urljoin(endpoint_url, "api/chat")
data["messages"] = [self._convert_prompt_message_to_dict(m) for m in prompt_messages]
+ if tools:
+ data["tools"] = [self._convert_prompt_message_tool_to_dict(tool) for tool in tools]
else:
endpoint_url = urljoin(endpoint_url, "api/generate")
first_prompt_message = prompt_messages[0]
@@ -232,7 +237,7 @@ class OllamaLargeLanguageModel(LargeLanguageModel):
if stream:
return self._handle_generate_stream_response(model, credentials, completion_type, response, prompt_messages)
- return self._handle_generate_response(model, credentials, completion_type, response, prompt_messages)
+ return self._handle_generate_response(model, credentials, completion_type, response, prompt_messages, tools)
def _handle_generate_response(
self,
@@ -241,6 +246,7 @@ class OllamaLargeLanguageModel(LargeLanguageModel):
completion_type: LLMMode,
response: requests.Response,
prompt_messages: list[PromptMessage],
+ tools: Optional[list[PromptMessageTool]],
) -> LLMResult:
"""
Handle llm completion response
@@ -253,14 +259,16 @@ class OllamaLargeLanguageModel(LargeLanguageModel):
:return: llm result
"""
response_json = response.json()
-
+ tool_calls = []
if completion_type is LLMMode.CHAT:
message = response_json.get("message", {})
response_content = message.get("content", "")
+ response_tool_calls = message.get("tool_calls", [])
+ tool_calls = [self._extract_response_tool_call(tool_call) for tool_call in response_tool_calls]
else:
response_content = response_json["response"]
- assistant_message = AssistantPromptMessage(content=response_content)
+ assistant_message = AssistantPromptMessage(content=response_content, tool_calls=tool_calls)
if "prompt_eval_count" in response_json and "eval_count" in response_json:
# transform usage
@@ -405,9 +413,28 @@ class OllamaLargeLanguageModel(LargeLanguageModel):
chunk_index += 1
+ def _convert_prompt_message_tool_to_dict(self, tool: PromptMessageTool) -> dict:
+ """
+ Convert PromptMessageTool to dict for Ollama API
+
+ :param tool: tool
+ :return: tool dict
+ """
+ return {
+ "type": "function",
+ "function": {
+ "name": tool.name,
+ "description": tool.description,
+ "parameters": tool.parameters,
+ },
+ }
+
def _convert_prompt_message_to_dict(self, message: PromptMessage) -> dict:
"""
Convert PromptMessage to dict for Ollama API
+
+ :param message: prompt message
+ :return: message dict
"""
if isinstance(message, UserPromptMessage):
message = cast(UserPromptMessage, message)
@@ -432,6 +459,9 @@ class OllamaLargeLanguageModel(LargeLanguageModel):
elif isinstance(message, SystemPromptMessage):
message = cast(SystemPromptMessage, message)
message_dict = {"role": "system", "content": message.content}
+ elif isinstance(message, ToolPromptMessage):
+ message = cast(ToolPromptMessage, message)
+ message_dict = {"role": "tool", "content": message.content}
else:
raise ValueError(f"Got unknown type {message}")
@@ -452,6 +482,29 @@ class OllamaLargeLanguageModel(LargeLanguageModel):
return num_tokens
+ def _extract_response_tool_call(self, response_tool_call: dict) -> AssistantPromptMessage.ToolCall:
+ """
+ Extract response tool call
+ """
+ tool_call = None
+ if response_tool_call and "function" in response_tool_call:
+ # Convert arguments to JSON string if it's a dict
+ arguments = response_tool_call.get("function").get("arguments")
+ if isinstance(arguments, dict):
+ arguments = json.dumps(arguments)
+
+ function = AssistantPromptMessage.ToolCall.ToolCallFunction(
+ name=response_tool_call.get("function").get("name"),
+ arguments=arguments,
+ )
+ tool_call = AssistantPromptMessage.ToolCall(
+ id=response_tool_call.get("function").get("name"),
+ type="function",
+ function=function,
+ )
+
+ return tool_call
+
def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity:
"""
Get customizable model schema.
@@ -461,10 +514,15 @@ class OllamaLargeLanguageModel(LargeLanguageModel):
:return: model schema
"""
- extras = {}
+ extras = {
+ "features": [],
+ }
if "vision_support" in credentials and credentials["vision_support"] == "true":
- extras["features"] = [ModelFeature.VISION]
+ extras["features"].append(ModelFeature.VISION)
+ if "function_call_support" in credentials and credentials["function_call_support"] == "true":
+ extras["features"].append(ModelFeature.TOOL_CALL)
+ extras["features"].append(ModelFeature.MULTI_TOOL_CALL)
entity = AIModelEntity(
model=model,
diff --git a/api/core/model_runtime/model_providers/ollama/ollama.yaml b/api/core/model_runtime/model_providers/ollama/ollama.yaml
index 33747753bd..6560fcd180 100644
--- a/api/core/model_runtime/model_providers/ollama/ollama.yaml
+++ b/api/core/model_runtime/model_providers/ollama/ollama.yaml
@@ -96,3 +96,22 @@ model_credential_schema:
label:
en_US: 'No'
zh_Hans: 否
+ - variable: function_call_support
+ label:
+ zh_Hans: 是否支持函数调用
+ en_US: Function call support
+ show_on:
+ - variable: __model_type
+ value: llm
+ default: 'false'
+ type: radio
+ required: false
+ options:
+ - value: 'true'
+ label:
+ en_US: 'Yes'
+ zh_Hans: 是
+ - value: 'false'
+ label:
+ en_US: 'No'
+ zh_Hans: 否
diff --git a/api/core/model_runtime/model_providers/openai/llm/llm.py b/api/core/model_runtime/model_providers/openai/llm/llm.py
index 68317d7179..f16f81c125 100644
--- a/api/core/model_runtime/model_providers/openai/llm/llm.py
+++ b/api/core/model_runtime/model_providers/openai/llm/llm.py
@@ -615,19 +615,11 @@ class OpenAILargeLanguageModel(_CommonOpenAI, LargeLanguageModel):
prompt_messages = self._clear_illegal_prompt_messages(model, prompt_messages)
# o1 compatibility
- block_as_stream = False
if model.startswith("o1"):
if "max_tokens" in model_parameters:
model_parameters["max_completion_tokens"] = model_parameters["max_tokens"]
del model_parameters["max_tokens"]
- if stream:
- block_as_stream = True
- stream = False
-
- if "stream_options" in extra_model_kwargs:
- del extra_model_kwargs["stream_options"]
-
if "stop" in extra_model_kwargs:
del extra_model_kwargs["stop"]
@@ -644,47 +636,7 @@ class OpenAILargeLanguageModel(_CommonOpenAI, LargeLanguageModel):
if stream:
return self._handle_chat_generate_stream_response(model, credentials, response, prompt_messages, tools)
- block_result = self._handle_chat_generate_response(model, credentials, response, prompt_messages, tools)
-
- if block_as_stream:
- return self._handle_chat_block_as_stream_response(block_result, prompt_messages, stop)
-
- return block_result
-
- def _handle_chat_block_as_stream_response(
- self,
- block_result: LLMResult,
- prompt_messages: list[PromptMessage],
- stop: Optional[list[str]] = None,
- ) -> Generator[LLMResultChunk, None, None]:
- """
- Handle llm chat response
-
- :param model: model name
- :param credentials: credentials
- :param response: response
- :param prompt_messages: prompt messages
- :param tools: tools for tool calling
- :param stop: stop words
- :return: llm response chunk generator
- """
- text = block_result.message.content
- text = cast(str, text)
-
- if stop:
- text = self.enforce_stop_tokens(text, stop)
-
- yield LLMResultChunk(
- model=block_result.model,
- prompt_messages=prompt_messages,
- system_fingerprint=block_result.system_fingerprint,
- delta=LLMResultChunkDelta(
- index=0,
- message=AssistantPromptMessage(content=text),
- finish_reason="stop",
- usage=block_result.usage,
- ),
- )
+ return self._handle_chat_generate_response(model, credentials, response, prompt_messages, tools)
def _handle_chat_generate_response(
self,
diff --git a/api/core/model_runtime/model_providers/openrouter/llm/llm.py b/api/core/model_runtime/model_providers/openrouter/llm/llm.py
index 736ab8e7a8..2d6ece8113 100644
--- a/api/core/model_runtime/model_providers/openrouter/llm/llm.py
+++ b/api/core/model_runtime/model_providers/openrouter/llm/llm.py
@@ -45,19 +45,7 @@ class OpenRouterLargeLanguageModel(OAIAPICompatLargeLanguageModel):
user: Optional[str] = None,
) -> Union[LLMResult, Generator]:
self._update_credential(model, credentials)
-
- block_as_stream = False
- if model.startswith("openai/o1"):
- block_as_stream = True
- stop = None
-
- # invoke block as stream
- if stream and block_as_stream:
- return self._generate_block_as_stream(
- model, credentials, prompt_messages, model_parameters, tools, stop, user
- )
- else:
- return super()._generate(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user)
+ return super()._generate(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user)
def _generate_block_as_stream(
self,
@@ -69,9 +57,7 @@ class OpenRouterLargeLanguageModel(OAIAPICompatLargeLanguageModel):
stop: Optional[list[str]] = None,
user: Optional[str] = None,
) -> Generator:
- resp: LLMResult = super()._generate(
- model, credentials, prompt_messages, model_parameters, tools, stop, False, user
- )
+ resp = super()._generate(model, credentials, prompt_messages, model_parameters, tools, stop, False, user)
yield LLMResultChunk(
model=model,
diff --git a/api/core/model_runtime/model_providers/sagemaker/rerank/rerank.py b/api/core/model_runtime/model_providers/sagemaker/rerank/rerank.py
index 49c3fa5921..df797bae26 100644
--- a/api/core/model_runtime/model_providers/sagemaker/rerank/rerank.py
+++ b/api/core/model_runtime/model_providers/sagemaker/rerank/rerank.py
@@ -113,7 +113,7 @@ class SageMakerRerankModel(RerankModel):
return RerankResult(model=model, docs=rerank_documents)
except Exception as e:
- logger.exception(f"Exception {e}, line : {line}")
+ logger.exception(f"Failed to invoke rerank model, model: {model}")
def validate_credentials(self, model: str, credentials: dict) -> None:
"""
diff --git a/api/core/model_runtime/model_providers/sagemaker/speech2text/speech2text.py b/api/core/model_runtime/model_providers/sagemaker/speech2text/speech2text.py
index 8fdf68abe1..2d50e9c7b4 100644
--- a/api/core/model_runtime/model_providers/sagemaker/speech2text/speech2text.py
+++ b/api/core/model_runtime/model_providers/sagemaker/speech2text/speech2text.py
@@ -78,7 +78,7 @@ class SageMakerSpeech2TextModel(Speech2TextModel):
json_obj = json.loads(json_str)
asr_text = json_obj["text"]
except Exception as e:
- logger.exception(f"failed to invoke speech2text model, {e}")
+            logger.exception(f"Failed to invoke speech2text model, model: {model}")
raise CredentialsValidateFailedError(str(e))
return asr_text
diff --git a/api/core/model_runtime/model_providers/sagemaker/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/sagemaker/text_embedding/text_embedding.py
index ececfda11a..ef4ddcd6a7 100644
--- a/api/core/model_runtime/model_providers/sagemaker/text_embedding/text_embedding.py
+++ b/api/core/model_runtime/model_providers/sagemaker/text_embedding/text_embedding.py
@@ -117,7 +117,7 @@ class SageMakerEmbeddingModel(TextEmbeddingModel):
return TextEmbeddingResult(embeddings=all_embeddings, usage=usage, model=model)
except Exception as e:
- logger.exception(f"Exception {e}, line : {line}")
+ logger.exception(f"Failed to invoke text embedding model, model: {model}, line: {line}")
def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int:
"""
diff --git a/api/core/model_runtime/model_providers/tongyi/rerank/rerank.py b/api/core/model_runtime/model_providers/tongyi/rerank/rerank.py
index c9245bd82d..a5ce9ead6e 100644
--- a/api/core/model_runtime/model_providers/tongyi/rerank/rerank.py
+++ b/api/core/model_runtime/model_providers/tongyi/rerank/rerank.py
@@ -65,6 +65,8 @@ class GTERerankModel(RerankModel):
)
rerank_documents = []
+ if not response.output:
+ return RerankResult(model=model, docs=rerank_documents)
for _, result in enumerate(response.output.results):
# format document
rerank_document = RerankDocument(
diff --git a/api/core/moderation/keywords/keywords.py b/api/core/moderation/keywords/keywords.py
index 4846da8f93..00b3c56c03 100644
--- a/api/core/moderation/keywords/keywords.py
+++ b/api/core/moderation/keywords/keywords.py
@@ -1,3 +1,6 @@
+from collections.abc import Sequence
+from typing import Any
+
from core.moderation.base import Moderation, ModerationAction, ModerationInputsResult, ModerationOutputsResult
@@ -62,5 +65,5 @@ class KeywordsModeration(Moderation):
def _is_violated(self, inputs: dict, keywords_list: list) -> bool:
return any(self._check_keywords_in_value(keywords_list, value) for value in inputs.values())
- def _check_keywords_in_value(self, keywords_list, value) -> bool:
- return any(keyword.lower() in value.lower() for keyword in keywords_list)
+ def _check_keywords_in_value(self, keywords_list: Sequence[str], value: Any) -> bool:
+ return any(keyword.lower() in str(value).lower() for keyword in keywords_list)
diff --git a/api/core/moderation/output_moderation.py b/api/core/moderation/output_moderation.py
index 83f4d2d57d..4635bd9c25 100644
--- a/api/core/moderation/output_moderation.py
+++ b/api/core/moderation/output_moderation.py
@@ -126,6 +126,6 @@ class OutputModeration(BaseModel):
result: ModerationOutputsResult = moderation_factory.moderation_for_outputs(moderation_buffer)
return result
except Exception as e:
- logger.exception("Moderation Output error: %s", e)
+ logger.exception(f"Moderation Output error, app_id: {app_id}")
return None
diff --git a/api/core/ops/langsmith_trace/entities/langsmith_trace_entity.py b/api/core/ops/langsmith_trace/entities/langsmith_trace_entity.py
index 05c932fb99..16c76f363c 100644
--- a/api/core/ops/langsmith_trace/entities/langsmith_trace_entity.py
+++ b/api/core/ops/langsmith_trace/entities/langsmith_trace_entity.py
@@ -49,6 +49,7 @@ class LangSmithRunModel(LangSmithTokenUsage, LangSmithMultiModel):
reference_example_id: Optional[str] = Field(None, description="Reference example ID associated with the run")
input_attachments: Optional[dict[str, Any]] = Field(None, description="Input attachments of the run")
output_attachments: Optional[dict[str, Any]] = Field(None, description="Output attachments of the run")
+ dotted_order: Optional[str] = Field(None, description="Dotted order of the run")
@field_validator("inputs", "outputs")
@classmethod
diff --git a/api/core/ops/langsmith_trace/langsmith_trace.py b/api/core/ops/langsmith_trace/langsmith_trace.py
index ad45050405..c15b132abd 100644
--- a/api/core/ops/langsmith_trace/langsmith_trace.py
+++ b/api/core/ops/langsmith_trace/langsmith_trace.py
@@ -25,7 +25,7 @@ from core.ops.langsmith_trace.entities.langsmith_trace_entity import (
LangSmithRunType,
LangSmithRunUpdateModel,
)
-from core.ops.utils import filter_none_values
+from core.ops.utils import filter_none_values, generate_dotted_order
from extensions.ext_database import db
from models.model import EndUser, MessageFile
from models.workflow import WorkflowNodeExecution
@@ -62,6 +62,16 @@ class LangSmithDataTrace(BaseTraceInstance):
self.generate_name_trace(trace_info)
def workflow_trace(self, trace_info: WorkflowTraceInfo):
+ trace_id = trace_info.message_id or trace_info.workflow_app_log_id or trace_info.workflow_run_id
+ message_dotted_order = (
+ generate_dotted_order(trace_info.message_id, trace_info.start_time) if trace_info.message_id else None
+ )
+ workflow_dotted_order = generate_dotted_order(
+ trace_info.workflow_app_log_id or trace_info.workflow_run_id,
+ trace_info.workflow_data.created_at,
+ message_dotted_order,
+ )
+
if trace_info.message_id:
message_run = LangSmithRunModel(
id=trace_info.message_id,
@@ -76,6 +86,8 @@ class LangSmithDataTrace(BaseTraceInstance):
},
tags=["message", "workflow"],
error=trace_info.error,
+ trace_id=trace_id,
+ dotted_order=message_dotted_order,
)
self.add_run(message_run)
@@ -95,6 +107,8 @@ class LangSmithDataTrace(BaseTraceInstance):
error=trace_info.error,
tags=["workflow"],
parent_run_id=trace_info.message_id or None,
+ trace_id=trace_id,
+ dotted_order=workflow_dotted_order,
)
self.add_run(langsmith_run)
@@ -177,6 +191,7 @@ class LangSmithDataTrace(BaseTraceInstance):
else:
run_type = LangSmithRunType.tool
+ node_dotted_order = generate_dotted_order(node_execution_id, created_at, workflow_dotted_order)
langsmith_run = LangSmithRunModel(
total_tokens=node_total_tokens,
name=node_type,
@@ -191,6 +206,9 @@ class LangSmithDataTrace(BaseTraceInstance):
},
parent_run_id=trace_info.workflow_app_log_id or trace_info.workflow_run_id,
tags=["node_execution"],
+ id=node_execution_id,
+ trace_id=trace_id,
+ dotted_order=node_dotted_order,
)
self.add_run(langsmith_run)
diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py
index 79704c115f..1069889abd 100644
--- a/api/core/ops/ops_trace_manager.py
+++ b/api/core/ops/ops_trace_manager.py
@@ -711,7 +711,7 @@ class TraceQueueManager:
trace_task.app_id = self.app_id
trace_manager_queue.put(trace_task)
except Exception as e:
- logging.exception(f"Error adding trace task: {e}")
+ logging.exception(f"Error adding trace task, trace_type {trace_task.trace_type}")
finally:
self.start_timer()
@@ -730,7 +730,7 @@ class TraceQueueManager:
if tasks:
self.send_to_celery(tasks)
except Exception as e:
- logging.exception(f"Error processing trace tasks: {e}")
+ logging.exception("Error processing trace tasks")
def start_timer(self):
global trace_manager_timer
diff --git a/api/core/ops/utils.py b/api/core/ops/utils.py
index 3cd3fb5756..998eba9ea9 100644
--- a/api/core/ops/utils.py
+++ b/api/core/ops/utils.py
@@ -1,5 +1,6 @@
from contextlib import contextmanager
from datetime import datetime
+from typing import Optional, Union
from extensions.ext_database import db
from models.model import Message
@@ -43,3 +44,19 @@ def replace_text_with_content(data):
return [replace_text_with_content(item) for item in data]
else:
return data
+
+
+def generate_dotted_order(
+ run_id: str, start_time: Union[str, datetime], parent_dotted_order: Optional[str] = None
+) -> str:
+ """
+ generate dotted_order for langsmith
+ """
+ start_time = datetime.fromisoformat(start_time) if isinstance(start_time, str) else start_time
+ timestamp = start_time.strftime("%Y%m%dT%H%M%S%f")[:-3] + "Z"
+ current_segment = f"{timestamp}{run_id}"
+
+ if parent_dotted_order is None:
+ return current_segment
+
+ return f"{parent_dotted_order}.{current_segment}"
diff --git a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector.py b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector.py
index c77cb87376..09104ae422 100644
--- a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector.py
+++ b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector.py
@@ -1,310 +1,62 @@
import json
from typing import Any
-from pydantic import BaseModel
-
-_import_err_msg = (
- "`alibabacloud_gpdb20160503` and `alibabacloud_tea_openapi` packages not found, "
- "please run `pip install alibabacloud_gpdb20160503 alibabacloud_tea_openapi`"
-)
-
from configs import dify_config
+from core.rag.datasource.vdb.analyticdb.analyticdb_vector_openapi import (
+ AnalyticdbVectorOpenAPI,
+ AnalyticdbVectorOpenAPIConfig,
+)
+from core.rag.datasource.vdb.analyticdb.analyticdb_vector_sql import AnalyticdbVectorBySql, AnalyticdbVectorBySqlConfig
from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.embedding.embedding_base import Embeddings
from core.rag.models.document import Document
-from extensions.ext_redis import redis_client
from models.dataset import Dataset
-class AnalyticdbConfig(BaseModel):
- access_key_id: str
- access_key_secret: str
- region_id: str
- instance_id: str
- account: str
- account_password: str
- namespace: str = ("dify",)
- namespace_password: str = (None,)
- metrics: str = ("cosine",)
- read_timeout: int = 60000
-
- def to_analyticdb_client_params(self):
- return {
- "access_key_id": self.access_key_id,
- "access_key_secret": self.access_key_secret,
- "region_id": self.region_id,
- "read_timeout": self.read_timeout,
- }
-
-
class AnalyticdbVector(BaseVector):
- def __init__(self, collection_name: str, config: AnalyticdbConfig):
- self._collection_name = collection_name.lower()
- try:
- from alibabacloud_gpdb20160503.client import Client
- from alibabacloud_tea_openapi import models as open_api_models
- except:
- raise ImportError(_import_err_msg)
- self.config = config
- self._client_config = open_api_models.Config(user_agent="dify", **config.to_analyticdb_client_params())
- self._client = Client(self._client_config)
- self._initialize()
-
- def _initialize(self) -> None:
- cache_key = f"vector_indexing_{self.config.instance_id}"
- lock_name = f"{cache_key}_lock"
- with redis_client.lock(lock_name, timeout=20):
- collection_exist_cache_key = f"vector_indexing_{self.config.instance_id}"
- if redis_client.get(collection_exist_cache_key):
- return
- self._initialize_vector_database()
- self._create_namespace_if_not_exists()
- redis_client.set(collection_exist_cache_key, 1, ex=3600)
-
- def _initialize_vector_database(self) -> None:
- from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
-
- request = gpdb_20160503_models.InitVectorDatabaseRequest(
- dbinstance_id=self.config.instance_id,
- region_id=self.config.region_id,
- manager_account=self.config.account,
- manager_account_password=self.config.account_password,
- )
- self._client.init_vector_database(request)
-
- def _create_namespace_if_not_exists(self) -> None:
- from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
- from Tea.exceptions import TeaException
-
- try:
- request = gpdb_20160503_models.DescribeNamespaceRequest(
- dbinstance_id=self.config.instance_id,
- region_id=self.config.region_id,
- namespace=self.config.namespace,
- manager_account=self.config.account,
- manager_account_password=self.config.account_password,
- )
- self._client.describe_namespace(request)
- except TeaException as e:
- if e.statusCode == 404:
- request = gpdb_20160503_models.CreateNamespaceRequest(
- dbinstance_id=self.config.instance_id,
- region_id=self.config.region_id,
- manager_account=self.config.account,
- manager_account_password=self.config.account_password,
- namespace=self.config.namespace,
- namespace_password=self.config.namespace_password,
- )
- self._client.create_namespace(request)
- else:
- raise ValueError(f"failed to create namespace {self.config.namespace}: {e}")
-
- def _create_collection_if_not_exists(self, embedding_dimension: int):
- from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
- from Tea.exceptions import TeaException
-
- cache_key = f"vector_indexing_{self._collection_name}"
- lock_name = f"{cache_key}_lock"
- with redis_client.lock(lock_name, timeout=20):
- collection_exist_cache_key = f"vector_indexing_{self._collection_name}"
- if redis_client.get(collection_exist_cache_key):
- return
- try:
- request = gpdb_20160503_models.DescribeCollectionRequest(
- dbinstance_id=self.config.instance_id,
- region_id=self.config.region_id,
- namespace=self.config.namespace,
- namespace_password=self.config.namespace_password,
- collection=self._collection_name,
- )
- self._client.describe_collection(request)
- except TeaException as e:
- if e.statusCode == 404:
- metadata = '{"ref_doc_id":"text","page_content":"text","metadata_":"jsonb"}'
- full_text_retrieval_fields = "page_content"
- request = gpdb_20160503_models.CreateCollectionRequest(
- dbinstance_id=self.config.instance_id,
- region_id=self.config.region_id,
- manager_account=self.config.account,
- manager_account_password=self.config.account_password,
- namespace=self.config.namespace,
- collection=self._collection_name,
- dimension=embedding_dimension,
- metrics=self.config.metrics,
- metadata=metadata,
- full_text_retrieval_fields=full_text_retrieval_fields,
- )
- self._client.create_collection(request)
- else:
- raise ValueError(f"failed to create collection {self._collection_name}: {e}")
- redis_client.set(collection_exist_cache_key, 1, ex=3600)
+ def __init__(
+ self, collection_name: str, api_config: AnalyticdbVectorOpenAPIConfig, sql_config: AnalyticdbVectorBySqlConfig
+ ):
+ super().__init__(collection_name)
+ if api_config is not None:
+ self.analyticdb_vector = AnalyticdbVectorOpenAPI(collection_name, api_config)
+ else:
+ self.analyticdb_vector = AnalyticdbVectorBySql(collection_name, sql_config)
def get_type(self) -> str:
return VectorType.ANALYTICDB
def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
dimension = len(embeddings[0])
- self._create_collection_if_not_exists(dimension)
- self.add_texts(texts, embeddings)
+ self.analyticdb_vector._create_collection_if_not_exists(dimension)
+ self.analyticdb_vector.add_texts(texts, embeddings)
- def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
- from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
-
- rows: list[gpdb_20160503_models.UpsertCollectionDataRequestRows] = []
- for doc, embedding in zip(documents, embeddings, strict=True):
- metadata = {
- "ref_doc_id": doc.metadata["doc_id"],
- "page_content": doc.page_content,
- "metadata_": json.dumps(doc.metadata),
- }
- rows.append(
- gpdb_20160503_models.UpsertCollectionDataRequestRows(
- vector=embedding,
- metadata=metadata,
- )
- )
- request = gpdb_20160503_models.UpsertCollectionDataRequest(
- dbinstance_id=self.config.instance_id,
- region_id=self.config.region_id,
- namespace=self.config.namespace,
- namespace_password=self.config.namespace_password,
- collection=self._collection_name,
- rows=rows,
- )
- self._client.upsert_collection_data(request)
+ def add_texts(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
+ self.analyticdb_vector.add_texts(texts, embeddings)
def text_exists(self, id: str) -> bool:
- from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
-
- request = gpdb_20160503_models.QueryCollectionDataRequest(
- dbinstance_id=self.config.instance_id,
- region_id=self.config.region_id,
- namespace=self.config.namespace,
- namespace_password=self.config.namespace_password,
- collection=self._collection_name,
- metrics=self.config.metrics,
- include_values=True,
- vector=None,
- content=None,
- top_k=1,
- filter=f"ref_doc_id='{id}'",
- )
- response = self._client.query_collection_data(request)
- return len(response.body.matches.match) > 0
+ return self.analyticdb_vector.text_exists(id)
def delete_by_ids(self, ids: list[str]) -> None:
- from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
-
- ids_str = ",".join(f"'{id}'" for id in ids)
- ids_str = f"({ids_str})"
- request = gpdb_20160503_models.DeleteCollectionDataRequest(
- dbinstance_id=self.config.instance_id,
- region_id=self.config.region_id,
- namespace=self.config.namespace,
- namespace_password=self.config.namespace_password,
- collection=self._collection_name,
- collection_data=None,
- collection_data_filter=f"ref_doc_id IN {ids_str}",
- )
- self._client.delete_collection_data(request)
+ self.analyticdb_vector.delete_by_ids(ids)
def delete_by_metadata_field(self, key: str, value: str) -> None:
- from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
-
- request = gpdb_20160503_models.DeleteCollectionDataRequest(
- dbinstance_id=self.config.instance_id,
- region_id=self.config.region_id,
- namespace=self.config.namespace,
- namespace_password=self.config.namespace_password,
- collection=self._collection_name,
- collection_data=None,
- collection_data_filter=f"metadata_ ->> '{key}' = '{value}'",
- )
- self._client.delete_collection_data(request)
+ self.analyticdb_vector.delete_by_metadata_field(key, value)
def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
- from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
-
- score_threshold = kwargs.get("score_threshold") or 0.0
- request = gpdb_20160503_models.QueryCollectionDataRequest(
- dbinstance_id=self.config.instance_id,
- region_id=self.config.region_id,
- namespace=self.config.namespace,
- namespace_password=self.config.namespace_password,
- collection=self._collection_name,
- include_values=kwargs.pop("include_values", True),
- metrics=self.config.metrics,
- vector=query_vector,
- content=None,
- top_k=kwargs.get("top_k", 4),
- filter=None,
- )
- response = self._client.query_collection_data(request)
- documents = []
- for match in response.body.matches.match:
- if match.score > score_threshold:
- metadata = json.loads(match.metadata.get("metadata_"))
- metadata["score"] = match.score
- doc = Document(
- page_content=match.metadata.get("page_content"),
- metadata=metadata,
- )
- documents.append(doc)
- documents = sorted(documents, key=lambda x: x.metadata["score"], reverse=True)
- return documents
+        return self.analyticdb_vector.search_by_vector(query_vector, **kwargs)
def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
- from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
-
- score_threshold = float(kwargs.get("score_threshold") or 0.0)
- request = gpdb_20160503_models.QueryCollectionDataRequest(
- dbinstance_id=self.config.instance_id,
- region_id=self.config.region_id,
- namespace=self.config.namespace,
- namespace_password=self.config.namespace_password,
- collection=self._collection_name,
- include_values=kwargs.pop("include_values", True),
- metrics=self.config.metrics,
- vector=None,
- content=query,
- top_k=kwargs.get("top_k", 4),
- filter=None,
- )
- response = self._client.query_collection_data(request)
- documents = []
- for match in response.body.matches.match:
- if match.score > score_threshold:
- metadata = json.loads(match.metadata.get("metadata_"))
- metadata["score"] = match.score
- doc = Document(
- page_content=match.metadata.get("page_content"),
- vector=match.metadata.get("vector"),
- metadata=metadata,
- )
- documents.append(doc)
- documents = sorted(documents, key=lambda x: x.metadata["score"], reverse=True)
- return documents
+ return self.analyticdb_vector.search_by_full_text(query, **kwargs)
def delete(self) -> None:
- try:
- from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
-
- request = gpdb_20160503_models.DeleteCollectionRequest(
- collection=self._collection_name,
- dbinstance_id=self.config.instance_id,
- namespace=self.config.namespace,
- namespace_password=self.config.namespace_password,
- region_id=self.config.region_id,
- )
- self._client.delete_collection(request)
- except Exception as e:
- raise e
+ self.analyticdb_vector.delete()
class AnalyticdbVectorFactory(AbstractVectorFactory):
- def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings):
+ def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> AnalyticdbVector:
if dataset.index_struct_dict:
class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"]
collection_name = class_prefix.lower()
@@ -313,26 +65,9 @@ class AnalyticdbVectorFactory(AbstractVectorFactory):
collection_name = Dataset.gen_collection_name_by_id(dataset_id).lower()
dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.ANALYTICDB, collection_name))
- # handle optional params
- if dify_config.ANALYTICDB_KEY_ID is None:
- raise ValueError("ANALYTICDB_KEY_ID should not be None")
- if dify_config.ANALYTICDB_KEY_SECRET is None:
- raise ValueError("ANALYTICDB_KEY_SECRET should not be None")
- if dify_config.ANALYTICDB_REGION_ID is None:
- raise ValueError("ANALYTICDB_REGION_ID should not be None")
- if dify_config.ANALYTICDB_INSTANCE_ID is None:
- raise ValueError("ANALYTICDB_INSTANCE_ID should not be None")
- if dify_config.ANALYTICDB_ACCOUNT is None:
- raise ValueError("ANALYTICDB_ACCOUNT should not be None")
- if dify_config.ANALYTICDB_PASSWORD is None:
- raise ValueError("ANALYTICDB_PASSWORD should not be None")
- if dify_config.ANALYTICDB_NAMESPACE is None:
- raise ValueError("ANALYTICDB_NAMESPACE should not be None")
- if dify_config.ANALYTICDB_NAMESPACE_PASSWORD is None:
- raise ValueError("ANALYTICDB_NAMESPACE_PASSWORD should not be None")
- return AnalyticdbVector(
- collection_name,
- AnalyticdbConfig(
+ if dify_config.ANALYTICDB_HOST is None:
+ # implemented through OpenAPI
+ apiConfig = AnalyticdbVectorOpenAPIConfig(
access_key_id=dify_config.ANALYTICDB_KEY_ID,
access_key_secret=dify_config.ANALYTICDB_KEY_SECRET,
region_id=dify_config.ANALYTICDB_REGION_ID,
@@ -341,5 +76,22 @@ class AnalyticdbVectorFactory(AbstractVectorFactory):
account_password=dify_config.ANALYTICDB_PASSWORD,
namespace=dify_config.ANALYTICDB_NAMESPACE,
namespace_password=dify_config.ANALYTICDB_NAMESPACE_PASSWORD,
- ),
+ )
+ sqlConfig = None
+ else:
+ # implemented through sql
+ sqlConfig = AnalyticdbVectorBySqlConfig(
+ host=dify_config.ANALYTICDB_HOST,
+ port=dify_config.ANALYTICDB_PORT,
+ account=dify_config.ANALYTICDB_ACCOUNT,
+ account_password=dify_config.ANALYTICDB_PASSWORD,
+ min_connection=dify_config.ANALYTICDB_MIN_CONNECTION,
+ max_connection=dify_config.ANALYTICDB_MAX_CONNECTION,
+ namespace=dify_config.ANALYTICDB_NAMESPACE,
+ )
+ apiConfig = None
+ return AnalyticdbVector(
+ collection_name,
+ apiConfig,
+ sqlConfig,
)
diff --git a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py
new file mode 100644
index 0000000000..05e0ebc54f
--- /dev/null
+++ b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py
@@ -0,0 +1,309 @@
+import json
+from typing import Any
+
+from pydantic import BaseModel, model_validator
+
+_import_err_msg = (
+ "`alibabacloud_gpdb20160503` and `alibabacloud_tea_openapi` packages not found, "
+ "please run `pip install alibabacloud_gpdb20160503 alibabacloud_tea_openapi`"
+)
+
+from core.rag.models.document import Document
+from extensions.ext_redis import redis_client
+
+
+class AnalyticdbVectorOpenAPIConfig(BaseModel):
+ access_key_id: str
+ access_key_secret: str
+ region_id: str
+ instance_id: str
+ account: str
+ account_password: str
+ namespace: str = "dify"
+    namespace_password: str | None = None
+ metrics: str = "cosine"
+ read_timeout: int = 60000
+
+ @model_validator(mode="before")
+ @classmethod
+ def validate_config(cls, values: dict) -> dict:
+ if not values["access_key_id"]:
+ raise ValueError("config ANALYTICDB_KEY_ID is required")
+ if not values["access_key_secret"]:
+ raise ValueError("config ANALYTICDB_KEY_SECRET is required")
+ if not values["region_id"]:
+ raise ValueError("config ANALYTICDB_REGION_ID is required")
+ if not values["instance_id"]:
+ raise ValueError("config ANALYTICDB_INSTANCE_ID is required")
+ if not values["account"]:
+ raise ValueError("config ANALYTICDB_ACCOUNT is required")
+ if not values["account_password"]:
+ raise ValueError("config ANALYTICDB_PASSWORD is required")
+ if not values["namespace_password"]:
+ raise ValueError("config ANALYTICDB_NAMESPACE_PASSWORD is required")
+ return values
+
+ def to_analyticdb_client_params(self):
+ return {
+ "access_key_id": self.access_key_id,
+ "access_key_secret": self.access_key_secret,
+ "region_id": self.region_id,
+ "read_timeout": self.read_timeout,
+ }
+
+
+class AnalyticdbVectorOpenAPI:
+ def __init__(self, collection_name: str, config: AnalyticdbVectorOpenAPIConfig):
+ try:
+ from alibabacloud_gpdb20160503.client import Client
+ from alibabacloud_tea_openapi import models as open_api_models
+        except ImportError:
+ raise ImportError(_import_err_msg)
+ self._collection_name = collection_name.lower()
+ self.config = config
+ self._client_config = open_api_models.Config(user_agent="dify", **config.to_analyticdb_client_params())
+ self._client = Client(self._client_config)
+ self._initialize()
+
+ def _initialize(self) -> None:
+ cache_key = f"vector_initialize_{self.config.instance_id}"
+ lock_name = f"{cache_key}_lock"
+ with redis_client.lock(lock_name, timeout=20):
+ database_exist_cache_key = f"vector_initialize_{self.config.instance_id}"
+ if redis_client.get(database_exist_cache_key):
+ return
+ self._initialize_vector_database()
+ self._create_namespace_if_not_exists()
+ redis_client.set(database_exist_cache_key, 1, ex=3600)
+
+ def _initialize_vector_database(self) -> None:
+ from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
+
+ request = gpdb_20160503_models.InitVectorDatabaseRequest(
+ dbinstance_id=self.config.instance_id,
+ region_id=self.config.region_id,
+ manager_account=self.config.account,
+ manager_account_password=self.config.account_password,
+ )
+ self._client.init_vector_database(request)
+
+ def _create_namespace_if_not_exists(self) -> None:
+ from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
+ from Tea.exceptions import TeaException
+
+ try:
+ request = gpdb_20160503_models.DescribeNamespaceRequest(
+ dbinstance_id=self.config.instance_id,
+ region_id=self.config.region_id,
+ namespace=self.config.namespace,
+ manager_account=self.config.account,
+ manager_account_password=self.config.account_password,
+ )
+ self._client.describe_namespace(request)
+ except TeaException as e:
+ if e.statusCode == 404:
+ request = gpdb_20160503_models.CreateNamespaceRequest(
+ dbinstance_id=self.config.instance_id,
+ region_id=self.config.region_id,
+ manager_account=self.config.account,
+ manager_account_password=self.config.account_password,
+ namespace=self.config.namespace,
+ namespace_password=self.config.namespace_password,
+ )
+ self._client.create_namespace(request)
+ else:
+ raise ValueError(f"failed to create namespace {self.config.namespace}: {e}")
+
+ def _create_collection_if_not_exists(self, embedding_dimension: int):
+ from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
+ from Tea.exceptions import TeaException
+
+ cache_key = f"vector_indexing_{self._collection_name}"
+ lock_name = f"{cache_key}_lock"
+ with redis_client.lock(lock_name, timeout=20):
+ collection_exist_cache_key = f"vector_indexing_{self._collection_name}"
+ if redis_client.get(collection_exist_cache_key):
+ return
+ try:
+ request = gpdb_20160503_models.DescribeCollectionRequest(
+ dbinstance_id=self.config.instance_id,
+ region_id=self.config.region_id,
+ namespace=self.config.namespace,
+ namespace_password=self.config.namespace_password,
+ collection=self._collection_name,
+ )
+ self._client.describe_collection(request)
+ except TeaException as e:
+ if e.statusCode == 404:
+ metadata = '{"ref_doc_id":"text","page_content":"text","metadata_":"jsonb"}'
+ full_text_retrieval_fields = "page_content"
+ request = gpdb_20160503_models.CreateCollectionRequest(
+ dbinstance_id=self.config.instance_id,
+ region_id=self.config.region_id,
+ manager_account=self.config.account,
+ manager_account_password=self.config.account_password,
+ namespace=self.config.namespace,
+ collection=self._collection_name,
+ dimension=embedding_dimension,
+ metrics=self.config.metrics,
+ metadata=metadata,
+ full_text_retrieval_fields=full_text_retrieval_fields,
+ )
+ self._client.create_collection(request)
+ else:
+ raise ValueError(f"failed to create collection {self._collection_name}: {e}")
+ redis_client.set(collection_exist_cache_key, 1, ex=3600)
+
+ def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
+ from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
+
+ rows: list[gpdb_20160503_models.UpsertCollectionDataRequestRows] = []
+ for doc, embedding in zip(documents, embeddings, strict=True):
+ metadata = {
+ "ref_doc_id": doc.metadata["doc_id"],
+ "page_content": doc.page_content,
+ "metadata_": json.dumps(doc.metadata),
+ }
+ rows.append(
+ gpdb_20160503_models.UpsertCollectionDataRequestRows(
+ vector=embedding,
+ metadata=metadata,
+ )
+ )
+ request = gpdb_20160503_models.UpsertCollectionDataRequest(
+ dbinstance_id=self.config.instance_id,
+ region_id=self.config.region_id,
+ namespace=self.config.namespace,
+ namespace_password=self.config.namespace_password,
+ collection=self._collection_name,
+ rows=rows,
+ )
+ self._client.upsert_collection_data(request)
+
+ def text_exists(self, id: str) -> bool:
+ from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
+
+ request = gpdb_20160503_models.QueryCollectionDataRequest(
+ dbinstance_id=self.config.instance_id,
+ region_id=self.config.region_id,
+ namespace=self.config.namespace,
+ namespace_password=self.config.namespace_password,
+ collection=self._collection_name,
+ metrics=self.config.metrics,
+ include_values=True,
+ vector=None,
+ content=None,
+ top_k=1,
+ filter=f"ref_doc_id='{id}'",
+ )
+ response = self._client.query_collection_data(request)
+ return len(response.body.matches.match) > 0
+
+ def delete_by_ids(self, ids: list[str]) -> None:
+ from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
+
+ ids_str = ",".join(f"'{id}'" for id in ids)
+ ids_str = f"({ids_str})"
+ request = gpdb_20160503_models.DeleteCollectionDataRequest(
+ dbinstance_id=self.config.instance_id,
+ region_id=self.config.region_id,
+ namespace=self.config.namespace,
+ namespace_password=self.config.namespace_password,
+ collection=self._collection_name,
+ collection_data=None,
+ collection_data_filter=f"ref_doc_id IN {ids_str}",
+ )
+ self._client.delete_collection_data(request)
+
+ def delete_by_metadata_field(self, key: str, value: str) -> None:
+ from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
+
+ request = gpdb_20160503_models.DeleteCollectionDataRequest(
+ dbinstance_id=self.config.instance_id,
+ region_id=self.config.region_id,
+ namespace=self.config.namespace,
+ namespace_password=self.config.namespace_password,
+ collection=self._collection_name,
+ collection_data=None,
+ collection_data_filter=f"metadata_ ->> '{key}' = '{value}'",
+ )
+ self._client.delete_collection_data(request)
+
+ def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
+ from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
+
+ score_threshold = kwargs.get("score_threshold") or 0.0
+ request = gpdb_20160503_models.QueryCollectionDataRequest(
+ dbinstance_id=self.config.instance_id,
+ region_id=self.config.region_id,
+ namespace=self.config.namespace,
+ namespace_password=self.config.namespace_password,
+ collection=self._collection_name,
+ include_values=kwargs.pop("include_values", True),
+ metrics=self.config.metrics,
+ vector=query_vector,
+ content=None,
+ top_k=kwargs.get("top_k", 4),
+ filter=None,
+ )
+ response = self._client.query_collection_data(request)
+ documents = []
+ for match in response.body.matches.match:
+ if match.score > score_threshold:
+ metadata = json.loads(match.metadata.get("metadata_"))
+ metadata["score"] = match.score
+ doc = Document(
+ page_content=match.metadata.get("page_content"),
+ vector=match.values.value,
+ metadata=metadata,
+ )
+ documents.append(doc)
+ documents = sorted(documents, key=lambda x: x.metadata["score"], reverse=True)
+ return documents
+
+ def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
+ from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
+
+ score_threshold = float(kwargs.get("score_threshold") or 0.0)
+ request = gpdb_20160503_models.QueryCollectionDataRequest(
+ dbinstance_id=self.config.instance_id,
+ region_id=self.config.region_id,
+ namespace=self.config.namespace,
+ namespace_password=self.config.namespace_password,
+ collection=self._collection_name,
+ include_values=kwargs.pop("include_values", True),
+ metrics=self.config.metrics,
+ vector=None,
+ content=query,
+ top_k=kwargs.get("top_k", 4),
+ filter=None,
+ )
+ response = self._client.query_collection_data(request)
+ documents = []
+ for match in response.body.matches.match:
+ if match.score > score_threshold:
+ metadata = json.loads(match.metadata.get("metadata_"))
+ metadata["score"] = match.score
+ doc = Document(
+ page_content=match.metadata.get("page_content"),
+ vector=match.values.value,
+ metadata=metadata,
+ )
+ documents.append(doc)
+ documents = sorted(documents, key=lambda x: x.metadata["score"], reverse=True)
+ return documents
+
+ def delete(self) -> None:
+ try:
+ from alibabacloud_gpdb20160503 import models as gpdb_20160503_models
+
+ request = gpdb_20160503_models.DeleteCollectionRequest(
+ collection=self._collection_name,
+ dbinstance_id=self.config.instance_id,
+ namespace=self.config.namespace,
+ namespace_password=self.config.namespace_password,
+ region_id=self.config.region_id,
+ )
+ self._client.delete_collection(request)
+ except Exception as e:
+ raise e
diff --git a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_sql.py b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_sql.py
new file mode 100644
index 0000000000..e474db5cb2
--- /dev/null
+++ b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_sql.py
@@ -0,0 +1,245 @@
+import json
+import uuid
+from contextlib import contextmanager
+from typing import Any
+
+import psycopg2.extras
+import psycopg2.pool
+from pydantic import BaseModel, model_validator
+
+from core.rag.models.document import Document
+from extensions.ext_redis import redis_client
+
+
+class AnalyticdbVectorBySqlConfig(BaseModel):
+ host: str
+ port: int
+ account: str
+ account_password: str
+ min_connection: int
+ max_connection: int
+ namespace: str = "dify"
+ metrics: str = "cosine"
+
+ @model_validator(mode="before")
+ @classmethod
+ def validate_config(cls, values: dict) -> dict:
+ if not values["host"]:
+ raise ValueError("config ANALYTICDB_HOST is required")
+ if not values["port"]:
+ raise ValueError("config ANALYTICDB_PORT is required")
+ if not values["account"]:
+ raise ValueError("config ANALYTICDB_ACCOUNT is required")
+ if not values["account_password"]:
+ raise ValueError("config ANALYTICDB_PASSWORD is required")
+ if not values["min_connection"]:
+ raise ValueError("config ANALYTICDB_MIN_CONNECTION is required")
+ if not values["max_connection"]:
+ raise ValueError("config ANALYTICDB_MAX_CONNECTION is required")
+ if values["min_connection"] > values["max_connection"]:
+ raise ValueError("config ANALYTICDB_MIN_CONNECTION should less than ANALYTICDB_MAX_CONNECTION")
+ return values
+
+
+class AnalyticdbVectorBySql:
+ def __init__(self, collection_name: str, config: AnalyticdbVectorBySqlConfig):
+ self._collection_name = collection_name.lower()
+ self.databaseName = "knowledgebase"
+ self.config = config
+ self.table_name = f"{self.config.namespace}.{self._collection_name}"
+ self.pool = None
+ self._initialize()
+ if not self.pool:
+ self.pool = self._create_connection_pool()
+
+ def _initialize(self) -> None:
+ cache_key = f"vector_initialize_{self.config.host}"
+ lock_name = f"{cache_key}_lock"
+ with redis_client.lock(lock_name, timeout=20):
+ database_exist_cache_key = f"vector_initialize_{self.config.host}"
+ if redis_client.get(database_exist_cache_key):
+ return
+ self._initialize_vector_database()
+ redis_client.set(database_exist_cache_key, 1, ex=3600)
+
+ def _create_connection_pool(self):
+ return psycopg2.pool.SimpleConnectionPool(
+ self.config.min_connection,
+ self.config.max_connection,
+ host=self.config.host,
+ port=self.config.port,
+ user=self.config.account,
+ password=self.config.account_password,
+ database=self.databaseName,
+ )
+
+ @contextmanager
+ def _get_cursor(self):
+ conn = self.pool.getconn()
+ cur = conn.cursor()
+ try:
+ yield cur
+ finally:
+ cur.close()
+ conn.commit()
+ self.pool.putconn(conn)
+
+ def _initialize_vector_database(self) -> None:
+ conn = psycopg2.connect(
+ host=self.config.host,
+ port=self.config.port,
+ user=self.config.account,
+ password=self.config.account_password,
+ database="postgres",
+ )
+ conn.autocommit = True
+ cur = conn.cursor()
+ try:
+ cur.execute(f"CREATE DATABASE {self.databaseName}")
+ except Exception as e:
+ if "already exists" in str(e):
+ return
+ raise e
+ finally:
+ cur.close()
+ conn.close()
+ self.pool = self._create_connection_pool()
+ with self._get_cursor() as cur:
+ try:
+ cur.execute("CREATE TEXT SEARCH CONFIGURATION zh_cn (PARSER = zhparser)")
+ cur.execute("ALTER TEXT SEARCH CONFIGURATION zh_cn ADD MAPPING FOR n,v,a,i,e,l,x WITH simple")
+ except Exception as e:
+ if "already exists" not in str(e):
+ raise e
+ cur.execute(
+ "CREATE OR REPLACE FUNCTION "
+ "public.to_tsquery_from_text(txt text, lang regconfig DEFAULT 'english'::regconfig) "
+ "RETURNS tsquery LANGUAGE sql IMMUTABLE STRICT AS $function$ "
+ "SELECT to_tsquery(lang, COALESCE(string_agg(split_part(word, ':', 1), ' | '), '')) "
+ "FROM (SELECT unnest(string_to_array(to_tsvector(lang, txt)::text, ' ')) AS word) "
+ "AS words_only;$function$"
+ )
+ cur.execute(f"CREATE SCHEMA IF NOT EXISTS {self.config.namespace}")
+
+ def _create_collection_if_not_exists(self, embedding_dimension: int):
+ cache_key = f"vector_indexing_{self._collection_name}"
+ lock_name = f"{cache_key}_lock"
+ with redis_client.lock(lock_name, timeout=20):
+ collection_exist_cache_key = f"vector_indexing_{self._collection_name}"
+ if redis_client.get(collection_exist_cache_key):
+ return
+ with self._get_cursor() as cur:
+ cur.execute(
+ f"CREATE TABLE IF NOT EXISTS {self.table_name}("
+ f"id text PRIMARY KEY,"
+ f"vector real[], ref_doc_id text, page_content text, metadata_ jsonb, "
+ f"to_tsvector TSVECTOR"
+ f") WITH (fillfactor=70) DISTRIBUTED BY (id);"
+ )
+ if embedding_dimension is not None:
+ index_name = f"{self._collection_name}_embedding_idx"
+ cur.execute(f"ALTER TABLE {self.table_name} ALTER COLUMN vector SET STORAGE PLAIN")
+ cur.execute(
+ f"CREATE INDEX {index_name} ON {self.table_name} USING ann(vector) "
+ f"WITH(dim='{embedding_dimension}', distancemeasure='{self.config.metrics}', "
+ f"pq_enable=0, external_storage=0)"
+ )
+ cur.execute(f"CREATE INDEX ON {self.table_name} USING gin(to_tsvector)")
+ redis_client.set(collection_exist_cache_key, 1, ex=3600)
+
+ def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
+ values = []
+ id_prefix = str(uuid.uuid4()) + "_"
+ sql = f"""
+ INSERT INTO {self.table_name}
+ (id, ref_doc_id, vector, page_content, metadata_, to_tsvector)
+ VALUES (%s, %s, %s, %s, %s, to_tsvector('zh_cn', %s));
+ """
+ for i, doc in enumerate(documents):
+ values.append(
+ (
+ id_prefix + str(i),
+ doc.metadata.get("doc_id", str(uuid.uuid4())),
+ embeddings[i],
+ doc.page_content,
+ json.dumps(doc.metadata),
+ doc.page_content,
+ )
+ )
+ with self._get_cursor() as cur:
+ psycopg2.extras.execute_batch(cur, sql, values)
+
+ def text_exists(self, id: str) -> bool:
+ with self._get_cursor() as cur:
+ cur.execute(f"SELECT id FROM {self.table_name} WHERE ref_doc_id = %s", (id,))
+ return cur.fetchone() is not None
+
+ def delete_by_ids(self, ids: list[str]) -> None:
+ with self._get_cursor() as cur:
+ try:
+ cur.execute(f"DELETE FROM {self.table_name} WHERE ref_doc_id IN %s", (tuple(ids),))
+ except Exception as e:
+ if "does not exist" not in str(e):
+ raise e
+
+ def delete_by_metadata_field(self, key: str, value: str) -> None:
+ with self._get_cursor() as cur:
+ try:
+ cur.execute(f"DELETE FROM {self.table_name} WHERE metadata_->>%s = %s", (key, value))
+ except Exception as e:
+ if "does not exist" not in str(e):
+ raise e
+
+ def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
+ top_k = kwargs.get("top_k", 4)
+ score_threshold = float(kwargs.get("score_threshold") or 0.0)
+ with self._get_cursor() as cur:
+ query_vector_str = json.dumps(query_vector)
+ query_vector_str = "{" + query_vector_str[1:-1] + "}"
+ cur.execute(
+ f"SELECT t.id AS id, t.vector AS vector, (1.0 - t.score) AS score, "
+ f"t.page_content as page_content, t.metadata_ AS metadata_ "
+ f"FROM (SELECT id, vector, page_content, metadata_, vector <=> %s AS score "
+ f"FROM {self.table_name} ORDER BY score LIMIT {top_k} ) t",
+ (query_vector_str,),
+ )
+ documents = []
+ for record in cur:
+ id, vector, score, page_content, metadata = record
+ if score > score_threshold:
+ metadata["score"] = score
+ doc = Document(
+ page_content=page_content,
+ vector=vector,
+ metadata=metadata,
+ )
+ documents.append(doc)
+ return documents
+
+ def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
+ top_k = kwargs.get("top_k", 4)
+ with self._get_cursor() as cur:
+ cur.execute(
+ f"""SELECT id, vector, page_content, metadata_,
+ ts_rank(to_tsvector, to_tsquery_from_text(%s, 'zh_cn'), 32) AS score
+ FROM {self.table_name}
+ WHERE to_tsvector@@to_tsquery_from_text(%s, 'zh_cn')
+ ORDER BY score DESC
+ LIMIT {top_k}""",
+ (f"'{query}'", f"'{query}'"),
+ )
+ documents = []
+ for record in cur:
+ id, vector, page_content, metadata, score = record
+ metadata["score"] = score
+ doc = Document(
+ page_content=page_content,
+ vector=vector,
+ metadata=metadata,
+ )
+ documents.append(doc)
+ return documents
+
+ def delete(self) -> None:
+ with self._get_cursor() as cur:
+ cur.execute(f"DROP TABLE IF EXISTS {self.table_name}")
diff --git a/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py b/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py
index 98da5e3d5e..d26726e864 100644
--- a/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py
+++ b/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py
@@ -242,7 +242,7 @@ class CouchbaseVector(BaseVector):
try:
self._cluster.query(query, named_parameters={"doc_ids": ids}).execute()
except Exception as e:
- logger.exception(e)
+ logger.exception(f"Failed to delete documents, ids: {ids}")
def delete_by_document_id(self, document_id: str):
query = f"""
diff --git a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py
index 79b827797c..08234c0c91 100644
--- a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py
+++ b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py
@@ -81,7 +81,7 @@ class LindormVectorStore(BaseVector):
"ids": batch_ids}, _source=False)
return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]}
except Exception as e:
- logger.exception(f"Error fetching batch {batch_ids}: {e}")
+ logger.exception(f"Error fetching batch {batch_ids}")
return set()
@retry(stop=stop_after_attempt(3), wait=wait_fixed(60))
@@ -99,7 +99,7 @@ class LindormVectorStore(BaseVector):
)
return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]}
except Exception as e:
- logger.exception(f"Error fetching batch {batch_ids}: {e}")
+ logger.exception(f"Error fetching batch ids: {batch_ids}")
return set()
if ids is None:
@@ -187,7 +187,7 @@ class LindormVectorStore(BaseVector):
logger.warning(
f"Index '{self._collection_name}' does not exist. No deletion performed.")
except Exception as e:
- logger.exception(f"Error occurred while deleting the index: {e}")
+ logger.exception(f"Error occurred while deleting the index: {self._collection_name}")
raise e
def text_exists(self, id: str) -> bool:
@@ -213,7 +213,7 @@ class LindormVectorStore(BaseVector):
response = self._client.search(
index=self._collection_name, body=query)
except Exception as e:
- logger.exception(f"Error executing search: {e}")
+ logger.exception(f"Error executing vector search, query: {query}")
raise
docs_and_scores = []
diff --git a/api/core/rag/datasource/vdb/myscale/myscale_vector.py b/api/core/rag/datasource/vdb/myscale/myscale_vector.py
index 2610b60a77..b7b6b803ad 100644
--- a/api/core/rag/datasource/vdb/myscale/myscale_vector.py
+++ b/api/core/rag/datasource/vdb/myscale/myscale_vector.py
@@ -142,7 +142,7 @@ class MyScaleVector(BaseVector):
for r in self._client.query(sql).named_results()
]
except Exception as e:
- logging.exception(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m")
+ logging.exception(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m") # noqa:TRY401
return []
def delete(self) -> None:
diff --git a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py
index 49eb00f140..7a976d7c3c 100644
--- a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py
+++ b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py
@@ -158,7 +158,7 @@ class OpenSearchVector(BaseVector):
try:
response = self._client.search(index=self._collection_name.lower(), body=query)
except Exception as e:
- logger.exception(f"Error executing search: {e}")
+ logger.exception(f"Error executing vector search, query: {query}")
raise
docs = []
diff --git a/api/core/rag/embedding/cached_embedding.py b/api/core/rag/embedding/cached_embedding.py
index 3ac65b88bb..1157c5c8e4 100644
--- a/api/core/rag/embedding/cached_embedding.py
+++ b/api/core/rag/embedding/cached_embedding.py
@@ -69,7 +69,7 @@ class CacheEmbedding(Embeddings):
except IntegrityError:
db.session.rollback()
except Exception as e:
- logging.exception("Failed transform embedding: %s", e)
+ logging.exception("Failed transform embedding")
cache_embeddings = []
try:
for i, embedding in zip(embedding_queue_indices, embedding_queue_embeddings):
@@ -89,7 +89,7 @@ class CacheEmbedding(Embeddings):
db.session.rollback()
except Exception as ex:
db.session.rollback()
- logger.exception("Failed to embed documents: %s", ex)
+        logger.exception("Failed to embed documents")
raise ex
return text_embeddings
@@ -112,7 +112,7 @@ class CacheEmbedding(Embeddings):
embedding_results = (embedding_results / np.linalg.norm(embedding_results)).tolist()
except Exception as ex:
if dify_config.DEBUG:
- logging.exception(f"Failed to embed query text: {ex}")
+ logging.exception(f"Failed to embed query text '{text[:10]}...({len(text)} chars)'")
raise ex
try:
@@ -126,7 +126,7 @@ class CacheEmbedding(Embeddings):
redis_client.setex(embedding_cache_key, 600, encoded_str)
except Exception as ex:
if dify_config.DEBUG:
- logging.exception("Failed to add embedding to redis %s", ex)
+ logging.exception(f"Failed to add embedding to redis for the text '{text[:10]}...({len(text)} chars)'")
raise ex
return embedding_results
diff --git a/api/core/rag/extractor/word_extractor.py b/api/core/rag/extractor/word_extractor.py
index 8e084ab4ff..313bdce48b 100644
--- a/api/core/rag/extractor/word_extractor.py
+++ b/api/core/rag/extractor/word_extractor.py
@@ -229,7 +229,7 @@ class WordExtractor(BaseExtractor):
for i in url_pattern.findall(x.text):
hyperlinks_url = str(i)
except Exception as e:
- logger.exception(e)
+ logger.exception("Failed to parse HYPERLINK xml")
def parse_paragraph(paragraph):
paragraph_content = []
diff --git a/api/core/rag/index_processor/processor/paragraph_index_processor.py b/api/core/rag/index_processor/processor/paragraph_index_processor.py
index ed5712220f..a631f953ce 100644
--- a/api/core/rag/index_processor/processor/paragraph_index_processor.py
+++ b/api/core/rag/index_processor/processor/paragraph_index_processor.py
@@ -11,6 +11,7 @@ from core.rag.extractor.entity.extract_setting import ExtractSetting
from core.rag.extractor.extract_processor import ExtractProcessor
from core.rag.index_processor.index_processor_base import BaseIndexProcessor
from core.rag.models.document import Document
+from core.tools.utils.text_processing_utils import remove_leading_symbols
from libs import helper
from models.dataset import Dataset
@@ -43,11 +44,7 @@ class ParagraphIndexProcessor(BaseIndexProcessor):
document_node.metadata["doc_id"] = doc_id
document_node.metadata["doc_hash"] = hash
# delete Splitter character
- page_content = document_node.page_content
- if page_content.startswith(".") or page_content.startswith("。"):
- page_content = page_content[1:].strip()
- else:
- page_content = page_content
+ page_content = remove_leading_symbols(document_node.page_content).strip()
if len(page_content) > 0:
document_node.page_content = page_content
split_documents.append(document_node)
diff --git a/api/core/rag/index_processor/processor/qa_index_processor.py b/api/core/rag/index_processor/processor/qa_index_processor.py
index 1dbc473281..320f0157a1 100644
--- a/api/core/rag/index_processor/processor/qa_index_processor.py
+++ b/api/core/rag/index_processor/processor/qa_index_processor.py
@@ -18,6 +18,7 @@ from core.rag.extractor.entity.extract_setting import ExtractSetting
from core.rag.extractor.extract_processor import ExtractProcessor
from core.rag.index_processor.index_processor_base import BaseIndexProcessor
from core.rag.models.document import Document
+from core.tools.utils.text_processing_utils import remove_leading_symbols
from libs import helper
from models.dataset import Dataset
@@ -53,11 +54,7 @@ class QAIndexProcessor(BaseIndexProcessor):
document_node.metadata["doc_hash"] = hash
# delete Splitter character
page_content = document_node.page_content
- if page_content.startswith(".") or page_content.startswith("。"):
- page_content = page_content[1:]
- else:
- page_content = page_content
- document_node.page_content = page_content
+ document_node.page_content = remove_leading_symbols(page_content)
split_documents.append(document_node)
all_documents.extend(split_documents)
for i in range(0, len(all_documents), 10):
@@ -159,7 +156,7 @@ class QAIndexProcessor(BaseIndexProcessor):
qa_documents.append(qa_document)
format_documents.extend(qa_documents)
except Exception as e:
- logging.exception(e)
+ logging.exception("Failed to format qa document")
all_qa_documents.extend(format_documents)
diff --git a/api/core/rag/rerank/weight_rerank.py b/api/core/rag/rerank/weight_rerank.py
index 2e3fbe04e2..b706f29bb1 100644
--- a/api/core/rag/rerank/weight_rerank.py
+++ b/api/core/rag/rerank/weight_rerank.py
@@ -36,23 +36,21 @@ class WeightRerankRunner(BaseRerankRunner):
:return:
"""
- docs = []
- doc_id = []
unique_documents = []
+        doc_ids = set()
         for document in documents:
-            if document.metadata["doc_id"] not in doc_id:
-                doc_id.append(document.metadata["doc_id"])
-                docs.append(document.page_content)
+            doc_id = document.metadata.get("doc_id")
+            if doc_id not in doc_ids:
+                doc_ids.add(doc_id)
unique_documents.append(document)
documents = unique_documents
- rerank_documents = []
query_scores = self._calculate_keyword_score(query, documents)
-
query_vector_scores = self._calculate_cosine(self.tenant_id, query, documents, self.weights.vector_setting)
+
+ rerank_documents = []
for document, query_score, query_vector_score in zip(documents, query_scores, query_vector_scores):
- # format document
score = (
self.weights.vector_setting.vector_weight * query_vector_score
+ self.weights.keyword_setting.keyword_weight * query_score
@@ -61,7 +59,8 @@ class WeightRerankRunner(BaseRerankRunner):
continue
document.metadata["score"] = score
rerank_documents.append(document)
- rerank_documents = sorted(rerank_documents, key=lambda x: x.metadata["score"], reverse=True)
+
+ rerank_documents.sort(key=lambda x: x.metadata["score"], reverse=True)
return rerank_documents[:top_n] if top_n else rerank_documents
def _calculate_keyword_score(self, query: str, documents: list[Document]) -> list[float]:
diff --git a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_news.py b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_news.py
new file mode 100644
index 0000000000..3a6fd394a8
--- /dev/null
+++ b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_news.py
@@ -0,0 +1,87 @@
+from typing import Any
+
+from duckduckgo_search import DDGS
+
+from core.model_runtime.entities.message_entities import SystemPromptMessage
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+
+SUMMARY_PROMPT = """
+User's query:
+{query}
+
+Here are the news results:
+{content}
+
+Please summarize the news in a few sentences.
+"""
+
+
+class DuckDuckGoNewsSearchTool(BuiltinTool):
+ """
+ Tool for performing a news search using DuckDuckGo search engine.
+ """
+
+ def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage | list[ToolInvokeMessage]:
+ query_dict = {
+ "keywords": tool_parameters.get("query"),
+ "timelimit": tool_parameters.get("timelimit"),
+ "max_results": tool_parameters.get("max_results"),
+ "safesearch": "moderate",
+ "region": "wt-wt",
+ }
+ try:
+ response = list(DDGS().news(**query_dict))
+ if not response:
+ return [self.create_text_message("No news found matching your criteria.")]
+ except Exception as e:
+ return [self.create_text_message(f"Error searching news: {str(e)}")]
+
+ require_summary = tool_parameters.get("require_summary", False)
+
+ if require_summary:
+ results = "\n".join([f"{res.get('title')}: {res.get('body')}" for res in response])
+ results = self.summary_results(user_id=user_id, content=results, query=query_dict["keywords"])
+ return self.create_text_message(text=results)
+
+ # Create rich markdown content for each news item
+ markdown_result = "\n\n"
+ json_result = []
+
+ for res in response:
+ markdown_result += f"### {res.get('title', 'Untitled')}\n\n"
+ if res.get("date"):
+ markdown_result += f"**Date:** {res.get('date')}\n\n"
+ if res.get("body"):
+ markdown_result += f"{res.get('body')}\n\n"
+ if res.get("source"):
+ markdown_result += f"*Source: {res.get('source')}*\n\n"
+ if res.get("image"):
+                markdown_result += f"![image]({res.get('image')})\n\n"
+ markdown_result += f"[Read more]({res.get('url', '')})\n\n---\n\n"
+
+ json_result.append(
+ self.create_json_message(
+ {
+ "title": res.get("title", ""),
+ "date": res.get("date", ""),
+ "body": res.get("body", ""),
+ "url": res.get("url", ""),
+ "image": res.get("image", ""),
+ "source": res.get("source", ""),
+ }
+ )
+ )
+
+ return [self.create_text_message(markdown_result)] + json_result
+
+ def summary_results(self, user_id: str, content: str, query: str) -> str:
+ prompt = SUMMARY_PROMPT.format(query=query, content=content)
+ summary = self.invoke_model(
+ user_id=user_id,
+ prompt_messages=[
+ SystemPromptMessage(content=prompt),
+ ],
+ stop=[],
+ )
+ return summary.message.content
diff --git a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_news.yaml b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_news.yaml
new file mode 100644
index 0000000000..eb2b67b7c9
--- /dev/null
+++ b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_news.yaml
@@ -0,0 +1,71 @@
+identity:
+ name: ddgo_news
+ author: Assistant
+ label:
+ en_US: DuckDuckGo News Search
+ zh_Hans: DuckDuckGo 新闻搜索
+description:
+ human:
+ en_US: Perform news searches on DuckDuckGo and get results.
+ zh_Hans: 在 DuckDuckGo 上进行新闻搜索并获取结果。
+ llm: Perform news searches on DuckDuckGo and get results.
+parameters:
+ - name: query
+ type: string
+ required: true
+ label:
+ en_US: Query String
+ zh_Hans: 查询语句
+ human_description:
+ en_US: Search Query.
+ zh_Hans: 搜索查询语句。
+ llm_description: Key words for searching
+ form: llm
+ - name: max_results
+ type: number
+ required: true
+ default: 5
+ label:
+ en_US: Max Results
+ zh_Hans: 最大结果数量
+ human_description:
+ en_US: The Max Results
+ zh_Hans: 最大结果数量
+ form: form
+ - name: timelimit
+ type: select
+ required: false
+ options:
+ - value: Day
+ label:
+ en_US: Current Day
+ zh_Hans: 当天
+ - value: Week
+ label:
+ en_US: Current Week
+ zh_Hans: 本周
+ - value: Month
+ label:
+ en_US: Current Month
+ zh_Hans: 当月
+ - value: Year
+ label:
+ en_US: Current Year
+ zh_Hans: 今年
+ label:
+ en_US: Result Time Limit
+ zh_Hans: 结果时间限制
+ human_description:
+ en_US: Use when querying results within a specific time range only.
+ zh_Hans: 只查询一定时间范围内的结果时使用
+ form: form
+ - name: require_summary
+ type: boolean
+ default: false
+ label:
+ en_US: Require Summary
+ zh_Hans: 是否总结
+ human_description:
+ en_US: Whether to pass the news results to llm for summarization.
+ zh_Hans: 是否需要将新闻结果传给大模型总结
+ form: form
diff --git a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_video.py b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_video.py
new file mode 100644
index 0000000000..4b74b223c1
--- /dev/null
+++ b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_video.py
@@ -0,0 +1,75 @@
+from typing import Any, ClassVar
+
+from duckduckgo_search import DDGS
+
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+
+
+class DuckDuckGoVideoSearchTool(BuiltinTool):
+ """
+ Tool for performing a video search using DuckDuckGo search engine.
+ """
+
+    IFRAME_TEMPLATE: ClassVar[str] = """
+<div style="position: relative; padding-bottom: 56.25%; height: 0; overflow: hidden;">
+    <iframe
+        style="position: absolute; top: 0; left: 0; width: 100%; height: 100%;"
+        src="{src}"
+        frameborder="0"
+        allowfullscreen>
+    </iframe>
+</div>"""
+
+ def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> list[ToolInvokeMessage]:
+ query_dict = {
+ "keywords": tool_parameters.get("query"),
+ "region": tool_parameters.get("region", "wt-wt"),
+ "safesearch": tool_parameters.get("safesearch", "moderate"),
+ "timelimit": tool_parameters.get("timelimit"),
+ "resolution": tool_parameters.get("resolution"),
+ "duration": tool_parameters.get("duration"),
+ "license_videos": tool_parameters.get("license_videos"),
+ "max_results": tool_parameters.get("max_results"),
+ }
+
+ # Remove None values to use API defaults
+ query_dict = {k: v for k, v in query_dict.items() if v is not None}
+
+ # Get proxy URL from parameters
+ proxy_url = tool_parameters.get("proxy_url", "").strip()
+
+ response = DDGS().videos(**query_dict)
+
+ # Create HTML result with embedded iframes
+ markdown_result = "\n\n"
+ json_result = []
+
+ for res in response:
+ title = res.get("title", "")
+ embed_html = res.get("embed_html", "")
+ description = res.get("description", "")
+ content_url = res.get("content", "")
+
+ # Handle TED.com videos
+ if not embed_html and "ted.com/talks" in content_url:
+ embed_url = content_url.replace("www.ted.com", "embed.ted.com")
+ if proxy_url:
+ embed_url = f"{proxy_url}{embed_url}"
+ embed_html = self.IFRAME_TEMPLATE.format(src=embed_url)
+
+ # Original YouTube/other platform handling
+ elif embed_html:
+ embed_url = res.get("embed_url", "")
+ if proxy_url and embed_url:
+ embed_url = f"{proxy_url}{embed_url}"
+ embed_html = self.IFRAME_TEMPLATE.format(src=embed_url)
+
+ markdown_result += f"{title}\n\n"
+ markdown_result += f"{embed_html}\n\n"
+ markdown_result += "---\n\n"
+
+ json_result.append(self.create_json_message(res))
+
+ return [self.create_text_message(markdown_result)] + json_result
diff --git a/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_video.yaml b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_video.yaml
new file mode 100644
index 0000000000..a516d3cb98
--- /dev/null
+++ b/api/core/tools/provider/builtin/duckduckgo/tools/ddgo_video.yaml
@@ -0,0 +1,97 @@
+identity:
+ name: ddgo_video
+ author: Tao Wang
+ label:
+ en_US: DuckDuckGo Video Search
+ zh_Hans: DuckDuckGo 视频搜索
+description:
+ human:
+ en_US: Search and embedded videos.
+ zh_Hans: 搜索并嵌入视频
+ llm: Search videos on duckduckgo and embed videos in iframe
+parameters:
+ - name: query
+ label:
+ en_US: Query String
+ zh_Hans: 查询语句
+ type: string
+ required: true
+ human_description:
+ en_US: Search Query
+ zh_Hans: 搜索查询语句
+ llm_description: Key words for searching
+ form: llm
+ - name: max_results
+ label:
+ en_US: Max Results
+ zh_Hans: 最大结果数量
+ type: number
+ required: true
+ default: 3
+ minimum: 1
+ maximum: 10
+ human_description:
+ en_US: The max results (1-10)
+ zh_Hans: 最大结果数量(1-10)
+ form: form
+ - name: timelimit
+ label:
+ en_US: Result Time Limit
+ zh_Hans: 结果时间限制
+ type: select
+ required: false
+ options:
+ - value: Day
+ label:
+ en_US: Current Day
+ zh_Hans: 当天
+ - value: Week
+ label:
+ en_US: Current Week
+ zh_Hans: 本周
+ - value: Month
+ label:
+ en_US: Current Month
+ zh_Hans: 当月
+ - value: Year
+ label:
+ en_US: Current Year
+ zh_Hans: 今年
+ human_description:
+ en_US: Query results within a specific time range only
+ zh_Hans: 只查询一定时间范围内的结果时使用
+ form: form
+ - name: duration
+ label:
+ en_US: Video Duration
+ zh_Hans: 视频时长
+ type: select
+ required: false
+ options:
+ - value: short
+ label:
+ en_US: Short (<4 minutes)
+ zh_Hans: 短视频(<4分钟)
+ - value: medium
+ label:
+ en_US: Medium (4-20 minutes)
+ zh_Hans: 中等(4-20分钟)
+ - value: long
+ label:
+ en_US: Long (>20 minutes)
+ zh_Hans: 长视频(>20分钟)
+ human_description:
+ en_US: Filter videos by duration
+ zh_Hans: 按时长筛选视频
+ form: form
+ - name: proxy_url
+ label:
+ en_US: Proxy URL
+ zh_Hans: 视频代理地址
+ type: string
+ required: false
+ default: ""
+ human_description:
+ en_US: Proxy URL
+ zh_Hans: 视频代理地址
+ form: form
diff --git a/api/core/tools/provider/builtin/email/tools/send.py b/api/core/tools/provider/builtin/email/tools/send.py
index 35df574a41..bf9e63e1ef 100644
--- a/api/core/tools/provider/builtin/email/tools/send.py
+++ b/api/core/tools/provider/builtin/email/tools/send.py
@@ -38,7 +38,7 @@ def send_mail(parmas: SendEmailToolParameters):
server.sendmail(parmas.email_account, parmas.sender_to, msg.as_string())
return True
except Exception as e:
- logging.exception("send email failed: %s", e)
+ logging.exception("send email failed")
return False
else: # NONE or TLS
try:
@@ -49,5 +49,5 @@ def send_mail(parmas: SendEmailToolParameters):
server.sendmail(parmas.email_account, parmas.sender_to, msg.as_string())
return True
except Exception as e:
- logging.exception("send email failed: %s", e)
+ logging.exception("send email failed")
return False
diff --git a/api/core/tools/provider/builtin/email/tools/send_mail.py b/api/core/tools/provider/builtin/email/tools/send_mail.py
index d51d5439b7..33c040400c 100644
--- a/api/core/tools/provider/builtin/email/tools/send_mail.py
+++ b/api/core/tools/provider/builtin/email/tools/send_mail.py
@@ -17,7 +17,7 @@ class SendMailTool(BuiltinTool):
invoke tools
"""
sender = self.runtime.credentials.get("email_account", "")
- email_rgx = re.compile(r"^[a-zA-Z0-9_-]+@[a-zA-Z0-9_-]+(\.[a-zA-Z0-9_-]+)+$")
+ email_rgx = re.compile(r"^[a-zA-Z0-9._-]+@[a-zA-Z0-9_-]+(\.[a-zA-Z0-9_-]+)+$")
password = self.runtime.credentials.get("email_password", "")
smtp_server = self.runtime.credentials.get("smtp_server", "")
if not smtp_server:
diff --git a/api/core/tools/provider/builtin/email/tools/send_mail_batch.py b/api/core/tools/provider/builtin/email/tools/send_mail_batch.py
index ff7e176990..537dedb27d 100644
--- a/api/core/tools/provider/builtin/email/tools/send_mail_batch.py
+++ b/api/core/tools/provider/builtin/email/tools/send_mail_batch.py
@@ -18,7 +18,7 @@ class SendMailTool(BuiltinTool):
invoke tools
"""
sender = self.runtime.credentials.get("email_account", "")
- email_rgx = re.compile(r"^[a-zA-Z0-9_-]+@[a-zA-Z0-9_-]+(\.[a-zA-Z0-9_-]+)+$")
+ email_rgx = re.compile(r"^[a-zA-Z0-9._-]+@[a-zA-Z0-9_-]+(\.[a-zA-Z0-9_-]+)+$")
password = self.runtime.credentials.get("email_password", "")
smtp_server = self.runtime.credentials.get("smtp_server", "")
if not smtp_server:
diff --git a/api/core/tools/provider/builtin/fal/tools/wizper.py b/api/core/tools/provider/builtin/fal/tools/wizper.py
index 67a5e45118..ba05a62073 100644
--- a/api/core/tools/provider/builtin/fal/tools/wizper.py
+++ b/api/core/tools/provider/builtin/fal/tools/wizper.py
@@ -19,7 +19,7 @@ class WizperTool(BuiltinTool):
version = tool_parameters.get("version", "3")
if audio_file.type != FileType.AUDIO:
- return [self.create_text_message("Not a valid audio file.")]
+ return self.create_text_message("Not a valid audio file.")
api_key = self.runtime.credentials["fal_api_key"]
@@ -31,9 +31,8 @@ class WizperTool(BuiltinTool):
try:
audio_url = fal_client.upload(file_data, mime_type)
-
except Exception as e:
- return [self.create_text_message(f"Error uploading audio file: {str(e)}")]
+ return self.create_text_message(f"Error uploading audio file: {str(e)}")
arguments = {
"audio_url": audio_url,
@@ -49,4 +48,9 @@ class WizperTool(BuiltinTool):
with_logs=False,
)
- return self.create_json_message(result)
+ json_message = self.create_json_message(result)
+
+ text = result.get("text", "")
+ text_message = self.create_text_message(text)
+
+ return [json_message, text_message]
diff --git a/api/core/tools/provider/builtin/gitee_ai/tools/embedding.py b/api/core/tools/provider/builtin/gitee_ai/tools/embedding.py
new file mode 100644
index 0000000000..ab03759c19
--- /dev/null
+++ b/api/core/tools/provider/builtin/gitee_ai/tools/embedding.py
@@ -0,0 +1,25 @@
+from typing import Any, Union
+
+import requests
+
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+
+
+class GiteeAIToolEmbedding(BuiltinTool):
+ def _invoke(
+ self, user_id: str, tool_parameters: dict[str, Any]
+ ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]:
+ headers = {
+ "content-type": "application/json",
+ "authorization": f"Bearer {self.runtime.credentials['api_key']}",
+ }
+
+ payload = {"inputs": tool_parameters.get("inputs")}
+ model = tool_parameters.get("model", "bge-m3")
+ url = f"https://ai.gitee.com/api/serverless/{model}/embeddings"
+ response = requests.post(url, json=payload, headers=headers)
+ if response.status_code != 200:
+ return self.create_text_message(f"Got Error Response:{response.text}")
+
+ return [self.create_text_message(response.content.decode("utf-8"))]
diff --git a/api/core/tools/provider/builtin/gitee_ai/tools/embedding.yaml b/api/core/tools/provider/builtin/gitee_ai/tools/embedding.yaml
new file mode 100644
index 0000000000..53e569d731
--- /dev/null
+++ b/api/core/tools/provider/builtin/gitee_ai/tools/embedding.yaml
@@ -0,0 +1,37 @@
+identity:
+ name: embedding
+ author: gitee_ai
+ label:
+ en_US: embedding
+ icon: icon.svg
+description:
+ human:
+ en_US: Generate word embeddings using Serverless-supported models (compatible with OpenAI)
+ llm: This tool is used to generate word embeddings from text input.
+parameters:
+ - name: model
+ type: string
+ required: true
+ in: path
+ description:
+ en_US: Models supported by the Embedding interface (compatible with OpenAI)
+ enum:
+ - bge-m3
+ - bge-large-zh-v1.5
+ - bge-small-zh-v1.5
+ label:
+ en_US: Service Model
+ zh_Hans: 服务模型
+ default: bge-m3
+ form: form
+ - name: inputs
+ type: string
+ required: true
+ label:
+ en_US: Input Text
+ zh_Hans: 输入文本
+ human_description:
+ en_US: The text input used to generate embeddings.
+ zh_Hans: 用于生成词向量的输入文本。
+ llm_description: This text input will be used to generate embeddings.
+ form: llm
diff --git a/api/core/tools/provider/builtin/gitee_ai/tools/text-to-image.py b/api/core/tools/provider/builtin/gitee_ai/tools/text-to-image.py
index 14291d1729..bb0b2c915b 100644
--- a/api/core/tools/provider/builtin/gitee_ai/tools/text-to-image.py
+++ b/api/core/tools/provider/builtin/gitee_ai/tools/text-to-image.py
@@ -6,7 +6,7 @@ from core.tools.entities.tool_entities import ToolInvokeMessage
from core.tools.tool.builtin_tool import BuiltinTool
-class GiteeAITool(BuiltinTool):
+class GiteeAIToolText2Image(BuiltinTool):
def _invoke(
self, user_id: str, tool_parameters: dict[str, Any]
) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]:
diff --git a/api/core/tools/provider/builtin/tavily/tavily.yaml b/api/core/tools/provider/builtin/tavily/tavily.yaml
index 95820f4d18..aba621b094 100644
--- a/api/core/tools/provider/builtin/tavily/tavily.yaml
+++ b/api/core/tools/provider/builtin/tavily/tavily.yaml
@@ -1,14 +1,12 @@
identity:
- author: Yash Parmar
+ author: Yash Parmar, Kalo Chin
name: tavily
label:
- en_US: Tavily
- zh_Hans: Tavily
- pt_BR: Tavily
+ en_US: Tavily Search & Extract
+ zh_Hans: Tavily 搜索和提取
description:
- en_US: Tavily
- zh_Hans: Tavily
- pt_BR: Tavily
+ en_US: A powerful AI-native search engine and web content extraction tool that provides highly relevant search results and raw content extraction from web pages.
+ zh_Hans: 一个强大的 AI 原生搜索引擎和网页内容提取工具,提供高度相关的搜索结果和网页原始内容提取。
icon: icon.png
tags:
- search
@@ -19,13 +17,10 @@ credentials_for_provider:
label:
en_US: Tavily API key
zh_Hans: Tavily API key
- pt_BR: Tavily API key
placeholder:
en_US: Please input your Tavily API key
zh_Hans: 请输入你的 Tavily API key
- pt_BR: Please input your Tavily API key
help:
en_US: Get your Tavily API key from Tavily
zh_Hans: 从 TavilyApi 获取您的 Tavily API key
- pt_BR: Get your Tavily API key from Tavily
- url: https://docs.tavily.com/docs/welcome
+ url: https://app.tavily.com/home
diff --git a/api/core/tools/provider/builtin/tavily/tools/tavily_extract.py b/api/core/tools/provider/builtin/tavily/tools/tavily_extract.py
new file mode 100644
index 0000000000..a37548018d
--- /dev/null
+++ b/api/core/tools/provider/builtin/tavily/tools/tavily_extract.py
@@ -0,0 +1,145 @@
+from typing import Any
+
+import requests
+
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+
+TAVILY_API_URL = "https://api.tavily.com"
+
+
+class TavilyExtract:
+ """
+ A class for extracting content from web pages using the Tavily Extract API.
+
+ Args:
+ api_key (str): The API key for accessing the Tavily Extract API.
+
+ Methods:
+ extract_content: Retrieves extracted content from the Tavily Extract API.
+ """
+
+ def __init__(self, api_key: str) -> None:
+ self.api_key = api_key
+
+ def extract_content(self, params: dict[str, Any]) -> dict:
+ """
+ Retrieves extracted content from the Tavily Extract API.
+
+ Args:
+ params (Dict[str, Any]): The extraction parameters.
+
+ Returns:
+ dict: The extracted content.
+
+ """
+ # Ensure required parameters are set
+ if "api_key" not in params:
+ params["api_key"] = self.api_key
+
+ # Process parameters
+ processed_params = self._process_params(params)
+
+ response = requests.post(f"{TAVILY_API_URL}/extract", json=processed_params)
+ response.raise_for_status()
+ return response.json()
+
+ def _process_params(self, params: dict[str, Any]) -> dict:
+ """
+ Processes and validates the extraction parameters.
+
+ Args:
+ params (Dict[str, Any]): The extraction parameters.
+
+ Returns:
+ dict: The processed parameters.
+ """
+ processed_params = {}
+
+ # Process 'urls'
+ if "urls" in params:
+ urls = params["urls"]
+ if isinstance(urls, str):
+ processed_params["urls"] = [url.strip() for url in urls.replace(",", " ").split()]
+ elif isinstance(urls, list):
+ processed_params["urls"] = urls
+ else:
+ raise ValueError("The 'urls' parameter is required.")
+
+ # Besides 'urls', only the API key is forwarded (taken from params if supplied, else the instance's)
+ processed_params["api_key"] = params.get("api_key", self.api_key)
+
+ return processed_params
+
+
+class TavilyExtractTool(BuiltinTool):
+ """
+ A tool for extracting content from web pages using Tavily Extract.
+ """
+
+ def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage | list[ToolInvokeMessage]:
+ """
+ Invokes the Tavily Extract tool with the given user ID and tool parameters.
+
+ Args:
+ user_id (str): The ID of the user invoking the tool.
+ tool_parameters (Dict[str, Any]): The parameters for the Tavily Extract tool.
+
+ Returns:
+ ToolInvokeMessage | list[ToolInvokeMessage]: The result of the Tavily Extract tool invocation.
+ """
+ urls = tool_parameters.get("urls", "")
+ api_key = self.runtime.credentials.get("tavily_api_key")
+ if not api_key:
+ return self.create_text_message(
+ "Tavily API key is missing. Please set the 'tavily_api_key' in credentials."
+ )
+ if not urls:
+ return self.create_text_message("Please input at least one URL to extract.")
+
+ tavily_extract = TavilyExtract(api_key)
+ try:
+ raw_results = tavily_extract.extract_content(tool_parameters)
+ except requests.HTTPError as e:
+ return self.create_text_message(f"Error occurred while extracting content: {str(e)}")
+
+ if not raw_results.get("results"):
+ return self.create_text_message("No content could be extracted from the provided URLs.")
+ else:
+ # Always return JSON message with all data
+ json_message = self.create_json_message(raw_results)
+
+ # Create text message based on user-selected parameters
+ text_message_content = self._format_results_as_text(raw_results)
+ text_message = self.create_text_message(text=text_message_content)
+
+ return [json_message, text_message]
+
+ def _format_results_as_text(self, raw_results: dict) -> str:
+ """
+ Formats the raw extraction results into a markdown text based on user-selected parameters.
+
+ Args:
+ raw_results (dict): The raw extraction results.
+
+ Returns:
+ str: The formatted markdown text.
+ """
+ output_lines = []
+
+ for idx, result in enumerate(raw_results.get("results", []), 1):
+ url = result.get("url", "")
+ raw_content = result.get("raw_content", "")
+
+ output_lines.append(f"## Extracted Content {idx}: {url}\n")
+ output_lines.append(f"**Raw Content:**\n{raw_content}\n")
+ output_lines.append("---\n")
+
+ if raw_results.get("failed_results"):
+ output_lines.append("## Failed URLs:\n")
+ for failed in raw_results["failed_results"]:
+ url = failed.get("url", "")
+ error = failed.get("error", "Unknown error")
+ output_lines.append(f"- {url}: {error}\n")
+
+ return "\n".join(output_lines)
diff --git a/api/core/tools/provider/builtin/tavily/tools/tavily_extract.yaml b/api/core/tools/provider/builtin/tavily/tools/tavily_extract.yaml
new file mode 100644
index 0000000000..a04da73b54
--- /dev/null
+++ b/api/core/tools/provider/builtin/tavily/tools/tavily_extract.yaml
@@ -0,0 +1,23 @@
+identity:
+ name: tavily_extract
+ author: Kalo Chin
+ label:
+ en_US: Tavily Extract
+ zh_Hans: Tavily Extract
+description:
+ human:
+ en_US: A web extraction tool built specifically for AI agents (LLMs), delivering raw content from web pages.
+ zh_Hans: 专为人工智能代理 (LLM) 构建的网页提取工具,提供网页的原始内容。
+ llm: A tool for extracting raw content from web pages, designed for AI agents (LLMs).
+parameters:
+ - name: urls
+ type: string
+ required: true
+ label:
+ en_US: URLs
+ zh_Hans: URLs
+ human_description:
+ en_US: A comma-separated list of URLs to extract content from.
+ zh_Hans: 要从中提取内容的 URL 的逗号分隔列表。
+ llm_description: A comma-separated list of URLs to extract content from.
+ form: llm
diff --git a/api/core/tools/provider/builtin/tavily/tools/tavily_search.py b/api/core/tools/provider/builtin/tavily/tools/tavily_search.py
index ca6d8633e4..ea41ea3ca3 100644
--- a/api/core/tools/provider/builtin/tavily/tools/tavily_search.py
+++ b/api/core/tools/provider/builtin/tavily/tools/tavily_search.py
@@ -17,8 +17,6 @@ class TavilySearch:
Methods:
raw_results: Retrieves raw search results from the Tavily Search API.
- results: Retrieves cleaned search results from the Tavily Search API.
- clean_results: Cleans the raw search results.
"""
def __init__(self, api_key: str) -> None:
@@ -35,63 +33,62 @@ class TavilySearch:
dict: The raw search results.
"""
+ # Ensure required parameters are set
params["api_key"] = self.api_key
- if (
- "exclude_domains" in params
- and isinstance(params["exclude_domains"], str)
- and params["exclude_domains"] != "None"
- ):
- params["exclude_domains"] = params["exclude_domains"].split()
- else:
- params["exclude_domains"] = []
- if (
- "include_domains" in params
- and isinstance(params["include_domains"], str)
- and params["include_domains"] != "None"
- ):
- params["include_domains"] = params["include_domains"].split()
- else:
- params["include_domains"] = []
- response = requests.post(f"{TAVILY_API_URL}/search", json=params)
+ # Process parameters to ensure correct types
+ processed_params = self._process_params(params)
+
+ response = requests.post(f"{TAVILY_API_URL}/search", json=processed_params)
response.raise_for_status()
return response.json()
- def results(self, params: dict[str, Any]) -> list[dict]:
+ def _process_params(self, params: dict[str, Any]) -> dict:
"""
- Retrieves cleaned search results from the Tavily Search API.
+ Processes and validates the search parameters.
Args:
params (Dict[str, Any]): The search parameters.
Returns:
- list: The cleaned search results.
-
+ dict: The processed parameters.
"""
- raw_search_results = self.raw_results(params)
- return self.clean_results(raw_search_results["results"])
+ processed_params = {}
- def clean_results(self, results: list[dict]) -> list[dict]:
- """
- Cleans the raw search results.
+ for key, value in params.items():
+ if value is None or value == "None":
+ continue
+ if key in ["include_domains", "exclude_domains"]:
+ if isinstance(value, str):
+ # Split the string by commas or spaces and strip whitespace
+ processed_params[key] = [domain.strip() for domain in value.replace(",", " ").split()]
+ elif key in ["include_images", "include_image_descriptions", "include_answer", "include_raw_content"]:
+ # Ensure boolean type
+ if isinstance(value, str):
+ processed_params[key] = value.lower() == "true"
+ else:
+ processed_params[key] = bool(value)
+ elif key in ["max_results", "days"]:
+ if isinstance(value, str):
+ processed_params[key] = int(value)
+ else:
+ processed_params[key] = value
+ elif key in ["search_depth", "topic", "query", "api_key"]:
+ processed_params[key] = value
+ else:
+ # Unrecognized parameter
+ pass
- Args:
- results (list): The raw search results.
+ # Set defaults if not present
+ processed_params.setdefault("search_depth", "basic")
+ processed_params.setdefault("topic", "general")
+ processed_params.setdefault("max_results", 5)
- Returns:
- list: The cleaned search results.
+ # If topic is 'news', ensure 'days' is set
+ if processed_params.get("topic") == "news":
+ processed_params.setdefault("days", 3)
- """
- clean_results = []
- for result in results:
- clean_results.append(
- {
- "url": result["url"],
- "content": result["content"],
- }
- )
- # return clean results as a string
- return "\n".join([f"{res['url']}\n{res['content']}" for res in clean_results])
+ return processed_params
class TavilySearchTool(BuiltinTool):
@@ -111,14 +108,88 @@ class TavilySearchTool(BuiltinTool):
ToolInvokeMessage | list[ToolInvokeMessage]: The result of the Tavily search tool invocation.
"""
query = tool_parameters.get("query", "")
-
- api_key = self.runtime.credentials["tavily_api_key"]
+ api_key = self.runtime.credentials.get("tavily_api_key")
+ if not api_key:
+ return self.create_text_message(
+ "Tavily API key is missing. Please set the 'tavily_api_key' in credentials."
+ )
if not query:
- return self.create_text_message("Please input query")
+ return self.create_text_message("Please input a query.")
+
tavily_search = TavilySearch(api_key)
- results = tavily_search.results(tool_parameters)
- print(results)
- if not results:
- return self.create_text_message(f"No results found for '{query}' in Tavily")
+ try:
+ raw_results = tavily_search.raw_results(tool_parameters)
+ except requests.HTTPError as e:
+ return self.create_text_message(f"Error occurred while searching: {str(e)}")
+
+ if not raw_results.get("results"):
+ return self.create_text_message(f"No results found for '{query}' in Tavily.")
else:
- return self.create_text_message(text=results)
+ # Always return JSON message with all data
+ json_message = self.create_json_message(raw_results)
+
+ # Create text message based on user-selected parameters
+ text_message_content = self._format_results_as_text(raw_results, tool_parameters)
+ text_message = self.create_text_message(text=text_message_content)
+
+ return [json_message, text_message]
+
+ def _format_results_as_text(self, raw_results: dict, tool_parameters: dict[str, Any]) -> str:
+ """
+ Formats the raw results into a markdown text based on user-selected parameters.
+
+ Args:
+ raw_results (dict): The raw search results.
+ tool_parameters (dict): The tool parameters selected by the user.
+
+ Returns:
+ str: The formatted markdown text.
+ """
+ output_lines = []
+
+ # Include answer if requested
+ if tool_parameters.get("include_answer", False) and raw_results.get("answer"):
+ output_lines.append(f"**Answer:** {raw_results['answer']}\n")
+
+ # Include images if requested
+ if tool_parameters.get("include_images", False) and raw_results.get("images"):
+ output_lines.append("**Images:**\n")
+ for image in raw_results["images"]:
+ if tool_parameters.get("include_image_descriptions", False) and "description" in image:
+ output_lines.append(f"![{image['description']}]({image['url']})\n")
+ else:
+ output_lines.append(f"![Image]({image['url']})\n")
+
+ # Process each result
+ if "results" in raw_results:
+ for idx, result in enumerate(raw_results["results"], 1):
+ title = result.get("title", "No Title")
+ url = result.get("url", "")
+ content = result.get("content", "")
+ published_date = result.get("published_date", "")
+ score = result.get("score", "")
+
+ output_lines.append(f"### Result {idx}: [{title}]({url})\n")
+
+ # Include published date if available and topic is 'news'
+ if tool_parameters.get("topic") == "news" and published_date:
+ output_lines.append(f"**Published Date:** {published_date}\n")
+
+ output_lines.append(f"**URL:** {url}\n")
+
+ # Include score (relevance)
+ if score:
+ output_lines.append(f"**Relevance Score:** {score}\n")
+
+ # Include content
+ if content:
+ output_lines.append(f"**Content:**\n{content}\n")
+
+ # Include raw content if requested
+ if tool_parameters.get("include_raw_content", False) and result.get("raw_content"):
+ output_lines.append(f"**Raw Content:**\n{result['raw_content']}\n")
+
+ # Add a separator
+ output_lines.append("---\n")
+
+ return "\n".join(output_lines)
diff --git a/api/core/tools/provider/builtin/tavily/tools/tavily_search.yaml b/api/core/tools/provider/builtin/tavily/tools/tavily_search.yaml
index 88426056af..14b2829701 100644
--- a/api/core/tools/provider/builtin/tavily/tools/tavily_search.yaml
+++ b/api/core/tools/provider/builtin/tavily/tools/tavily_search.yaml
@@ -2,28 +2,24 @@ identity:
name: tavily_search
author: Yash Parmar
label:
- en_US: TavilySearch
- zh_Hans: TavilySearch
- pt_BR: TavilySearch
+ en_US: Tavily Search
+ zh_Hans: Tavily Search
description:
human:
- en_US: A tool for search engine built specifically for AI agents (LLMs), delivering real-time, accurate, and factual results at speed.
+ en_US: A search engine tool built specifically for AI agents (LLMs), delivering real-time, accurate, and factual results at speed.
zh_Hans: 专为人工智能代理 (LLM) 构建的搜索引擎工具,可快速提供实时、准确和真实的结果。
- pt_BR: A tool for search engine built specifically for AI agents (LLMs), delivering real-time, accurate, and factual results at speed.
llm: A tool for search engine built specifically for AI agents (LLMs), delivering real-time, accurate, and factual results at speed.
parameters:
- name: query
type: string
required: true
label:
- en_US: Query string
- zh_Hans: 查询语句
- pt_BR: Query string
+ en_US: Query
+ zh_Hans: 查询
human_description:
- en_US: used for searching
- zh_Hans: 用于搜索网页内容
- pt_BR: used for searching
- llm_description: key words for searching
+ en_US: The search query you want to execute with Tavily.
+ zh_Hans: 您想用 Tavily 执行的搜索查询。
+ llm_description: The search query.
form: llm
- name: search_depth
type: select
@@ -31,122 +27,118 @@ parameters:
label:
en_US: Search Depth
zh_Hans: 搜索深度
- pt_BR: Search Depth
human_description:
- en_US: The depth of search results
- zh_Hans: 搜索结果的深度
- pt_BR: The depth of search results
+ en_US: The depth of the search.
+ zh_Hans: 搜索的深度。
form: form
options:
- value: basic
label:
en_US: Basic
zh_Hans: 基本
- pt_BR: Basic
- value: advanced
label:
en_US: Advanced
zh_Hans: 高级
- pt_BR: Advanced
default: basic
+ - name: topic
+ type: select
+ required: false
+ label:
+ en_US: Topic
+ zh_Hans: 主题
+ human_description:
+ en_US: The category of the search.
+ zh_Hans: 搜索的类别。
+ form: form
+ options:
+ - value: general
+ label:
+ en_US: General
+ zh_Hans: 一般
+ - value: news
+ label:
+ en_US: News
+ zh_Hans: 新闻
+ default: general
+ - name: days
+ type: number
+ required: false
+ label:
+ en_US: Days
+ zh_Hans: 天数
+ human_description:
+ en_US: The number of days back from the current date to include in the search results (only applicable when "topic" is "news").
+ zh_Hans: 从当前日期起向前追溯的天数,以包含在搜索结果中(仅当“topic”为“news”时适用)。
+ form: form
+ min: 1
+ default: 3
+ - name: max_results
+ type: number
+ required: false
+ label:
+ en_US: Max Results
+ zh_Hans: 最大结果数
+ human_description:
+ en_US: The maximum number of search results to return.
+ zh_Hans: 要返回的最大搜索结果数。
+ form: form
+ min: 1
+ max: 20
+ default: 5
- name: include_images
type: boolean
required: false
label:
en_US: Include Images
zh_Hans: 包含图片
- pt_BR: Include Images
human_description:
- en_US: Include images in the search results
- zh_Hans: 在搜索结果中包含图片
- pt_BR: Include images in the search results
+ en_US: Include a list of query-related images in the response.
+ zh_Hans: 在响应中包含与查询相关的图片列表。
form: form
- options:
- - value: 'true'
- label:
- en_US: 'Yes'
- zh_Hans: 是
- pt_BR: 'Yes'
- - value: 'false'
- label:
- en_US: 'No'
- zh_Hans: 否
- pt_BR: 'No'
- default: 'false'
+ default: false
+ - name: include_image_descriptions
+ type: boolean
+ required: false
+ label:
+ en_US: Include Image Descriptions
+ zh_Hans: 包含图片描述
+ human_description:
+ en_US: When include_images is True, adds descriptive text for each image.
+ zh_Hans: 当 include_images 为 True 时,为每个图像添加描述文本。
+ form: form
+ default: false
- name: include_answer
type: boolean
required: false
label:
en_US: Include Answer
zh_Hans: 包含答案
- pt_BR: Include Answer
human_description:
- en_US: Include answers in the search results
- zh_Hans: 在搜索结果中包含答案
- pt_BR: Include answers in the search results
+ en_US: Include a short answer to the original query in the response.
+ zh_Hans: 在响应中包含对原始查询的简短回答。
form: form
- options:
- - value: 'true'
- label:
- en_US: 'Yes'
- zh_Hans: 是
- pt_BR: 'Yes'
- - value: 'false'
- label:
- en_US: 'No'
- zh_Hans: 否
- pt_BR: 'No'
- default: 'false'
+ default: false
- name: include_raw_content
type: boolean
required: false
label:
en_US: Include Raw Content
zh_Hans: 包含原始内容
- pt_BR: Include Raw Content
human_description:
- en_US: Include raw content in the search results
- zh_Hans: 在搜索结果中包含原始内容
- pt_BR: Include raw content in the search results
+ en_US: Include the cleaned and parsed HTML content of each search result.
+ zh_Hans: 包含每个搜索结果的已清理和解析的HTML内容。
form: form
- options:
- - value: 'true'
- label:
- en_US: 'Yes'
- zh_Hans: 是
- pt_BR: 'Yes'
- - value: 'false'
- label:
- en_US: 'No'
- zh_Hans: 否
- pt_BR: 'No'
- default: 'false'
- - name: max_results
- type: number
- required: false
- label:
- en_US: Max Results
- zh_Hans: 最大结果
- pt_BR: Max Results
- human_description:
- en_US: The number of maximum search results to return
- zh_Hans: 返回的最大搜索结果数
- pt_BR: The number of maximum search results to return
- form: form
- min: 1
- max: 20
- default: 5
+ default: false
- name: include_domains
type: string
required: false
label:
en_US: Include Domains
zh_Hans: 包含域
- pt_BR: Include Domains
human_description:
- en_US: A list of domains to specifically include in the search results
- zh_Hans: 在搜索结果中特别包含的域名列表
- pt_BR: A list of domains to specifically include in the search results
+ en_US: A comma-separated list of domains to specifically include in the search results.
+ zh_Hans: 要在搜索结果中特别包含的域的逗号分隔列表。
form: form
- name: exclude_domains
type: string
@@ -154,9 +146,7 @@ parameters:
label:
en_US: Exclude Domains
zh_Hans: 排除域
- pt_BR: Exclude Domains
human_description:
- en_US: A list of domains to specifically exclude from the search results
- zh_Hans: 从搜索结果中特别排除的域名列表
- pt_BR: A list of domains to specifically exclude from the search results
+ en_US: A comma-separated list of domains to specifically exclude from the search results.
+ zh_Hans: 要从搜索结果中特别排除的域的逗号分隔列表。
form: form
diff --git a/api/core/tools/provider/builtin/transcript/_assets/icon.svg b/api/core/tools/provider/builtin/transcript/_assets/icon.svg
new file mode 100644
index 0000000000..83b0700fec
--- /dev/null
+++ b/api/core/tools/provider/builtin/transcript/_assets/icon.svg
@@ -0,0 +1,11 @@
+
+
\ No newline at end of file
diff --git a/api/core/tools/provider/builtin/transcript/tools/transcript.py b/api/core/tools/provider/builtin/transcript/tools/transcript.py
new file mode 100644
index 0000000000..27f700efbd
--- /dev/null
+++ b/api/core/tools/provider/builtin/transcript/tools/transcript.py
@@ -0,0 +1,81 @@
+from typing import Any, Union
+from urllib.parse import parse_qs, urlparse
+
+from youtube_transcript_api import YouTubeTranscriptApi
+
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+
+
+class YouTubeTranscriptTool(BuiltinTool):
+ def _invoke(
+ self, user_id: str, tool_parameters: dict[str, Any]
+ ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]:
+ """
+ Invoke the YouTube transcript tool
+ """
+ try:
+ # Extract parameters with defaults
+ video_input = tool_parameters["video_id"]
+ language = tool_parameters.get("language")
+ output_format = tool_parameters.get("format", "text")
+ preserve_formatting = tool_parameters.get("preserve_formatting", False)
+ proxy = tool_parameters.get("proxy")
+ cookies = tool_parameters.get("cookies")
+
+ # Extract video ID from URL if needed
+ video_id = self._extract_video_id(video_input)
+
+ # Common kwargs for API calls
+ kwargs = {"proxies": {"https": proxy} if proxy else None, "cookies": cookies}
+
+ try:
+ if language:
+ transcript_list = YouTubeTranscriptApi.list_transcripts(video_id, **kwargs)
+ try:
+ transcript = transcript_list.find_transcript([language])
+ except:
+ # If requested language not found, try translating from English
+ transcript = transcript_list.find_transcript(["en"]).translate(language)
+ transcript_data = transcript.fetch()
+ else:
+ transcript_data = YouTubeTranscriptApi.get_transcript(
+ video_id, preserve_formatting=preserve_formatting, **kwargs
+ )
+
+ # Format output
+ formatter_class = {
+ "json": "JSONFormatter",
+ "pretty": "PrettyPrintFormatter",
+ "srt": "SRTFormatter",
+ "vtt": "WebVTTFormatter",
+ }.get(output_format)
+
+ if formatter_class:
+ from youtube_transcript_api import formatters
+
+ formatter = getattr(formatters, formatter_class)()
+ formatted_transcript = formatter.format_transcript(transcript_data)
+ else:
+ formatted_transcript = " ".join(entry["text"] for entry in transcript_data)
+
+ return self.create_text_message(text=formatted_transcript)
+
+ except Exception as e:
+ return self.create_text_message(text=f"Error getting transcript: {str(e)}")
+
+ except Exception as e:
+ return self.create_text_message(text=f"Error processing request: {str(e)}")
+
+ def _extract_video_id(self, video_input: str) -> str:
+ """
+ Extract video ID from URL or return as-is if already an ID
+ """
+ if "youtube.com" in video_input or "youtu.be" in video_input:
+ # Parse URL
+ parsed_url = urlparse(video_input)
+ if "youtube.com" in parsed_url.netloc:
+ return parse_qs(parsed_url.query)["v"][0]
+ else: # youtu.be
+ return parsed_url.path[1:]
+ return video_input # Assume it's already a video ID
diff --git a/api/core/tools/provider/builtin/transcript/tools/transcript.yaml b/api/core/tools/provider/builtin/transcript/tools/transcript.yaml
new file mode 100644
index 0000000000..c654634a6c
--- /dev/null
+++ b/api/core/tools/provider/builtin/transcript/tools/transcript.yaml
@@ -0,0 +1,101 @@
+identity:
+ name: free_youtube_transcript
+ author: Tao Wang
+ label:
+ en_US: Free YouTube Transcript API
+ zh_Hans: 免费获取 YouTube 转录
+description:
+ human:
+ en_US: Get transcript from a YouTube video for free.
+ zh_Hans: 免费获取 YouTube 视频的转录文案。
+ llm: A tool for retrieving transcript from YouTube videos.
+parameters:
+ - name: video_id
+ type: string
+ required: true
+ label:
+ en_US: Video ID/URL
+ zh_Hans: 视频 ID/URL
+ human_description:
+ en_US: Used to define the video from which the transcript will be fetched. You can find the ID in the video URL. For example - https://www.youtube.com/watch?v=video_id.
+ zh_Hans: 您要获取哪条视频的转录文案?您可以在视频链接中找到 ID。例如 - https://www.youtube.com/watch?v=video_id。
+ llm_description: Used to define the video from which the transcript will be fetched. For example - https://www.youtube.com/watch?v=video_id.
+ form: llm
+ - name: language
+ type: string
+ required: false
+ label:
+ en_US: Language Code
+ zh_Hans: 语言
+ human_description:
+ en_US: Language code (e.g. 'en', 'zh') for the transcript. Leave blank to auto-select.
+ zh_Hans: 字幕语言代码(如'en'、'zh')。留空则自动选择。
+ llm_description: Used to set the language for transcripts.
+ form: form
+ - name: format
+ type: select
+ required: false
+ default: text
+ options:
+ - value: text
+ label:
+ en_US: Plain Text
+ zh_Hans: 纯文本
+ - value: json
+ label:
+ en_US: JSON Format
+ zh_Hans: JSON 格式
+ - value: pretty
+ label:
+ en_US: Pretty Print Format
+ zh_Hans: 美化格式
+ - value: srt
+ label:
+ en_US: SRT Format
+ zh_Hans: SRT 格式
+ - value: vtt
+ label:
+ en_US: WebVTT Format
+ zh_Hans: WebVTT 格式
+ label:
+ en_US: Output Format
+ zh_Hans: 输出格式
+ human_description:
+ en_US: Format of the transcript output
+ zh_Hans: 字幕输出格式
+ llm_description: The format to output the transcript in. Options are text (plain text), json (raw transcript data), srt (SubRip format), or vtt (WebVTT format)
+ form: form
+ - name: preserve_formatting
+ type: boolean
+ required: false
+ default: false
+ label:
+ en_US: Preserve Formatting
+ zh_Hans: 保留格式
+ human_description:
+ en_US: Keep HTML formatting elements like <i> (italics) and <b> (bold)
+ zh_Hans: 保留 HTML 格式元素,如 <i>(斜体)和 <b>(粗体)
+ llm_description: Whether to preserve HTML formatting elements in the transcript text
+ form: form
+ - name: proxy
+ type: string
+ required: false
+ label:
+ en_US: HTTPS Proxy
+ zh_Hans: HTTPS 代理
+ human_description:
+ en_US: HTTPS proxy URL (e.g. https://user:pass@domain:port)
+ zh_Hans: HTTPS 代理地址(如 https://user:pass@domain:port)
+ llm_description: HTTPS proxy to use for the request. Format should be https://user:pass@domain:port
+ form: form
+ - name: cookies
+ type: string
+ required: false
+ label:
+ en_US: Cookies File Path
+ zh_Hans: Cookies 文件路径
+ human_description:
+ en_US: Path to cookies.txt file for accessing age-restricted videos
+ zh_Hans: 用于访问年龄限制视频的 cookies.txt 文件路径
+ llm_description: Path to a cookies.txt file containing YouTube cookies, needed for accessing age-restricted videos
+ form: form
diff --git a/api/core/tools/provider/builtin/transcript/transcript.py b/api/core/tools/provider/builtin/transcript/transcript.py
new file mode 100644
index 0000000000..4fda149988
--- /dev/null
+++ b/api/core/tools/provider/builtin/transcript/transcript.py
@@ -0,0 +1,11 @@
+from typing import Any
+
+from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController
+
+
+class YouTubeTranscriptProvider(BuiltinToolProviderController):
+ def _validate_credentials(self, credentials: dict[str, Any]) -> None:
+ """
+ No credentials needed for YouTube Transcript API
+ """
+ pass
diff --git a/api/core/tools/provider/builtin/transcript/transcript.yaml b/api/core/tools/provider/builtin/transcript/transcript.yaml
new file mode 100644
index 0000000000..0786b454c3
--- /dev/null
+++ b/api/core/tools/provider/builtin/transcript/transcript.yaml
@@ -0,0 +1,13 @@
+identity:
+ author: Tao Wang
+ name: transcript
+ label:
+ en_US: Transcript
+ zh_Hans: Transcript
+ description:
+ en_US: Get transcripts from YouTube videos
+ zh_Hans: 获取 YouTube 视频的字幕/转录文本
+ icon: icon.svg
+ tags:
+ - videos
+credentials_for_provider:
diff --git a/api/core/tools/tool/api_tool.py b/api/core/tools/tool/api_tool.py
index c779d704c3..0b4c5bd2c6 100644
--- a/api/core/tools/tool/api_tool.py
+++ b/api/core/tools/tool/api_tool.py
@@ -5,6 +5,7 @@ from urllib.parse import urlencode
import httpx
+from core.file.file_manager import download
from core.helper import ssrf_proxy
from core.tools.entities.tool_bundle import ApiToolBundle
from core.tools.entities.tool_entities import ToolInvokeMessage, ToolProviderType
@@ -138,6 +139,7 @@ class ApiTool(Tool):
path_params = {}
body = {}
cookies = {}
+ files = []
# check parameters
for parameter in self.api_bundle.openapi.get("parameters", []):
@@ -166,8 +168,12 @@ class ApiTool(Tool):
properties = body_schema.get("properties", {})
for name, property in properties.items():
if name in parameters:
- # convert type
- body[name] = self._convert_body_property_type(property, parameters[name])
+ if property.get("format") == "binary":
+ f = parameters[name]
+ files.append((name, (f.filename, download(f), f.mime_type)))
+ else:
+ # convert type
+ body[name] = self._convert_body_property_type(property, parameters[name])
elif name in required:
raise ToolParameterValidationError(
f"Missing required parameter {name} in operation {self.api_bundle.operation_id}"
@@ -182,7 +188,7 @@ class ApiTool(Tool):
for name, value in path_params.items():
url = url.replace(f"{{{name}}}", f"{value}")
- # parse http body data if needed, for GET/HEAD/OPTIONS/TRACE, the body is ignored
+ # parse http body data if needed
if "Content-Type" in headers:
if headers["Content-Type"] == "application/json":
body = json.dumps(body)
@@ -198,6 +204,7 @@ class ApiTool(Tool):
headers=headers,
cookies=cookies,
data=body,
+ files=files,
timeout=API_TOOL_DEFAULT_TIMEOUT,
follow_redirects=True,
)
diff --git a/api/core/tools/tool/workflow_tool.py b/api/core/tools/tool/workflow_tool.py
index 2ab72213ff..721fa06c54 100644
--- a/api/core/tools/tool/workflow_tool.py
+++ b/api/core/tools/tool/workflow_tool.py
@@ -175,7 +175,7 @@ class WorkflowTool(Tool):
files.append(file_dict)
except Exception as e:
- logger.exception(e)
+ logger.exception(f"Failed to transform file {file}")
else:
parameters_result[parameter.name] = tool_parameters.get(parameter.name)
diff --git a/api/core/tools/tool_file_manager.py b/api/core/tools/tool_file_manager.py
index ff56e20e87..5052f0897a 100644
--- a/api/core/tools/tool_file_manager.py
+++ b/api/core/tools/tool_file_manager.py
@@ -98,7 +98,7 @@ class ToolFileManager:
response.raise_for_status()
blob = response.content
except Exception as e:
- logger.exception(f"Failed to download file from {file_url}: {e}")
+ logger.exception(f"Failed to download file from {file_url}")
raise
mimetype = guess_type(file_url)[0] or "octet/stream"
diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py
index d2723df7b2..ac333162b6 100644
--- a/api/core/tools/tool_manager.py
+++ b/api/core/tools/tool_manager.py
@@ -388,7 +388,7 @@ class ToolManager:
yield provider
except Exception as e:
- logger.exception(f"load builtin provider {provider} error: {e}")
+ logger.exception(f"load builtin provider {provider}")
continue
# set builtin providers loaded
cls._builtin_providers_loaded = True
diff --git a/api/core/tools/utils/message_transformer.py b/api/core/tools/utils/message_transformer.py
index 1812d24571..e30c903a4b 100644
--- a/api/core/tools/utils/message_transformer.py
+++ b/api/core/tools/utils/message_transformer.py
@@ -40,7 +40,7 @@ class ToolFileMessageTransformer:
)
)
except Exception as e:
- logger.exception(e)
+ logger.exception(f"Failed to download image from {url}")
result.append(
ToolInvokeMessage(
type=ToolInvokeMessage.MessageType.TEXT,
diff --git a/api/core/tools/utils/parser.py b/api/core/tools/utils/parser.py
index 5867a11bb3..ae44b1b99d 100644
--- a/api/core/tools/utils/parser.py
+++ b/api/core/tools/utils/parser.py
@@ -161,6 +161,9 @@ class ApiBasedToolSchemaParser:
def _get_tool_parameter_type(parameter: dict) -> ToolParameter.ToolParameterType:
parameter = parameter or {}
typ = None
+ if parameter.get("format") == "binary":
+ return ToolParameter.ToolParameterType.FILE
+
if "type" in parameter:
typ = parameter["type"]
elif "schema" in parameter and "type" in parameter["schema"]:
diff --git a/api/core/tools/utils/text_processing_utils.py b/api/core/tools/utils/text_processing_utils.py
new file mode 100644
index 0000000000..6db9dfd0d9
--- /dev/null
+++ b/api/core/tools/utils/text_processing_utils.py
@@ -0,0 +1,16 @@
+import re
+
+
+def remove_leading_symbols(text: str) -> str:
+ """
+ Remove leading punctuation or symbols from the given text.
+
+ Args:
+ text (str): The input text to process.
+
+ Returns:
+ str: The text with leading punctuation or symbols removed.
+ """
+ # Match Unicode ranges for punctuation and symbols
+ pattern = r"^[\u2000-\u206F\u2E00-\u2E7F\u3000-\u303F!\"#$%&'()*+,\-./:;<=>?@\[\]^_`{|}~]+"
+ return re.sub(pattern, "", text)
diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py
index f07ad4de11..60a5901b21 100644
--- a/api/core/workflow/graph_engine/graph_engine.py
+++ b/api/core/workflow/graph_engine/graph_engine.py
@@ -172,7 +172,7 @@ class GraphEngine:
"answer"
].strip()
except Exception as e:
- logger.exception(f"Graph run failed: {str(e)}")
+ logger.exception("Graph run failed")
yield GraphRunFailedEvent(error=str(e))
return
@@ -692,7 +692,7 @@ class GraphEngine:
)
return
except Exception as e:
- logger.exception(f"Node {node_instance.node_data.title} run failed: {str(e)}")
+ logger.exception(f"Node {node_instance.node_data.title} run failed")
raise e
finally:
db.session.close()
diff --git a/api/core/workflow/nodes/base/node.py b/api/core/workflow/nodes/base/node.py
index 1433c8eaed..1871fff618 100644
--- a/api/core/workflow/nodes/base/node.py
+++ b/api/core/workflow/nodes/base/node.py
@@ -69,7 +69,7 @@ class BaseNode(Generic[GenericNodeData]):
try:
result = self._run()
except Exception as e:
- logger.exception(f"Node {self.node_id} failed to run: {e}")
+ logger.exception(f"Node {self.node_id} failed to run")
result = NodeRunResult(
status=WorkflowNodeExecutionStatus.FAILED,
error=str(e),
diff --git a/api/core/workflow/nodes/http_request/entities.py b/api/core/workflow/nodes/http_request/entities.py
index 36ded104c1..5e39ef79d1 100644
--- a/api/core/workflow/nodes/http_request/entities.py
+++ b/api/core/workflow/nodes/http_request/entities.py
@@ -1,4 +1,6 @@
+import mimetypes
from collections.abc import Sequence
+from email.message import Message
from typing import Any, Literal, Optional
import httpx
@@ -7,14 +9,6 @@ from pydantic import BaseModel, Field, ValidationInfo, field_validator
from configs import dify_config
from core.workflow.nodes.base import BaseNodeData
-NON_FILE_CONTENT_TYPES = (
- "application/json",
- "application/xml",
- "text/html",
- "text/plain",
- "application/x-www-form-urlencoded",
-)
-
class HttpRequestNodeAuthorizationConfig(BaseModel):
type: Literal["basic", "bearer", "custom"]
@@ -93,13 +87,53 @@ class Response:
@property
def is_file(self):
- content_type = self.content_type
+ """
+ Determine if the response contains a file by checking:
+ 1. Content-Disposition header (RFC 6266)
+ 2. Content characteristics
+ 3. MIME type analysis
+ """
+ content_type = self.content_type.split(";")[0].strip().lower()
content_disposition = self.response.headers.get("content-disposition", "")
- return "attachment" in content_disposition or (
- not any(non_file in content_type for non_file in NON_FILE_CONTENT_TYPES)
- and any(file_type in content_type for file_type in ("application/", "image/", "audio/", "video/"))
- )
+ # Check if it's explicitly marked as an attachment
+ if content_disposition:
+ msg = Message()
+ msg["content-disposition"] = content_disposition
+ disp_type = msg.get_content_disposition() # Returns 'attachment', 'inline', or None
+ filename = msg.get_filename() # Returns filename if present, None otherwise
+ if disp_type == "attachment" or filename is not None:
+ return True
+
+ # For application types, try to detect if it's a text-based format
+ if content_type.startswith("application/"):
+ # Common text-based application types
+ if any(
+ text_type in content_type
+ for text_type in ("json", "xml", "javascript", "x-www-form-urlencoded", "yaml", "graphql")
+ ):
+ return False
+
+ # Try to detect if content is text-based by sampling first few bytes
+ try:
+ # Sample first 1024 bytes for text detection
+ content_sample = self.response.content[:1024]
+ content_sample.decode("utf-8")
+ # If we can decode as UTF-8 and find common text patterns, likely not a file
+ text_markers = (b"{", b"[", b"<", b"function", b"var ", b"const ", b"let ")
+ if any(marker in content_sample for marker in text_markers):
+ return False
+ except UnicodeDecodeError:
+ # If we can't decode as UTF-8, likely a binary file
+ return True
+
+ # For other types, use MIME type analysis
+ main_type, _ = mimetypes.guess_type("dummy" + (mimetypes.guess_extension(content_type) or ""))
+ if main_type:
+ return main_type.split("/")[0] in ("application", "image", "audio", "video")
+
+ # For unknown types, check if it's a media type
+ return any(media_type in content_type for media_type in ("image/", "audio/", "video/"))
@property
def content_type(self) -> str:
diff --git a/api/extensions/ext_celery.py b/api/extensions/ext_celery.py
index 42012eee8e..1b78e36a57 100644
--- a/api/extensions/ext_celery.py
+++ b/api/extensions/ext_celery.py
@@ -68,6 +68,7 @@ def init_app(app: Flask) -> Celery:
"schedule.clean_unused_datasets_task",
"schedule.create_tidb_serverless_task",
"schedule.update_tidb_serverless_status_task",
+ "schedule.clean_messages",
]
day = dify_config.CELERY_BEAT_SCHEDULER_TIME
beat_schedule = {
@@ -87,6 +88,10 @@ def init_app(app: Flask) -> Celery:
"task": "schedule.update_tidb_serverless_status_task.update_tidb_serverless_status_task",
"schedule": crontab(minute="30", hour="*"),
},
+ "clean_messages": {
+ "task": "schedule.clean_messages.clean_messages",
+ "schedule": timedelta(days=day),
+ },
}
celery_app.conf.update(beat_schedule=beat_schedule, imports=imports)
diff --git a/api/extensions/ext_redis.py b/api/extensions/ext_redis.py
index e1f8409f21..36f06c1104 100644
--- a/api/extensions/ext_redis.py
+++ b/api/extensions/ext_redis.py
@@ -1,11 +1,12 @@
import redis
+from redis.cluster import ClusterNode, RedisCluster
from redis.connection import Connection, SSLConnection
from redis.sentinel import Sentinel
from configs import dify_config
-class RedisClientWrapper(redis.Redis):
+class RedisClientWrapper:
"""
A wrapper class for the Redis client that addresses the issue where the global
`redis_client` variable cannot be updated when a new Redis instance is returned
@@ -71,6 +72,12 @@ def init_app(app):
)
master = sentinel.master_for(dify_config.REDIS_SENTINEL_SERVICE_NAME, **redis_params)
redis_client.initialize(master)
+ elif dify_config.REDIS_USE_CLUSTERS:
+ nodes = [
+            ClusterNode(host=node.split(":")[0], port=int(node.split(":")[1]))
+ for node in dify_config.REDIS_CLUSTERS.split(",")
+ ]
+ redis_client.initialize(RedisCluster(startup_nodes=nodes, password=dify_config.REDIS_CLUSTERS_PASSWORD))
else:
redis_params.update(
{
diff --git a/api/extensions/ext_storage.py b/api/extensions/ext_storage.py
index 86fadf23d7..fa88da68b7 100644
--- a/api/extensions/ext_storage.py
+++ b/api/extensions/ext_storage.py
@@ -70,7 +70,7 @@ class Storage:
try:
self.storage_runner.save(filename, data)
except Exception as e:
- logging.exception("Failed to save file: %s", e)
+            logging.exception(f"Failed to save file {filename}")
raise e
def load(self, filename: str, /, *, stream: bool = False) -> Union[bytes, Generator]:
@@ -80,42 +80,42 @@ class Storage:
else:
return self.load_once(filename)
except Exception as e:
- logging.exception("Failed to load file: %s", e)
+            logging.exception(f"Failed to load file {filename}")
raise e
def load_once(self, filename: str) -> bytes:
try:
return self.storage_runner.load_once(filename)
except Exception as e:
- logging.exception("Failed to load_once file: %s", e)
+            logging.exception(f"Failed to load_once file {filename}")
raise e
def load_stream(self, filename: str) -> Generator:
try:
return self.storage_runner.load_stream(filename)
except Exception as e:
- logging.exception("Failed to load_stream file: %s", e)
+            logging.exception(f"Failed to load_stream file {filename}")
raise e
def download(self, filename, target_filepath):
try:
self.storage_runner.download(filename, target_filepath)
except Exception as e:
- logging.exception("Failed to download file: %s", e)
+            logging.exception(f"Failed to download file {filename}")
raise e
def exists(self, filename):
try:
return self.storage_runner.exists(filename)
except Exception as e:
- logging.exception("Failed to check file exists: %s", e)
+            logging.exception(f"Failed to check file exists {filename}")
raise e
def delete(self, filename):
try:
return self.storage_runner.delete(filename)
except Exception as e:
- logging.exception("Failed to delete file: %s", e)
+            logging.exception(f"Failed to delete file {filename}")
raise e
diff --git a/api/factories/file_factory.py b/api/factories/file_factory.py
index 15e9d7f34f..8cb45f194b 100644
--- a/api/factories/file_factory.py
+++ b/api/factories/file_factory.py
@@ -169,6 +169,7 @@ def _get_remote_file_info(url: str):
mime_type = mimetypes.guess_type(url)[0] or ""
file_size = -1
filename = url.split("/")[-1].split("?")[0] or "unknown_file"
+ mime_type = mime_type or mimetypes.guess_type(filename)[0]
resp = ssrf_proxy.head(url, follow_redirects=True)
if resp.status_code == httpx.codes.OK:
@@ -233,10 +234,10 @@ def _is_file_valid_with_config(*, file: File, config: FileUploadConfig) -> bool:
if config.allowed_file_types and file.type not in config.allowed_file_types and file.type != FileType.CUSTOM:
return False
- if config.allowed_extensions and file.extension not in config.allowed_extensions:
+ if config.allowed_file_extensions and file.extension not in config.allowed_file_extensions:
return False
- if config.allowed_upload_methods and file.transfer_method not in config.allowed_upload_methods:
+ if config.allowed_file_upload_methods and file.transfer_method not in config.allowed_file_upload_methods:
return False
if file.type == FileType.IMAGE and config.image_config:
diff --git a/api/libs/smtp.py b/api/libs/smtp.py
index d57d99f3b7..2325d69a41 100644
--- a/api/libs/smtp.py
+++ b/api/libs/smtp.py
@@ -39,13 +39,13 @@ class SMTPClient:
smtp.sendmail(self._from, mail["to"], msg.as_string())
except smtplib.SMTPException as e:
- logging.exception(f"SMTP error occurred: {str(e)}")
+ logging.exception("SMTP error occurred")
raise
except TimeoutError as e:
- logging.exception(f"Timeout occurred while sending email: {str(e)}")
+ logging.exception("Timeout occurred while sending email")
raise
except Exception as e:
- logging.exception(f"Unexpected error occurred while sending email: {str(e)}")
+ logging.exception(f"Unexpected error occurred while sending email to {mail['to']}")
raise
finally:
if smtp:
diff --git a/api/migrations/versions/2024_11_12_0925-01d6889832f7_add_created_at_index_for_messages.py b/api/migrations/versions/2024_11_12_0925-01d6889832f7_add_created_at_index_for_messages.py
new file mode 100644
index 0000000000..d94508edcf
--- /dev/null
+++ b/api/migrations/versions/2024_11_12_0925-01d6889832f7_add_created_at_index_for_messages.py
@@ -0,0 +1,31 @@
+"""add_created_at_index_for_messages
+
+Revision ID: 01d6889832f7
+Revises: 09a8d1878d9b
+Create Date: 2024-11-12 09:25:05.527827
+
+"""
+from alembic import op
+import models as models
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '01d6889832f7'
+down_revision = '09a8d1878d9b'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table('messages', schema=None) as batch_op:
+ batch_op.create_index('message_created_at_idx', ['created_at'], unique=False)
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table('messages', schema=None) as batch_op:
+ batch_op.drop_index('message_created_at_idx')
+ # ### end Alembic commands ###
diff --git a/api/models/dataset.py b/api/models/dataset.py
index a1a626d7e4..a8b2c419d1 100644
--- a/api/models/dataset.py
+++ b/api/models/dataset.py
@@ -679,7 +679,7 @@ class DatasetKeywordTable(db.Model):
return json.loads(keyword_table_text.decode("utf-8"), cls=SetDecoder)
return None
except Exception as e:
- logging.exception(str(e))
+ logging.exception(f"Failed to load keyword table from file: {file_key}")
return None
diff --git a/api/models/model.py b/api/models/model.py
index e909d53e3e..b7c89ce97c 100644
--- a/api/models/model.py
+++ b/api/models/model.py
@@ -719,6 +719,7 @@ class Message(db.Model):
db.Index("message_end_user_idx", "app_id", "from_source", "from_end_user_id"),
db.Index("message_account_idx", "app_id", "from_source", "from_account_id"),
db.Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"),
+ db.Index("message_created_at_idx", "created_at"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
diff --git a/api/models/workflow.py b/api/models/workflow.py
index 4f0e9a5e03..c6b3000083 100644
--- a/api/models/workflow.py
+++ b/api/models/workflow.py
@@ -169,9 +169,9 @@ class Workflow(db.Model):
)
features["file_upload"]["enabled"] = image_enabled
features["file_upload"]["number_limits"] = image_number_limits
- features["file_upload"]["allowed_upload_methods"] = image_transfer_methods
+ features["file_upload"]["allowed_file_upload_methods"] = image_transfer_methods
features["file_upload"]["allowed_file_types"] = ["image"]
- features["file_upload"]["allowed_extensions"] = []
+ features["file_upload"]["allowed_file_extensions"] = []
del features["file_upload"]["image"]
self._features = json.dumps(features)
return self._features
diff --git a/api/poetry.lock b/api/poetry.lock
index 74c2ef5dc6..6021ae5c74 100644
--- a/api/poetry.lock
+++ b/api/poetry.lock
@@ -125,18 +125,36 @@ speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"]
[[package]]
name = "aiohttp-retry"
-version = "2.9.0"
+version = "2.9.1"
description = "Simple retry client for aiohttp"
optional = false
python-versions = ">=3.7"
files = [
- {file = "aiohttp_retry-2.9.0-py3-none-any.whl", hash = "sha256:7661af92471e9a96c69d9b8f32021360272073397e6a15bc44c1726b12f46056"},
- {file = "aiohttp_retry-2.9.0.tar.gz", hash = "sha256:92c47f1580040208bac95d9a8389a87227ef22758530f2e3f4683395e42c41b5"},
+ {file = "aiohttp_retry-2.9.1-py3-none-any.whl", hash = "sha256:66d2759d1921838256a05a3f80ad7e724936f083e35be5abb5e16eed6be6dc54"},
+ {file = "aiohttp_retry-2.9.1.tar.gz", hash = "sha256:8eb75e904ed4ee5c2ec242fefe85bf04240f685391c4879d8f541d6028ff01f1"},
]
[package.dependencies]
aiohttp = "*"
+[[package]]
+name = "aiomysql"
+version = "0.2.0"
+description = "MySQL driver for asyncio."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "aiomysql-0.2.0-py3-none-any.whl", hash = "sha256:b7c26da0daf23a5ec5e0b133c03d20657276e4eae9b73e040b72787f6f6ade0a"},
+ {file = "aiomysql-0.2.0.tar.gz", hash = "sha256:558b9c26d580d08b8c5fd1be23c5231ce3aeff2dadad989540fee740253deb67"},
+]
+
+[package.dependencies]
+PyMySQL = ">=1.0"
+
+[package.extras]
+rsa = ["PyMySQL[rsa] (>=1.0)"]
+sa = ["sqlalchemy (>=1.3,<1.4)"]
+
[[package]]
name = "aiosignal"
version = "1.3.1"
@@ -153,13 +171,13 @@ frozenlist = ">=1.1.0"
[[package]]
name = "alembic"
-version = "1.13.3"
+version = "1.14.0"
description = "A database migration tool for SQLAlchemy."
optional = false
python-versions = ">=3.8"
files = [
- {file = "alembic-1.13.3-py3-none-any.whl", hash = "sha256:908e905976d15235fae59c9ac42c4c5b75cfcefe3d27c0fbf7ae15a37715d80e"},
- {file = "alembic-1.13.3.tar.gz", hash = "sha256:203503117415561e203aa14541740643a611f641517f0209fcae63e9fa09f1a2"},
+ {file = "alembic-1.14.0-py3-none-any.whl", hash = "sha256:99bd884ca390466db5e27ffccff1d179ec5c05c965cfefc0607e69f9e411cb25"},
+ {file = "alembic-1.14.0.tar.gz", hash = "sha256:b00892b53b3642d0b8dbedba234dbf1924b69be83a9a769d5a624b01094e304b"},
]
[package.dependencies]
@@ -392,13 +410,13 @@ aliyun-python-sdk-core = ">=2.11.5"
[[package]]
name = "amqp"
-version = "5.2.0"
+version = "5.3.1"
description = "Low-level AMQP client for Python (fork of amqplib)."
optional = false
python-versions = ">=3.6"
files = [
- {file = "amqp-5.2.0-py3-none-any.whl", hash = "sha256:827cb12fb0baa892aad844fd95258143bce4027fdac4fccddbc43330fd281637"},
- {file = "amqp-5.2.0.tar.gz", hash = "sha256:a1ecff425ad063ad42a486c902807d1482311481c8ad95a72694b2975e75f7fd"},
+ {file = "amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2"},
+ {file = "amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432"},
]
[package.dependencies]
@@ -553,13 +571,13 @@ cryptography = "*"
[[package]]
name = "azure-ai-inference"
-version = "1.0.0b5"
-description = "Microsoft Azure Ai Inference Client Library for Python"
+version = "1.0.0b6"
+description = "Microsoft Azure AI Inference Client Library for Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "azure_ai_inference-1.0.0b5-py3-none-any.whl", hash = "sha256:0147653088033f1fd059d5f4bd0fedac82529fdcc7a0d2183d9508b3f80cf549"},
- {file = "azure_ai_inference-1.0.0b5.tar.gz", hash = "sha256:c95b490bcd670ccdeb1048dc2b45e0f8252a4d69a348ca15d4510d327b64dd0d"},
+ {file = "azure_ai_inference-1.0.0b6-py3-none-any.whl", hash = "sha256:5699ad78d70ec2d227a5eff2c1bafc845018f6624edc5b03589dfff861c54958"},
+ {file = "azure_ai_inference-1.0.0b6.tar.gz", hash = "sha256:b8ac941de1e69151bad464191e18856d4e74f962ae03235da137a9a326143676"},
]
[package.dependencies]
@@ -569,6 +587,7 @@ typing-extensions = ">=4.6.0"
[package.extras]
opentelemetry = ["azure-core-tracing-opentelemetry"]
+prompts = ["pyyaml"]
[[package]]
name = "azure-ai-ml"
@@ -619,13 +638,13 @@ files = [
[[package]]
name = "azure-core"
-version = "1.31.0"
+version = "1.32.0"
description = "Microsoft Azure Core Library for Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "azure_core-1.31.0-py3-none-any.whl", hash = "sha256:22954de3777e0250029360ef31d80448ef1be13b80a459bff80ba7073379e2cd"},
- {file = "azure_core-1.31.0.tar.gz", hash = "sha256:656a0dd61e1869b1506b7c6a3b31d62f15984b1a573d6326f6aa2f3e4123284b"},
+ {file = "azure_core-1.32.0-py3-none-any.whl", hash = "sha256:eac191a0efb23bfa83fddf321b27b122b4ec847befa3091fa736a5c32c50d7b4"},
+ {file = "azure_core-1.32.0.tar.gz", hash = "sha256:22b3c35d6b2dae14990f6c1be2912bf23ffe50b220e708a28ab1bb92b1c730e5"},
]
[package.dependencies]
@@ -655,17 +674,17 @@ msal-extensions = ">=0.3.0"
[[package]]
name = "azure-mgmt-core"
-version = "1.4.0"
+version = "1.5.0"
description = "Microsoft Azure Management Core Library for Python"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "azure-mgmt-core-1.4.0.zip", hash = "sha256:d195208340094f98e5a6661b781cde6f6a051e79ce317caabd8ff97030a9b3ae"},
- {file = "azure_mgmt_core-1.4.0-py3-none-any.whl", hash = "sha256:81071675f186a585555ef01816f2774d49c1c9024cb76e5720c3c0f6b337bb7d"},
+ {file = "azure_mgmt_core-1.5.0-py3-none-any.whl", hash = "sha256:18aaa5a723ee8ae05bf1bfc9f6d0ffb996631c7ea3c922cc86f522973ce07b5f"},
+ {file = "azure_mgmt_core-1.5.0.tar.gz", hash = "sha256:380ae3dfa3639f4a5c246a7db7ed2d08374e88230fd0da3eb899f7c11e5c441a"},
]
[package.dependencies]
-azure-core = ">=1.26.2,<2.0.0"
+azure-core = ">=1.31.0"
[[package]]
name = "azure-storage-blob"
@@ -701,13 +720,13 @@ msrest = ">=0.6.21"
[[package]]
name = "azure-storage-file-share"
-version = "12.19.0"
+version = "12.20.0"
description = "Microsoft Azure Azure File Share Storage Client Library for Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "azure_storage_file_share-12.19.0-py3-none-any.whl", hash = "sha256:eac6cf1a454aba58af4e6ba450b36d16aa1d0c49679fb64ea8756bb896698c5b"},
- {file = "azure_storage_file_share-12.19.0.tar.gz", hash = "sha256:ea7a4174dc6c52f50ac8c30f228159fcc3675d1f8ba771b8d0efcbc310740278"},
+ {file = "azure_storage_file_share-12.20.0-py3-none-any.whl", hash = "sha256:fd5c4f09d7784d68b8ed3de473b7525904f1c4b115f9cd200c838b0ee720cb5f"},
+ {file = "azure_storage_file_share-12.20.0.tar.gz", hash = "sha256:f120fc67bae0a84c1b54d06faa70df351be14d1395b9a085350e833f7d347a65"},
]
[package.dependencies]
@@ -817,13 +836,13 @@ files = [
[[package]]
name = "blinker"
-version = "1.8.2"
+version = "1.9.0"
description = "Fast, simple object-to-object and broadcast signaling"
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
files = [
- {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"},
- {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"},
+ {file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"},
+ {file = "blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"},
]
[[package]]
@@ -847,13 +866,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
-version = "1.35.52"
+version = "1.35.63"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">=3.8"
files = [
- {file = "botocore-1.35.52-py3-none-any.whl", hash = "sha256:cdbb5e43c9c3a977763e2a10d3b8b9c405d51279f9fcfd4ca4800763b22acba5"},
- {file = "botocore-1.35.52.tar.gz", hash = "sha256:1fe7485ea13d638b089103addd818c12984ff1e4d208de15f180b1e25ad944c5"},
+ {file = "botocore-1.35.63-py3-none-any.whl", hash = "sha256:0ca1200694a4c0a3fa846795d8e8a08404c214e21195eb9e010c4b8a4ca78a4a"},
+ {file = "botocore-1.35.63.tar.gz", hash = "sha256:2b8196bab0a997d206c3d490b52e779ef47dffb68c57c685443f77293aca1589"},
]
[package.dependencies]
@@ -932,10 +951,6 @@ files = [
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"},
{file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"},
{file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"},
{file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"},
@@ -948,14 +963,8 @@ files = [
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"},
{file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"},
{file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"},
- {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"},
- {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"},
{file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"},
@@ -966,24 +975,8 @@ files = [
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"},
{file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"},
{file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"},
- {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"},
- {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"},
- {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"},
- {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"},
- {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"},
- {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"},
- {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"},
- {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"},
- {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"},
- {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"},
- {file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"},
- {file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"},
{file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"},
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"},
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"},
@@ -993,10 +986,6 @@ files = [
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"},
{file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"},
{file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"},
{file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"},
@@ -1008,10 +997,6 @@ files = [
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"},
{file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"},
{file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"},
{file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"},
@@ -1024,10 +1009,6 @@ files = [
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"},
{file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"},
{file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"},
{file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"},
@@ -1040,10 +1021,6 @@ files = [
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"},
{file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"},
{file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"},
{file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},
@@ -1745,76 +1722,65 @@ cron = ["capturer (>=2.4)"]
[[package]]
name = "contourpy"
-version = "1.3.0"
+version = "1.3.1"
description = "Python library for calculating contours of 2D quadrilateral grids"
optional = false
-python-versions = ">=3.9"
+python-versions = ">=3.10"
files = [
- {file = "contourpy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:880ea32e5c774634f9fcd46504bf9f080a41ad855f4fef54f5380f5133d343c7"},
- {file = "contourpy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:76c905ef940a4474a6289c71d53122a4f77766eef23c03cd57016ce19d0f7b42"},
- {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92f8557cbb07415a4d6fa191f20fd9d2d9eb9c0b61d1b2f52a8926e43c6e9af7"},
- {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36f965570cff02b874773c49bfe85562b47030805d7d8360748f3eca570f4cab"},
- {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cacd81e2d4b6f89c9f8a5b69b86490152ff39afc58a95af002a398273e5ce589"},
- {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69375194457ad0fad3a839b9e29aa0b0ed53bb54db1bfb6c3ae43d111c31ce41"},
- {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a52040312b1a858b5e31ef28c2e865376a386c60c0e248370bbea2d3f3b760d"},
- {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3faeb2998e4fcb256542e8a926d08da08977f7f5e62cf733f3c211c2a5586223"},
- {file = "contourpy-1.3.0-cp310-cp310-win32.whl", hash = "sha256:36e0cff201bcb17a0a8ecc7f454fe078437fa6bda730e695a92f2d9932bd507f"},
- {file = "contourpy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:87ddffef1dbe5e669b5c2440b643d3fdd8622a348fe1983fad7a0f0ccb1cd67b"},
- {file = "contourpy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fa4c02abe6c446ba70d96ece336e621efa4aecae43eaa9b030ae5fb92b309ad"},
- {file = "contourpy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:834e0cfe17ba12f79963861e0f908556b2cedd52e1f75e6578801febcc6a9f49"},
- {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbc4c3217eee163fa3984fd1567632b48d6dfd29216da3ded3d7b844a8014a66"},
- {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4865cd1d419e0c7a7bf6de1777b185eebdc51470800a9f42b9e9decf17762081"},
- {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:303c252947ab4b14c08afeb52375b26781ccd6a5ccd81abcdfc1fafd14cf93c1"},
- {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637f674226be46f6ba372fd29d9523dd977a291f66ab2a74fbeb5530bb3f445d"},
- {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:76a896b2f195b57db25d6b44e7e03f221d32fe318d03ede41f8b4d9ba1bff53c"},
- {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e1fd23e9d01591bab45546c089ae89d926917a66dceb3abcf01f6105d927e2cb"},
- {file = "contourpy-1.3.0-cp311-cp311-win32.whl", hash = "sha256:d402880b84df3bec6eab53cd0cf802cae6a2ef9537e70cf75e91618a3801c20c"},
- {file = "contourpy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:6cb6cc968059db9c62cb35fbf70248f40994dfcd7aa10444bbf8b3faeb7c2d67"},
- {file = "contourpy-1.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:570ef7cf892f0afbe5b2ee410c507ce12e15a5fa91017a0009f79f7d93a1268f"},
- {file = "contourpy-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:da84c537cb8b97d153e9fb208c221c45605f73147bd4cadd23bdae915042aad6"},
- {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0be4d8425bfa755e0fd76ee1e019636ccc7c29f77a7c86b4328a9eb6a26d0639"},
- {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c0da700bf58f6e0b65312d0a5e695179a71d0163957fa381bb3c1f72972537c"},
- {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb8b141bb00fa977d9122636b16aa67d37fd40a3d8b52dd837e536d64b9a4d06"},
- {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3634b5385c6716c258d0419c46d05c8aa7dc8cb70326c9a4fb66b69ad2b52e09"},
- {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0dce35502151b6bd35027ac39ba6e5a44be13a68f55735c3612c568cac3805fd"},
- {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea348f053c645100612b333adc5983d87be69acdc6d77d3169c090d3b01dc35"},
- {file = "contourpy-1.3.0-cp312-cp312-win32.whl", hash = "sha256:90f73a5116ad1ba7174341ef3ea5c3150ddf20b024b98fb0c3b29034752c8aeb"},
- {file = "contourpy-1.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:b11b39aea6be6764f84360fce6c82211a9db32a7c7de8fa6dd5397cf1d079c3b"},
- {file = "contourpy-1.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3e1c7fa44aaae40a2247e2e8e0627f4bea3dd257014764aa644f319a5f8600e3"},
- {file = "contourpy-1.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:364174c2a76057feef647c802652f00953b575723062560498dc7930fc9b1cb7"},
- {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32b238b3b3b649e09ce9aaf51f0c261d38644bdfa35cbaf7b263457850957a84"},
- {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d51fca85f9f7ad0b65b4b9fe800406d0d77017d7270d31ec3fb1cc07358fdea0"},
- {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:732896af21716b29ab3e988d4ce14bc5133733b85956316fb0c56355f398099b"},
- {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d73f659398a0904e125280836ae6f88ba9b178b2fed6884f3b1f95b989d2c8da"},
- {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c6c7c2408b7048082932cf4e641fa3b8ca848259212f51c8c59c45aa7ac18f14"},
- {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f317576606de89da6b7e0861cf6061f6146ead3528acabff9236458a6ba467f8"},
- {file = "contourpy-1.3.0-cp313-cp313-win32.whl", hash = "sha256:31cd3a85dbdf1fc002280c65caa7e2b5f65e4a973fcdf70dd2fdcb9868069294"},
- {file = "contourpy-1.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4553c421929ec95fb07b3aaca0fae668b2eb5a5203d1217ca7c34c063c53d087"},
- {file = "contourpy-1.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:345af746d7766821d05d72cb8f3845dfd08dd137101a2cb9b24de277d716def8"},
- {file = "contourpy-1.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3bb3808858a9dc68f6f03d319acd5f1b8a337e6cdda197f02f4b8ff67ad2057b"},
- {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:420d39daa61aab1221567b42eecb01112908b2cab7f1b4106a52caaec8d36973"},
- {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d63ee447261e963af02642ffcb864e5a2ee4cbfd78080657a9880b8b1868e18"},
- {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:167d6c890815e1dac9536dca00828b445d5d0df4d6a8c6adb4a7ec3166812fa8"},
- {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:710a26b3dc80c0e4febf04555de66f5fd17e9cf7170a7b08000601a10570bda6"},
- {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:75ee7cb1a14c617f34a51d11fa7524173e56551646828353c4af859c56b766e2"},
- {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:33c92cdae89ec5135d036e7218e69b0bb2851206077251f04a6c4e0e21f03927"},
- {file = "contourpy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a11077e395f67ffc2c44ec2418cfebed032cd6da3022a94fc227b6faf8e2acb8"},
- {file = "contourpy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e8134301d7e204c88ed7ab50028ba06c683000040ede1d617298611f9dc6240c"},
- {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12968fdfd5bb45ffdf6192a590bd8ddd3ba9e58360b29683c6bb71a7b41edca"},
- {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd2a0fc506eccaaa7595b7e1418951f213cf8255be2600f1ea1b61e46a60c55f"},
- {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfb5c62ce023dfc410d6059c936dcf96442ba40814aefbfa575425a3a7f19dc"},
- {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68a32389b06b82c2fdd68276148d7b9275b5f5cf13e5417e4252f6d1a34f72a2"},
- {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:94e848a6b83da10898cbf1311a815f770acc9b6a3f2d646f330d57eb4e87592e"},
- {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d78ab28a03c854a873787a0a42254a0ccb3cb133c672f645c9f9c8f3ae9d0800"},
- {file = "contourpy-1.3.0-cp39-cp39-win32.whl", hash = "sha256:81cb5ed4952aae6014bc9d0421dec7c5835c9c8c31cdf51910b708f548cf58e5"},
- {file = "contourpy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:14e262f67bd7e6eb6880bc564dcda30b15e351a594657e55b7eec94b6ef72843"},
- {file = "contourpy-1.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fe41b41505a5a33aeaed2a613dccaeaa74e0e3ead6dd6fd3a118fb471644fd6c"},
- {file = "contourpy-1.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca7e17a65f72a5133bdbec9ecf22401c62bcf4821361ef7811faee695799779"},
- {file = "contourpy-1.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ec4dc6bf570f5b22ed0d7efba0dfa9c5b9e0431aeea7581aa217542d9e809a4"},
- {file = "contourpy-1.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:00ccd0dbaad6d804ab259820fa7cb0b8036bda0686ef844d24125d8287178ce0"},
- {file = "contourpy-1.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca947601224119117f7c19c9cdf6b3ab54c5726ef1d906aa4a69dfb6dd58102"},
- {file = "contourpy-1.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6ec93afeb848a0845a18989da3beca3eec2c0f852322efe21af1931147d12cb"},
- {file = "contourpy-1.3.0.tar.gz", hash = "sha256:7ffa0db17717a8ffb127efd0c95a4362d996b892c2904db72428d5b52e1938a4"},
+ {file = "contourpy-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a045f341a77b77e1c5de31e74e966537bba9f3c4099b35bf4c2e3939dd54cdab"},
+ {file = "contourpy-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:500360b77259914f7805af7462e41f9cb7ca92ad38e9f94d6c8641b089338124"},
+ {file = "contourpy-1.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2f926efda994cdf3c8d3fdb40b9962f86edbc4457e739277b961eced3d0b4c1"},
+ {file = "contourpy-1.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:adce39d67c0edf383647a3a007de0a45fd1b08dedaa5318404f1a73059c2512b"},
+ {file = "contourpy-1.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abbb49fb7dac584e5abc6636b7b2a7227111c4f771005853e7d25176daaf8453"},
+ {file = "contourpy-1.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0cffcbede75c059f535725c1680dfb17b6ba8753f0c74b14e6a9c68c29d7ea3"},
+ {file = "contourpy-1.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ab29962927945d89d9b293eabd0d59aea28d887d4f3be6c22deaefbb938a7277"},
+ {file = "contourpy-1.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:974d8145f8ca354498005b5b981165b74a195abfae9a8129df3e56771961d595"},
+ {file = "contourpy-1.3.1-cp310-cp310-win32.whl", hash = "sha256:ac4578ac281983f63b400f7fe6c101bedc10651650eef012be1ccffcbacf3697"},
+ {file = "contourpy-1.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:174e758c66bbc1c8576992cec9599ce8b6672b741b5d336b5c74e35ac382b18e"},
+ {file = "contourpy-1.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3e8b974d8db2c5610fb4e76307e265de0edb655ae8169e8b21f41807ccbeec4b"},
+ {file = "contourpy-1.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20914c8c973f41456337652a6eeca26d2148aa96dd7ac323b74516988bea89fc"},
+ {file = "contourpy-1.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d40d37c1c3a4961b4619dd9d77b12124a453cc3d02bb31a07d58ef684d3d86"},
+ {file = "contourpy-1.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:113231fe3825ebf6f15eaa8bc1f5b0ddc19d42b733345eae0934cb291beb88b6"},
+ {file = "contourpy-1.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4dbbc03a40f916a8420e420d63e96a1258d3d1b58cbdfd8d1f07b49fcbd38e85"},
+ {file = "contourpy-1.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a04ecd68acbd77fa2d39723ceca4c3197cb2969633836ced1bea14e219d077c"},
+ {file = "contourpy-1.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c414fc1ed8ee1dbd5da626cf3710c6013d3d27456651d156711fa24f24bd1291"},
+ {file = "contourpy-1.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:31c1b55c1f34f80557d3830d3dd93ba722ce7e33a0b472cba0ec3b6535684d8f"},
+ {file = "contourpy-1.3.1-cp311-cp311-win32.whl", hash = "sha256:f611e628ef06670df83fce17805c344710ca5cde01edfdc72751311da8585375"},
+ {file = "contourpy-1.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:b2bdca22a27e35f16794cf585832e542123296b4687f9fd96822db6bae17bfc9"},
+ {file = "contourpy-1.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0ffa84be8e0bd33410b17189f7164c3589c229ce5db85798076a3fa136d0e509"},
+ {file = "contourpy-1.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805617228ba7e2cbbfb6c503858e626ab528ac2a32a04a2fe88ffaf6b02c32bc"},
+ {file = "contourpy-1.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade08d343436a94e633db932e7e8407fe7de8083967962b46bdfc1b0ced39454"},
+ {file = "contourpy-1.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47734d7073fb4590b4a40122b35917cd77be5722d80683b249dac1de266aac80"},
+ {file = "contourpy-1.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ba94a401342fc0f8b948e57d977557fbf4d515f03c67682dd5c6191cb2d16ec"},
+ {file = "contourpy-1.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efa874e87e4a647fd2e4f514d5e91c7d493697127beb95e77d2f7561f6905bd9"},
+ {file = "contourpy-1.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1bf98051f1045b15c87868dbaea84f92408337d4f81d0e449ee41920ea121d3b"},
+ {file = "contourpy-1.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:61332c87493b00091423e747ea78200659dc09bdf7fd69edd5e98cef5d3e9a8d"},
+ {file = "contourpy-1.3.1-cp312-cp312-win32.whl", hash = "sha256:e914a8cb05ce5c809dd0fe350cfbb4e881bde5e2a38dc04e3afe1b3e58bd158e"},
+ {file = "contourpy-1.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:08d9d449a61cf53033612cb368f3a1b26cd7835d9b8cd326647efe43bca7568d"},
+ {file = "contourpy-1.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a761d9ccfc5e2ecd1bf05534eda382aa14c3e4f9205ba5b1684ecfe400716ef2"},
+ {file = "contourpy-1.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:523a8ee12edfa36f6d2a49407f705a6ef4c5098de4f498619787e272de93f2d5"},
+ {file = "contourpy-1.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece6df05e2c41bd46776fbc712e0996f7c94e0d0543af1656956d150c4ca7c81"},
+ {file = "contourpy-1.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:573abb30e0e05bf31ed067d2f82500ecfdaec15627a59d63ea2d95714790f5c2"},
+ {file = "contourpy-1.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fa36448e6a3a1a9a2ba23c02012c43ed88905ec80163f2ffe2421c7192a5d7"},
+ {file = "contourpy-1.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ea9924d28fc5586bf0b42d15f590b10c224117e74409dd7a0be3b62b74a501c"},
+ {file = "contourpy-1.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5b75aa69cb4d6f137b36f7eb2ace9280cfb60c55dc5f61c731fdf6f037f958a3"},
+ {file = "contourpy-1.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:041b640d4ec01922083645a94bb3b2e777e6b626788f4095cf21abbe266413c1"},
+ {file = "contourpy-1.3.1-cp313-cp313-win32.whl", hash = "sha256:36987a15e8ace5f58d4d5da9dca82d498c2bbb28dff6e5d04fbfcc35a9cb3a82"},
+ {file = "contourpy-1.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:a7895f46d47671fa7ceec40f31fae721da51ad34bdca0bee83e38870b1f47ffd"},
+ {file = "contourpy-1.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9ddeb796389dadcd884c7eb07bd14ef12408aaae358f0e2ae24114d797eede30"},
+ {file = "contourpy-1.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19c1555a6801c2f084c7ddc1c6e11f02eb6a6016ca1318dd5452ba3f613a1751"},
+ {file = "contourpy-1.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:841ad858cff65c2c04bf93875e384ccb82b654574a6d7f30453a04f04af71342"},
+ {file = "contourpy-1.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4318af1c925fb9a4fb190559ef3eec206845f63e80fb603d47f2d6d67683901c"},
+ {file = "contourpy-1.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14c102b0eab282427b662cb590f2e9340a9d91a1c297f48729431f2dcd16e14f"},
+ {file = "contourpy-1.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05e806338bfeaa006acbdeba0ad681a10be63b26e1b17317bfac3c5d98f36cda"},
+ {file = "contourpy-1.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4d76d5993a34ef3df5181ba3c92fabb93f1eaa5729504fb03423fcd9f3177242"},
+ {file = "contourpy-1.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:89785bb2a1980c1bd87f0cb1517a71cde374776a5f150936b82580ae6ead44a1"},
+ {file = "contourpy-1.3.1-cp313-cp313t-win32.whl", hash = "sha256:8eb96e79b9f3dcadbad2a3891672f81cdcab7f95b27f28f1c67d75f045b6b4f1"},
+ {file = "contourpy-1.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:287ccc248c9e0d0566934e7d606201abd74761b5703d804ff3df8935f523d546"},
+ {file = "contourpy-1.3.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b457d6430833cee8e4b8e9b6f07aa1c161e5e0d52e118dc102c8f9bd7dd060d6"},
+ {file = "contourpy-1.3.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb76c1a154b83991a3cbbf0dfeb26ec2833ad56f95540b442c73950af2013750"},
+ {file = "contourpy-1.3.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:44a29502ca9c7b5ba389e620d44f2fbe792b1fb5734e8b931ad307071ec58c53"},
+ {file = "contourpy-1.3.1.tar.gz", hash = "sha256:dfd97abd83335045a913e3bcc4a09c0ceadbe66580cf573fe961f4a825efa699"},
]
[package.dependencies]
@@ -2061,19 +2027,23 @@ tokenizer = ["tiktoken"]
[[package]]
name = "dataclass-wizard"
-version = "0.23.0"
+version = "0.28.0"
description = "Marshal dataclasses to/from JSON. Use field properties with initial values. Construct a dataclass schema with JSON input."
optional = false
python-versions = "*"
files = [
- {file = "dataclass-wizard-0.23.0.tar.gz", hash = "sha256:da29ec19846d46a1eef0692ba7c59c8a86ecd3a9eaddc0511cfc7485ad6d9c50"},
- {file = "dataclass_wizard-0.23.0-py2.py3-none-any.whl", hash = "sha256:50207dec6d36494421366b49b7a9ba6a4d831e2650c0af25cb4c057103d4a97c"},
+ {file = "dataclass-wizard-0.28.0.tar.gz", hash = "sha256:dd295cff8df6d8167a79048b77e91a3a1287a5905363f8df4de819b50d83b03a"},
+ {file = "dataclass_wizard-0.28.0-py2.py3-none-any.whl", hash = "sha256:996fa46475b9192a48a057c34f04597bc97be5bc2f163b99cb1de6f778ca1f7f"},
]
+[package.dependencies]
+typing-extensions = {version = ">=4", markers = "python_version == \"3.9\" or python_version == \"3.10\""}
+
[package.extras]
-dev = ["Sphinx (==5.3.0)", "bump2version (==1.0.1)", "coverage (>=6.2)", "dataclass-factory (==2.12)", "dataclasses-json (==0.5.6)", "flake8 (>=3)", "jsons (==1.6.1)", "pip (>=21.3.1)", "pytest (==7.0.1)", "pytest-cov (==3.0.0)", "pytest-mock (>=3.6.1)", "pytimeparse (==1.1.8)", "sphinx-issues (==3.0.1)", "sphinx-issues (==4.0.0)", "tox (==3.24.5)", "twine (==3.8.0)", "watchdog[watchmedo] (==2.1.6)", "wheel (==0.37.1)", "wheel (==0.42.0)"]
+dev = ["Sphinx (==7.4.7)", "Sphinx (==8.1.3)", "bump2version (==1.0.1)", "coverage (>=6.2)", "dataclass-factory (==2.16)", "dataclass-wizard[toml]", "dataclasses-json (==0.6.7)", "flake8 (>=3)", "jsons (==1.6.3)", "pip (>=21.3.1)", "pytest (==8.3.3)", "pytest-cov (==6.0.0)", "pytest-mock (>=3.6.1)", "pytimeparse (==1.1.8)", "sphinx-issues (==5.0.0)", "tomli (>=2,<3)", "tomli (>=2,<3)", "tomli-w (>=1,<2)", "tox (==4.23.2)", "twine (==5.1.1)", "watchdog[watchmedo] (==6.0.0)", "wheel (==0.45.0)"]
timedelta = ["pytimeparse (>=1.1.7)"]
-yaml = ["PyYAML (>=5.3)"]
+toml = ["tomli (>=2,<3)", "tomli (>=2,<3)", "tomli-w (>=1,<2)"]
+yaml = ["PyYAML (>=6,<7)"]
[[package]]
name = "dataclasses-json"
@@ -2092,13 +2062,13 @@ typing-inspect = ">=0.4.0,<1"
[[package]]
name = "db-dtypes"
-version = "1.3.0"
+version = "1.3.1"
description = "Pandas Data Types for SQL systems (BigQuery, Spanner)"
optional = false
python-versions = ">=3.7"
files = [
- {file = "db_dtypes-1.3.0-py2.py3-none-any.whl", hash = "sha256:7e65c59f849ccbe6f7bc4d0253edcc212a7907662906921caba3e4aadd0bc277"},
- {file = "db_dtypes-1.3.0.tar.gz", hash = "sha256:7bcbc8858b07474dc85b77bb2f3ae488978d1336f5ea73b58c39d9118bc3e91b"},
+ {file = "db_dtypes-1.3.1-py2.py3-none-any.whl", hash = "sha256:fbc9d1740d94aaf2b5ae24601cfc875a69b4635bb9d049e3c3036e9f10203af8"},
+ {file = "db_dtypes-1.3.1.tar.gz", hash = "sha256:a058f05dab100891f3e76a7a3db9ad0f107f18dd3d1bdd13680749a2f07eae77"},
]
[package.dependencies]
@@ -2131,20 +2101,20 @@ files = [
[[package]]
name = "deprecated"
-version = "1.2.14"
+version = "1.2.15"
description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
files = [
- {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"},
- {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"},
+ {file = "Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320"},
+ {file = "deprecated-1.2.15.tar.gz", hash = "sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d"},
]
[package.dependencies]
wrapt = ">=1.10,<2"
[package.extras]
-dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"]
+dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "jinja2 (>=3.0.3,<3.1.0)", "setuptools", "sphinx (<2)", "tox"]
[[package]]
name = "deprecation"
@@ -2239,74 +2209,74 @@ typing_extensions = ">=4.0,<5.0"
[[package]]
name = "duckdb"
-version = "1.1.2"
+version = "1.1.3"
description = "DuckDB in-process database"
optional = false
python-versions = ">=3.7.0"
files = [
- {file = "duckdb-1.1.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:91e7f99cf5cab1d26f92cb014429153497d805e79689baa44f4c4585a8cb243f"},
- {file = "duckdb-1.1.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:0107de622fe208142a1108263a03c43956048dcc99be3702d8e5d2aeaf99554c"},
- {file = "duckdb-1.1.2-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:8a09610f780857677725897856f8cdf3cafd8a991f871e6cb8ba88b2dbc8d737"},
- {file = "duckdb-1.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0f0ddac0482f0f3fece54d720d13819e82ae26c01a939ffa66a87be53f7f665"},
- {file = "duckdb-1.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84103373e818758dfa361d27781d0f096553843c5ffb9193260a0786c5248270"},
- {file = "duckdb-1.1.2-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bfdfd23e2bf58014ad0673973bd0ed88cd048dfe8e82420814a71d7d52ef2288"},
- {file = "duckdb-1.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:25889e6e29b87047b1dd56385ac08156e4713c59326cc6fff89657d01b2c417b"},
- {file = "duckdb-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:312570fa5277c3079de18388b86c2d87cbe1044838bb152b235c0227581d5d42"},
- {file = "duckdb-1.1.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:568439ea4fce8cb72ec1f767cd510686a9e7e29a011fc7c56d990059a6e94e48"},
- {file = "duckdb-1.1.2-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:74974f2d7210623a5d61b1fb0cb589c6e5ffcbf7dbb757a04c5ba24adcfc8cac"},
- {file = "duckdb-1.1.2-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:e26422a3358c816d764639070945b73eef55d1b4df990989e3492c85ef725c21"},
- {file = "duckdb-1.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87e972bd452eeeab197fe39dcaeecdb7c264b1f75a0ee67e532e235fe45b84df"},
- {file = "duckdb-1.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a6b73e70b73c8df85da383f6e557c03cad5c877868b9a7e41715761e8166c1e"},
- {file = "duckdb-1.1.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:623cb1952466aae5907af84107bcdec25a5ca021a8b6441e961f41edc724f6f2"},
- {file = "duckdb-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9fc0b550f96901fa7e76dc70a13f6477ad3e18ef1cb21d414c3a5569de3f27e"},
- {file = "duckdb-1.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:181edb1973bd8f493bcb6ecfa035f1a592dff4667758592f300619012ba251c0"},
- {file = "duckdb-1.1.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:83372b1b411086cac01ab2071122772fa66170b1b41ddbc37527464066083668"},
- {file = "duckdb-1.1.2-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:db37441deddfee6ac35a0c742d2f9e90e4e50b9e76d586a060d122b8fc56dada"},
- {file = "duckdb-1.1.2-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:19142a77e72874aeaa6fda30aeb13612c6de5e8c60fbcc3392cea6ef0694eeaf"},
- {file = "duckdb-1.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:099d99dd48d6e4682a3dd6233ceab73d977ebe1a87afaac54cf77c844e24514a"},
- {file = "duckdb-1.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be86e586ca7af7e807f72479a2b8d0983565360b19dbda4ef8a9d7b3909b8e2c"},
- {file = "duckdb-1.1.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:578e0953e4d8ba8da0cd69fb2930c45f51ce47d213b77d8a4cd461f9c0960b87"},
- {file = "duckdb-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:72b5eb5762c1a5e68849c7143f3b3747a9f15c040e34e41559f233a1569ad16f"},
- {file = "duckdb-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:9b4c6b6a08180261d98330d97355503961a25ca31cd9ef296e0681f7895b4a2c"},
- {file = "duckdb-1.1.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:695dcbc561374b126e86659709feadf883c9969ed718e94713edd4ba15d16619"},
- {file = "duckdb-1.1.2-cp313-cp313-macosx_12_0_universal2.whl", hash = "sha256:ada29be1e889f486c6cf1f6dffd15463e748faf361f33996f2e862779edc24a9"},
- {file = "duckdb-1.1.2-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:6ca722738fa9eb6218619740631de29acfdd132de6f6a6350fee5e291c2f6117"},
- {file = "duckdb-1.1.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c796d33f1e5a0c8c570d22da0c0b1db8578687e427029e1ce2c8ce3f9fffa6a3"},
- {file = "duckdb-1.1.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5c0996988a70dd3bc8111d9b9aeab7e38ed1999a52607c5f1b528e362b4dd1c"},
- {file = "duckdb-1.1.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6c37b039f6d6fed14d89450f5ccf54922b3304192d7412e12d6cc8d9e757f7a2"},
- {file = "duckdb-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e8c766b87f675c76d6d17103bf6fb9fb1a9e2fcb3d9b25c28bbc634bde31223e"},
- {file = "duckdb-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:e3e6300b7ccaf64b609f4f0780a6e1d25ab8cf34cceed46e62c35b6c4c5cb63b"},
- {file = "duckdb-1.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a01fae9604a54ecbc26e7503c522311f15afbd2870e6d8f6fbef4545dfae550"},
- {file = "duckdb-1.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:492b1d86a696428bd3f14dc1c7c3230e2dbca8978f288be64b04a26e0e00fad5"},
- {file = "duckdb-1.1.2-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bba58459ad897a78c4e478a097626fc266459a40338cecc68a49a8d5dc72fb7"},
- {file = "duckdb-1.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d395a3bf510bf24686821eec15802624797dcb33e8f14f8a7cc8e17d909474af"},
- {file = "duckdb-1.1.2-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:fd800f75728727fe699ed1eb22b636867cf48c9dd105ee88b977e20c89df4509"},
- {file = "duckdb-1.1.2-cp38-cp38-macosx_12_0_universal2.whl", hash = "sha256:d8caaf43909e49537e26df51d80d075ae2b25a610d28ed8bd31d6ccebeaf3c65"},
- {file = "duckdb-1.1.2-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:564166811c68d9c7f9911eb707ad32ec9c2507b98336d894fbe658b85bf1c697"},
- {file = "duckdb-1.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19386aa09f0d6f97634ba2972096d1c80d880176dfb0e949eadc91c98262a663"},
- {file = "duckdb-1.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9e8387bcc9a591ad14011ddfec0d408d1d9b1889c6c9b495a04c7016a24b9b3"},
- {file = "duckdb-1.1.2-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f8c5ff4970403ed3ff0ac71fe0ce1e6be3199df9d542afc84c424b444ba4ffe8"},
- {file = "duckdb-1.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:9283dcca87c3260eb631a99d738fa72b8545ed45b475bc72ad254f7310e14284"},
- {file = "duckdb-1.1.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:f87edaf20001530e63a4f7bda13b55dc3152d7171226915f2bf34e0813c8759e"},
- {file = "duckdb-1.1.2-cp39-cp39-macosx_12_0_universal2.whl", hash = "sha256:efec169b3fe0b821e3207ba3e445f227d42dd62b4440ff79c37fa168a4fc5a71"},
- {file = "duckdb-1.1.2-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:89164a2d29d56605a95ee5032aa415dd487028c4fd3e06d971497840e74c56e7"},
- {file = "duckdb-1.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6858e10c60ff7e70e61d3dd53d2545c8b2609942e45fd6de38cd0dee52932de3"},
- {file = "duckdb-1.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca967c5a57b1d0cb0fd5e539ab24110e5a59dcbedd365bb2dc80533d6e44a8d"},
- {file = "duckdb-1.1.2-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4ce949f1d7999aa6a046eb64067eee41d4c5c2872ba4fa408c9947742d0c7231"},
- {file = "duckdb-1.1.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ba6d1f918e6ca47a368a0c32806016405cb9beb2c245806b0ca998f569d2bdf"},
- {file = "duckdb-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:7111fd3e7b334a7be383313ce29918b7c643e4f6ef44d6d63c3ab3fa6716c114"},
- {file = "duckdb-1.1.2.tar.gz", hash = "sha256:c8232861dc8ec6daa29067056d5a0e5789919f2ab22ab792787616d7cd52f02a"},
+ {file = "duckdb-1.1.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:1c0226dc43e2ee4cc3a5a4672fddb2d76fd2cf2694443f395c02dd1bea0b7fce"},
+ {file = "duckdb-1.1.3-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:7c71169fa804c0b65e49afe423ddc2dc83e198640e3b041028da8110f7cd16f7"},
+ {file = "duckdb-1.1.3-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:872d38b65b66e3219d2400c732585c5b4d11b13d7a36cd97908d7981526e9898"},
+ {file = "duckdb-1.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25fb02629418c0d4d94a2bc1776edaa33f6f6ccaa00bd84eb96ecb97ae4b50e9"},
+ {file = "duckdb-1.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3f5cd604e7c39527e6060f430769b72234345baaa0987f9500988b2814f5e4"},
+ {file = "duckdb-1.1.3-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08935700e49c187fe0e9b2b86b5aad8a2ccd661069053e38bfaed3b9ff795efd"},
+ {file = "duckdb-1.1.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9b47036945e1db32d70e414a10b1593aec641bd4c5e2056873d971cc21e978b"},
+ {file = "duckdb-1.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:35c420f58abc79a68a286a20fd6265636175fadeca1ce964fc8ef159f3acc289"},
+ {file = "duckdb-1.1.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:4f0e2e5a6f5a53b79aee20856c027046fba1d73ada6178ed8467f53c3877d5e0"},
+ {file = "duckdb-1.1.3-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:911d58c22645bfca4a5a049ff53a0afd1537bc18fedb13bc440b2e5af3c46148"},
+ {file = "duckdb-1.1.3-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:c443d3d502335e69fc1e35295fcfd1108f72cb984af54c536adfd7875e79cee5"},
+ {file = "duckdb-1.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a55169d2d2e2e88077d91d4875104b58de45eff6a17a59c7dc41562c73df4be"},
+ {file = "duckdb-1.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d0767ada9f06faa5afcf63eb7ba1befaccfbcfdac5ff86f0168c673dd1f47aa"},
+ {file = "duckdb-1.1.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51c6d79e05b4a0933672b1cacd6338f882158f45ef9903aef350c4427d9fc898"},
+ {file = "duckdb-1.1.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:183ac743f21c6a4d6adfd02b69013d5fd78e5e2cd2b4db023bc8a95457d4bc5d"},
+ {file = "duckdb-1.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:a30dd599b8090ea6eafdfb5a9f1b872d78bac318b6914ada2d35c7974d643640"},
+ {file = "duckdb-1.1.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:a433ae9e72c5f397c44abdaa3c781d94f94f4065bcbf99ecd39433058c64cb38"},
+ {file = "duckdb-1.1.3-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:d08308e0a46c748d9c30f1d67ee1143e9c5ea3fbcccc27a47e115b19e7e78aa9"},
+ {file = "duckdb-1.1.3-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:5d57776539211e79b11e94f2f6d63de77885f23f14982e0fac066f2885fcf3ff"},
+ {file = "duckdb-1.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e59087dbbb63705f2483544e01cccf07d5b35afa58be8931b224f3221361d537"},
+ {file = "duckdb-1.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ebf5f60ddbd65c13e77cddb85fe4af671d31b851f125a4d002a313696af43f1"},
+ {file = "duckdb-1.1.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4ef7ba97a65bd39d66f2a7080e6fb60e7c3e41d4c1e19245f90f53b98e3ac32"},
+ {file = "duckdb-1.1.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f58db1b65593ff796c8ea6e63e2e144c944dd3d51c8d8e40dffa7f41693d35d3"},
+ {file = "duckdb-1.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:e86006958e84c5c02f08f9b96f4bc26990514eab329b1b4f71049b3727ce5989"},
+ {file = "duckdb-1.1.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:0897f83c09356206ce462f62157ce064961a5348e31ccb2a557a7531d814e70e"},
+ {file = "duckdb-1.1.3-cp313-cp313-macosx_12_0_universal2.whl", hash = "sha256:cddc6c1a3b91dcc5f32493231b3ba98f51e6d3a44fe02839556db2b928087378"},
+ {file = "duckdb-1.1.3-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:1d9ab6143e73bcf17d62566e368c23f28aa544feddfd2d8eb50ef21034286f24"},
+ {file = "duckdb-1.1.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f073d15d11a328f2e6d5964a704517e818e930800b7f3fa83adea47f23720d3"},
+ {file = "duckdb-1.1.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5724fd8a49e24d730be34846b814b98ba7c304ca904fbdc98b47fa95c0b0cee"},
+ {file = "duckdb-1.1.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51e7dbd968b393343b226ab3f3a7b5a68dee6d3fe59be9d802383bf916775cb8"},
+ {file = "duckdb-1.1.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:00cca22df96aa3473fe4584f84888e2cf1c516e8c2dd837210daec44eadba586"},
+ {file = "duckdb-1.1.3-cp313-cp313-win_amd64.whl", hash = "sha256:77f26884c7b807c7edd07f95cf0b00e6d47f0de4a534ac1706a58f8bc70d0d31"},
+ {file = "duckdb-1.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4748635875fc3c19a7320a6ae7410f9295557450c0ebab6d6712de12640929a"},
+ {file = "duckdb-1.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b74e121ab65dbec5290f33ca92301e3a4e81797966c8d9feef6efdf05fc6dafd"},
+ {file = "duckdb-1.1.3-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c619e4849837c8c83666f2cd5c6c031300cd2601e9564b47aa5de458ff6e69d"},
+ {file = "duckdb-1.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0ba6baa0af33ded836b388b09433a69b8bec00263247f6bf0a05c65c897108d3"},
+ {file = "duckdb-1.1.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:ecb1dc9062c1cc4d2d88a5e5cd8cc72af7818ab5a3c0f796ef0ffd60cfd3efb4"},
+ {file = "duckdb-1.1.3-cp38-cp38-macosx_12_0_universal2.whl", hash = "sha256:5ace6e4b1873afdd38bd6cc8fcf90310fb2d454f29c39a61d0c0cf1a24ad6c8d"},
+ {file = "duckdb-1.1.3-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:a1fa0c502f257fa9caca60b8b1478ec0f3295f34bb2efdc10776fc731b8a6c5f"},
+ {file = "duckdb-1.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6411e21a2128d478efbd023f2bdff12464d146f92bc3e9c49247240448ace5a6"},
+ {file = "duckdb-1.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5336939d83837af52731e02b6a78a446794078590aa71fd400eb17f083dda3e"},
+ {file = "duckdb-1.1.3-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f549af9f7416573ee48db1cf8c9d27aeed245cb015f4b4f975289418c6cf7320"},
+ {file = "duckdb-1.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:2141c6b28162199999075d6031b5d63efeb97c1e68fb3d797279d31c65676269"},
+ {file = "duckdb-1.1.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:09c68522c30fc38fc972b8a75e9201616b96ae6da3444585f14cf0d116008c95"},
+ {file = "duckdb-1.1.3-cp39-cp39-macosx_12_0_universal2.whl", hash = "sha256:8ee97ec337794c162c0638dda3b4a30a483d0587deda22d45e1909036ff0b739"},
+ {file = "duckdb-1.1.3-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:a1f83c7217c188b7ab42e6a0963f42070d9aed114f6200e3c923c8899c090f16"},
+ {file = "duckdb-1.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1aa3abec8e8995a03ff1a904b0e66282d19919f562dd0a1de02f23169eeec461"},
+ {file = "duckdb-1.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80158f4c7c7ada46245837d5b6869a336bbaa28436fbb0537663fa324a2750cd"},
+ {file = "duckdb-1.1.3-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:647f17bd126170d96a38a9a6f25fca47ebb0261e5e44881e3782989033c94686"},
+ {file = "duckdb-1.1.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:252d9b17d354beb9057098d4e5d5698e091a4f4a0d38157daeea5fc0ec161670"},
+ {file = "duckdb-1.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:eeacb598120040e9591f5a4edecad7080853aa8ac27e62d280f151f8c862afa3"},
+ {file = "duckdb-1.1.3.tar.gz", hash = "sha256:68c3a46ab08836fe041d15dcbf838f74a990d551db47cb24ab1c4576fc19351c"},
]
[[package]]
name = "duckduckgo-search"
-version = "6.3.3"
+version = "6.3.5"
description = "Search for words, documents, images, news, maps and text translation using the DuckDuckGo.com search engine."
optional = false
python-versions = ">=3.8"
files = [
- {file = "duckduckgo_search-6.3.3-py3-none-any.whl", hash = "sha256:63e5d6b958bd532016bc8a53e8b18717751bf7ef51b1c83e59b9f5780c79e64c"},
- {file = "duckduckgo_search-6.3.3.tar.gz", hash = "sha256:4d49508f01f85c8675765fdd4cc25eedbb3450e129b35209897fded874f6568f"},
+ {file = "duckduckgo_search-6.3.5-py3-none-any.whl", hash = "sha256:5b29ac55f178214870ccc911ef5e1e350c21a904e9e1dbd6445f78c16ee938f9"},
+ {file = "duckduckgo_search-6.3.5.tar.gz", hash = "sha256:bc7604859d6f17b88ec634f322b1920207fe3d62aa61ee6dccecb19d6dda6beb"},
]
[package.dependencies]
@@ -2475,13 +2445,13 @@ test = ["pillow", "pytest", "pytest-asyncio"]
[[package]]
name = "fastapi"
-version = "0.115.4"
+version = "0.115.5"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false
python-versions = ">=3.8"
files = [
- {file = "fastapi-0.115.4-py3-none-any.whl", hash = "sha256:0b504a063ffb3cf96a5e27dc1bc32c80ca743a2528574f9cdc77daa2d31b4742"},
- {file = "fastapi-0.115.4.tar.gz", hash = "sha256:db653475586b091cb8b2fec2ac54a680ac6a158e07406e1abae31679e8826349"},
+ {file = "fastapi-0.115.5-py3-none-any.whl", hash = "sha256:596b95adbe1474da47049e802f9a65ab2ffa9c2b07e7efee70eb8a66c9f2f796"},
+ {file = "fastapi-0.115.5.tar.gz", hash = "sha256:0e7a4d0dc0d01c68df21887cce0945e72d3c48b9f4f79dfe7a7d53aa08fbb289"},
]
[package.dependencies]
@@ -2788,59 +2758,61 @@ fonttools = "*"
[[package]]
name = "fonttools"
-version = "4.54.1"
+version = "4.55.0"
description = "Tools to manipulate font files"
optional = false
python-versions = ">=3.8"
files = [
- {file = "fonttools-4.54.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ed7ee041ff7b34cc62f07545e55e1468808691dddfd315d51dd82a6b37ddef2"},
- {file = "fonttools-4.54.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41bb0b250c8132b2fcac148e2e9198e62ff06f3cc472065dff839327945c5882"},
- {file = "fonttools-4.54.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7965af9b67dd546e52afcf2e38641b5be956d68c425bef2158e95af11d229f10"},
- {file = "fonttools-4.54.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:278913a168f90d53378c20c23b80f4e599dca62fbffae4cc620c8eed476b723e"},
- {file = "fonttools-4.54.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0e88e3018ac809b9662615072dcd6b84dca4c2d991c6d66e1970a112503bba7e"},
- {file = "fonttools-4.54.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4aa4817f0031206e637d1e685251ac61be64d1adef111060df84fdcbc6ab6c44"},
- {file = "fonttools-4.54.1-cp310-cp310-win32.whl", hash = "sha256:7e3b7d44e18c085fd8c16dcc6f1ad6c61b71ff463636fcb13df7b1b818bd0c02"},
- {file = "fonttools-4.54.1-cp310-cp310-win_amd64.whl", hash = "sha256:dd9cc95b8d6e27d01e1e1f1fae8559ef3c02c76317da650a19047f249acd519d"},
- {file = "fonttools-4.54.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5419771b64248484299fa77689d4f3aeed643ea6630b2ea750eeab219588ba20"},
- {file = "fonttools-4.54.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:301540e89cf4ce89d462eb23a89464fef50915255ece765d10eee8b2bf9d75b2"},
- {file = "fonttools-4.54.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ae5091547e74e7efecc3cbf8e75200bc92daaeb88e5433c5e3e95ea8ce5aa7"},
- {file = "fonttools-4.54.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82834962b3d7c5ca98cb56001c33cf20eb110ecf442725dc5fdf36d16ed1ab07"},
- {file = "fonttools-4.54.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d26732ae002cc3d2ecab04897bb02ae3f11f06dd7575d1df46acd2f7c012a8d8"},
- {file = "fonttools-4.54.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:58974b4987b2a71ee08ade1e7f47f410c367cdfc5a94fabd599c88165f56213a"},
- {file = "fonttools-4.54.1-cp311-cp311-win32.whl", hash = "sha256:ab774fa225238986218a463f3fe151e04d8c25d7de09df7f0f5fce27b1243dbc"},
- {file = "fonttools-4.54.1-cp311-cp311-win_amd64.whl", hash = "sha256:07e005dc454eee1cc60105d6a29593459a06321c21897f769a281ff2d08939f6"},
- {file = "fonttools-4.54.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:54471032f7cb5fca694b5f1a0aaeba4af6e10ae989df408e0216f7fd6cdc405d"},
- {file = "fonttools-4.54.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fa92cb248e573daab8d032919623cc309c005086d743afb014c836636166f08"},
- {file = "fonttools-4.54.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a911591200114969befa7f2cb74ac148bce5a91df5645443371aba6d222e263"},
- {file = "fonttools-4.54.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93d458c8a6a354dc8b48fc78d66d2a8a90b941f7fec30e94c7ad9982b1fa6bab"},
- {file = "fonttools-4.54.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5eb2474a7c5be8a5331146758debb2669bf5635c021aee00fd7c353558fc659d"},
- {file = "fonttools-4.54.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c9c563351ddc230725c4bdf7d9e1e92cbe6ae8553942bd1fb2b2ff0884e8b714"},
- {file = "fonttools-4.54.1-cp312-cp312-win32.whl", hash = "sha256:fdb062893fd6d47b527d39346e0c5578b7957dcea6d6a3b6794569370013d9ac"},
- {file = "fonttools-4.54.1-cp312-cp312-win_amd64.whl", hash = "sha256:e4564cf40cebcb53f3dc825e85910bf54835e8a8b6880d59e5159f0f325e637e"},
- {file = "fonttools-4.54.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6e37561751b017cf5c40fce0d90fd9e8274716de327ec4ffb0df957160be3bff"},
- {file = "fonttools-4.54.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:357cacb988a18aace66e5e55fe1247f2ee706e01debc4b1a20d77400354cddeb"},
- {file = "fonttools-4.54.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e953cc0bddc2beaf3a3c3b5dd9ab7554677da72dfaf46951e193c9653e515a"},
- {file = "fonttools-4.54.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:58d29b9a294573d8319f16f2f79e42428ba9b6480442fa1836e4eb89c4d9d61c"},
- {file = "fonttools-4.54.1-cp313-cp313-win32.whl", hash = "sha256:9ef1b167e22709b46bf8168368b7b5d3efeaaa746c6d39661c1b4405b6352e58"},
- {file = "fonttools-4.54.1-cp313-cp313-win_amd64.whl", hash = "sha256:262705b1663f18c04250bd1242b0515d3bbae177bee7752be67c979b7d47f43d"},
- {file = "fonttools-4.54.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ed2f80ca07025551636c555dec2b755dd005e2ea8fbeb99fc5cdff319b70b23b"},
- {file = "fonttools-4.54.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9dc080e5a1c3b2656caff2ac2633d009b3a9ff7b5e93d0452f40cd76d3da3b3c"},
- {file = "fonttools-4.54.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d152d1be65652fc65e695e5619e0aa0982295a95a9b29b52b85775243c06556"},
- {file = "fonttools-4.54.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8583e563df41fdecef31b793b4dd3af8a9caa03397be648945ad32717a92885b"},
- {file = "fonttools-4.54.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0d1d353ef198c422515a3e974a1e8d5b304cd54a4c2eebcae708e37cd9eeffb1"},
- {file = "fonttools-4.54.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fda582236fee135d4daeca056c8c88ec5f6f6d88a004a79b84a02547c8f57386"},
- {file = "fonttools-4.54.1-cp38-cp38-win32.whl", hash = "sha256:e7d82b9e56716ed32574ee106cabca80992e6bbdcf25a88d97d21f73a0aae664"},
- {file = "fonttools-4.54.1-cp38-cp38-win_amd64.whl", hash = "sha256:ada215fd079e23e060157aab12eba0d66704316547f334eee9ff26f8c0d7b8ab"},
- {file = "fonttools-4.54.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5b8a096e649768c2f4233f947cf9737f8dbf8728b90e2771e2497c6e3d21d13"},
- {file = "fonttools-4.54.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e10d2e0a12e18f4e2dd031e1bf7c3d7017be5c8dbe524d07706179f355c5dac"},
- {file = "fonttools-4.54.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31c32d7d4b0958600eac75eaf524b7b7cb68d3a8c196635252b7a2c30d80e986"},
- {file = "fonttools-4.54.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c39287f5c8f4a0c5a55daf9eaf9ccd223ea59eed3f6d467133cc727d7b943a55"},
- {file = "fonttools-4.54.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a7a310c6e0471602fe3bf8efaf193d396ea561486aeaa7adc1f132e02d30c4b9"},
- {file = "fonttools-4.54.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d3b659d1029946f4ff9b6183984578041b520ce0f8fb7078bb37ec7445806b33"},
- {file = "fonttools-4.54.1-cp39-cp39-win32.whl", hash = "sha256:e96bc94c8cda58f577277d4a71f51c8e2129b8b36fd05adece6320dd3d57de8a"},
- {file = "fonttools-4.54.1-cp39-cp39-win_amd64.whl", hash = "sha256:e8a4b261c1ef91e7188a30571be6ad98d1c6d9fa2427244c545e2fa0a2494dd7"},
- {file = "fonttools-4.54.1-py3-none-any.whl", hash = "sha256:37cddd62d83dc4f72f7c3f3c2bcf2697e89a30efb152079896544a93907733bd"},
- {file = "fonttools-4.54.1.tar.gz", hash = "sha256:957f669d4922f92c171ba01bef7f29410668db09f6c02111e22b2bce446f3285"},
+ {file = "fonttools-4.55.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:51c029d4c0608a21a3d3d169dfc3fb776fde38f00b35ca11fdab63ba10a16f61"},
+ {file = "fonttools-4.55.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bca35b4e411362feab28e576ea10f11268b1aeed883b9f22ed05675b1e06ac69"},
+ {file = "fonttools-4.55.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ce4ba6981e10f7e0ccff6348e9775ce25ffadbee70c9fd1a3737e3e9f5fa74f"},
+ {file = "fonttools-4.55.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31d00f9852a6051dac23294a4cf2df80ced85d1d173a61ba90a3d8f5abc63c60"},
+ {file = "fonttools-4.55.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e198e494ca6e11f254bac37a680473a311a88cd40e58f9cc4dc4911dfb686ec6"},
+ {file = "fonttools-4.55.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7208856f61770895e79732e1dcbe49d77bd5783adf73ae35f87fcc267df9db81"},
+ {file = "fonttools-4.55.0-cp310-cp310-win32.whl", hash = "sha256:e7e6a352ff9e46e8ef8a3b1fe2c4478f8a553e1b5a479f2e899f9dc5f2055880"},
+ {file = "fonttools-4.55.0-cp310-cp310-win_amd64.whl", hash = "sha256:636caaeefe586d7c84b5ee0734c1a5ab2dae619dc21c5cf336f304ddb8f6001b"},
+ {file = "fonttools-4.55.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fa34aa175c91477485c44ddfbb51827d470011e558dfd5c7309eb31bef19ec51"},
+ {file = "fonttools-4.55.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:37dbb3fdc2ef7302d3199fb12468481cbebaee849e4b04bc55b77c24e3c49189"},
+ {file = "fonttools-4.55.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5263d8e7ef3c0ae87fbce7f3ec2f546dc898d44a337e95695af2cd5ea21a967"},
+ {file = "fonttools-4.55.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f307f6b5bf9e86891213b293e538d292cd1677e06d9faaa4bf9c086ad5f132f6"},
+ {file = "fonttools-4.55.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f0a4b52238e7b54f998d6a56b46a2c56b59c74d4f8a6747fb9d4042190f37cd3"},
+ {file = "fonttools-4.55.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3e569711464f777a5d4ef522e781dc33f8095ab5efd7548958b36079a9f2f88c"},
+ {file = "fonttools-4.55.0-cp311-cp311-win32.whl", hash = "sha256:2b3ab90ec0f7b76c983950ac601b58949f47aca14c3f21eed858b38d7ec42b05"},
+ {file = "fonttools-4.55.0-cp311-cp311-win_amd64.whl", hash = "sha256:aa046f6a63bb2ad521004b2769095d4c9480c02c1efa7d7796b37826508980b6"},
+ {file = "fonttools-4.55.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:838d2d8870f84fc785528a692e724f2379d5abd3fc9dad4d32f91cf99b41e4a7"},
+ {file = "fonttools-4.55.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f46b863d74bab7bb0d395f3b68d3f52a03444964e67ce5c43ce43a75efce9246"},
+ {file = "fonttools-4.55.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33b52a9cfe4e658e21b1f669f7309b4067910321757fec53802ca8f6eae96a5a"},
+ {file = "fonttools-4.55.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:732a9a63d6ea4a81b1b25a1f2e5e143761b40c2e1b79bb2b68e4893f45139a40"},
+ {file = "fonttools-4.55.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7dd91ac3fcb4c491bb4763b820bcab6c41c784111c24172616f02f4bc227c17d"},
+ {file = "fonttools-4.55.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1f0e115281a32ff532118aa851ef497a1b7cda617f4621c1cdf81ace3e36fb0c"},
+ {file = "fonttools-4.55.0-cp312-cp312-win32.whl", hash = "sha256:6c99b5205844f48a05cb58d4a8110a44d3038c67ed1d79eb733c4953c628b0f6"},
+ {file = "fonttools-4.55.0-cp312-cp312-win_amd64.whl", hash = "sha256:f8c8c76037d05652510ae45be1cd8fb5dd2fd9afec92a25374ac82255993d57c"},
+ {file = "fonttools-4.55.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8118dc571921dc9e4b288d9cb423ceaf886d195a2e5329cc427df82bba872cd9"},
+ {file = "fonttools-4.55.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01124f2ca6c29fad4132d930da69158d3f49b2350e4a779e1efbe0e82bd63f6c"},
+ {file = "fonttools-4.55.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81ffd58d2691f11f7c8438796e9f21c374828805d33e83ff4b76e4635633674c"},
+ {file = "fonttools-4.55.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5435e5f1eb893c35c2bc2b9cd3c9596b0fcb0a59e7a14121562986dd4c47b8dd"},
+ {file = "fonttools-4.55.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d12081729280c39d001edd0f4f06d696014c26e6e9a0a55488fabc37c28945e4"},
+ {file = "fonttools-4.55.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a7ad1f1b98ab6cb927ab924a38a8649f1ffd7525c75fe5b594f5dab17af70e18"},
+ {file = "fonttools-4.55.0-cp313-cp313-win32.whl", hash = "sha256:abe62987c37630dca69a104266277216de1023cf570c1643bb3a19a9509e7a1b"},
+ {file = "fonttools-4.55.0-cp313-cp313-win_amd64.whl", hash = "sha256:2863555ba90b573e4201feaf87a7e71ca3b97c05aa4d63548a4b69ea16c9e998"},
+ {file = "fonttools-4.55.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:00f7cf55ad58a57ba421b6a40945b85ac7cc73094fb4949c41171d3619a3a47e"},
+ {file = "fonttools-4.55.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f27526042efd6f67bfb0cc2f1610fa20364396f8b1fc5edb9f45bb815fb090b2"},
+ {file = "fonttools-4.55.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e67974326af6a8879dc2a4ec63ab2910a1c1a9680ccd63e4a690950fceddbe"},
+ {file = "fonttools-4.55.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61dc0a13451143c5e987dec5254d9d428f3c2789a549a7cf4f815b63b310c1cc"},
+ {file = "fonttools-4.55.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:b2e526b325a903868c62155a6a7e24df53f6ce4c5c3160214d8fe1be2c41b478"},
+ {file = "fonttools-4.55.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b7ef9068a1297714e6fefe5932c33b058aa1d45a2b8be32a4c6dee602ae22b5c"},
+ {file = "fonttools-4.55.0-cp38-cp38-win32.whl", hash = "sha256:55718e8071be35dff098976bc249fc243b58efa263768c611be17fe55975d40a"},
+ {file = "fonttools-4.55.0-cp38-cp38-win_amd64.whl", hash = "sha256:553bd4f8cc327f310c20158e345e8174c8eed49937fb047a8bda51daf2c353c8"},
+ {file = "fonttools-4.55.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f901cef813f7c318b77d1c5c14cf7403bae5cb977cede023e22ba4316f0a8f6"},
+ {file = "fonttools-4.55.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8c9679fc0dd7e8a5351d321d8d29a498255e69387590a86b596a45659a39eb0d"},
+ {file = "fonttools-4.55.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd2820a8b632f3307ebb0bf57948511c2208e34a4939cf978333bc0a3f11f838"},
+ {file = "fonttools-4.55.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23bbbb49bec613a32ed1b43df0f2b172313cee690c2509f1af8fdedcf0a17438"},
+ {file = "fonttools-4.55.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a656652e1f5d55b9728937a7e7d509b73d23109cddd4e89ee4f49bde03b736c6"},
+ {file = "fonttools-4.55.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f50a1f455902208486fbca47ce33054208a4e437b38da49d6721ce2fef732fcf"},
+ {file = "fonttools-4.55.0-cp39-cp39-win32.whl", hash = "sha256:161d1ac54c73d82a3cded44202d0218ab007fde8cf194a23d3dd83f7177a2f03"},
+ {file = "fonttools-4.55.0-cp39-cp39-win_amd64.whl", hash = "sha256:ca7fd6987c68414fece41c96836e945e1f320cda56fc96ffdc16e54a44ec57a2"},
+ {file = "fonttools-4.55.0-py3-none-any.whl", hash = "sha256:12db5888cd4dd3fcc9f0ee60c6edd3c7e1fd44b7dd0f31381ea03df68f8a153f"},
+ {file = "fonttools-4.55.0.tar.gz", hash = "sha256:7636acc6ab733572d5e7eec922b254ead611f1cdad17be3f0be7418e8bfaca71"},
]
[package.extras]
@@ -3350,13 +3322,13 @@ xai = ["tensorflow (>=2.3.0,<3.0.0dev)"]
[[package]]
name = "google-cloud-bigquery"
-version = "3.26.0"
+version = "3.27.0"
description = "Google BigQuery API client library"
optional = false
python-versions = ">=3.7"
files = [
- {file = "google_cloud_bigquery-3.26.0-py2.py3-none-any.whl", hash = "sha256:e0e9ad28afa67a18696e624cbccab284bf2c0a3f6eeb9eeb0426c69b943793a8"},
- {file = "google_cloud_bigquery-3.26.0.tar.gz", hash = "sha256:edbdc788beea659e04c0af7fe4dcd6d9155344b98951a0d5055bd2f15da4ba23"},
+ {file = "google_cloud_bigquery-3.27.0-py2.py3-none-any.whl", hash = "sha256:b53b0431e5ba362976a4cd8acce72194b4116cdf8115030c7b339b884603fcc3"},
+ {file = "google_cloud_bigquery-3.27.0.tar.gz", hash = "sha256:379c524054d7b090fa56d0c22662cc6e6458a6229b6754c0e7177e3a73421d2c"},
]
[package.dependencies]
@@ -3399,13 +3371,13 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"]
[[package]]
name = "google-cloud-resource-manager"
-version = "1.13.0"
+version = "1.13.1"
description = "Google Cloud Resource Manager API client library"
optional = false
python-versions = ">=3.7"
files = [
- {file = "google_cloud_resource_manager-1.13.0-py2.py3-none-any.whl", hash = "sha256:33beb4528c2b7aee7a97ed843710581a7b4a27f3dd1fa41a0bf3359b3d68853f"},
- {file = "google_cloud_resource_manager-1.13.0.tar.gz", hash = "sha256:ae4bf69443f14b37007d4d84150115b0942e8b01650fd7a1fc6ff4dc1760e5c4"},
+ {file = "google_cloud_resource_manager-1.13.1-py2.py3-none-any.whl", hash = "sha256:abdc7d443ab6c0763b8ed49ab59203e223f14c683df69e3748d5eb2237475f5f"},
+ {file = "google_cloud_resource_manager-1.13.1.tar.gz", hash = "sha256:bee9f2fb1d856731182b7cc05980d216aae848947ccdadf2848a2c64ccd6bbea"},
]
[package.dependencies]
@@ -3552,13 +3524,13 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"]
[[package]]
name = "gotrue"
-version = "2.9.3"
+version = "2.10.0"
description = "Python Client Library for Supabase Auth"
optional = false
python-versions = "<4.0,>=3.9"
files = [
- {file = "gotrue-2.9.3-py3-none-any.whl", hash = "sha256:9d2e9c74405d879f4828e0a7b94daf167a6e109c10ae6e5c59a0e21446f6e423"},
- {file = "gotrue-2.9.3.tar.gz", hash = "sha256:051551d80e642bdd2ab42cac78207745d89a2a08f429a1512d82624e675d8255"},
+ {file = "gotrue-2.10.0-py3-none-any.whl", hash = "sha256:768e58207488e5184ffbdc4351b7280d913daf97962f4e9f2cca05c80004b042"},
+ {file = "gotrue-2.10.0.tar.gz", hash = "sha256:4edf4c251da3535f2b044e23deba221e848ca1210c17d0c7a9b19f79a1e3f3c0"},
]
[package.dependencies]
@@ -3669,70 +3641,70 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4
[[package]]
name = "grpcio"
-version = "1.67.1"
+version = "1.68.0"
description = "HTTP/2-based RPC framework"
optional = false
python-versions = ">=3.8"
files = [
- {file = "grpcio-1.67.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:8b0341d66a57f8a3119b77ab32207072be60c9bf79760fa609c5609f2deb1f3f"},
- {file = "grpcio-1.67.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:f5a27dddefe0e2357d3e617b9079b4bfdc91341a91565111a21ed6ebbc51b22d"},
- {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:43112046864317498a33bdc4797ae6a268c36345a910de9b9c17159d8346602f"},
- {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9b929f13677b10f63124c1a410994a401cdd85214ad83ab67cc077fc7e480f0"},
- {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7d1797a8a3845437d327145959a2c0c47c05947c9eef5ff1a4c80e499dcc6fa"},
- {file = "grpcio-1.67.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0489063974d1452436139501bf6b180f63d4977223ee87488fe36858c5725292"},
- {file = "grpcio-1.67.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9fd042de4a82e3e7aca44008ee2fb5da01b3e5adb316348c21980f7f58adc311"},
- {file = "grpcio-1.67.1-cp310-cp310-win32.whl", hash = "sha256:638354e698fd0c6c76b04540a850bf1db27b4d2515a19fcd5cf645c48d3eb1ed"},
- {file = "grpcio-1.67.1-cp310-cp310-win_amd64.whl", hash = "sha256:608d87d1bdabf9e2868b12338cd38a79969eaf920c89d698ead08f48de9c0f9e"},
- {file = "grpcio-1.67.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:7818c0454027ae3384235a65210bbf5464bd715450e30a3d40385453a85a70cb"},
- {file = "grpcio-1.67.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ea33986b70f83844cd00814cee4451055cd8cab36f00ac64a31f5bb09b31919e"},
- {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:c7a01337407dd89005527623a4a72c5c8e2894d22bead0895306b23c6695698f"},
- {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b866f73224b0634f4312a4674c1be21b2b4afa73cb20953cbbb73a6b36c3cc"},
- {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9fff78ba10d4250bfc07a01bd6254a6d87dc67f9627adece85c0b2ed754fa96"},
- {file = "grpcio-1.67.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8a23cbcc5bb11ea7dc6163078be36c065db68d915c24f5faa4f872c573bb400f"},
- {file = "grpcio-1.67.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1a65b503d008f066e994f34f456e0647e5ceb34cfcec5ad180b1b44020ad4970"},
- {file = "grpcio-1.67.1-cp311-cp311-win32.whl", hash = "sha256:e29ca27bec8e163dca0c98084040edec3bc49afd10f18b412f483cc68c712744"},
- {file = "grpcio-1.67.1-cp311-cp311-win_amd64.whl", hash = "sha256:786a5b18544622bfb1e25cc08402bd44ea83edfb04b93798d85dca4d1a0b5be5"},
- {file = "grpcio-1.67.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:267d1745894200e4c604958da5f856da6293f063327cb049a51fe67348e4f953"},
- {file = "grpcio-1.67.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:85f69fdc1d28ce7cff8de3f9c67db2b0ca9ba4449644488c1e0303c146135ddb"},
- {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f26b0b547eb8d00e195274cdfc63ce64c8fc2d3e2d00b12bf468ece41a0423a0"},
- {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4422581cdc628f77302270ff839a44f4c24fdc57887dc2a45b7e53d8fc2376af"},
- {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7616d2ded471231c701489190379e0c311ee0a6c756f3c03e6a62b95a7146e"},
- {file = "grpcio-1.67.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8a00efecde9d6fcc3ab00c13f816313c040a28450e5e25739c24f432fc6d3c75"},
- {file = "grpcio-1.67.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:699e964923b70f3101393710793289e42845791ea07565654ada0969522d0a38"},
- {file = "grpcio-1.67.1-cp312-cp312-win32.whl", hash = "sha256:4e7b904484a634a0fff132958dabdb10d63e0927398273917da3ee103e8d1f78"},
- {file = "grpcio-1.67.1-cp312-cp312-win_amd64.whl", hash = "sha256:5721e66a594a6c4204458004852719b38f3d5522082be9061d6510b455c90afc"},
- {file = "grpcio-1.67.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:aa0162e56fd10a5547fac8774c4899fc3e18c1aa4a4759d0ce2cd00d3696ea6b"},
- {file = "grpcio-1.67.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:beee96c8c0b1a75d556fe57b92b58b4347c77a65781ee2ac749d550f2a365dc1"},
- {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:a93deda571a1bf94ec1f6fcda2872dad3ae538700d94dc283c672a3b508ba3af"},
- {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e6f255980afef598a9e64a24efce87b625e3e3c80a45162d111a461a9f92955"},
- {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e838cad2176ebd5d4a8bb03955138d6589ce9e2ce5d51c3ada34396dbd2dba8"},
- {file = "grpcio-1.67.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a6703916c43b1d468d0756c8077b12017a9fcb6a1ef13faf49e67d20d7ebda62"},
- {file = "grpcio-1.67.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:917e8d8994eed1d86b907ba2a61b9f0aef27a2155bca6cbb322430fc7135b7bb"},
- {file = "grpcio-1.67.1-cp313-cp313-win32.whl", hash = "sha256:e279330bef1744040db8fc432becc8a727b84f456ab62b744d3fdb83f327e121"},
- {file = "grpcio-1.67.1-cp313-cp313-win_amd64.whl", hash = "sha256:fa0c739ad8b1996bd24823950e3cb5152ae91fca1c09cc791190bf1627ffefba"},
- {file = "grpcio-1.67.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:178f5db771c4f9a9facb2ab37a434c46cb9be1a75e820f187ee3d1e7805c4f65"},
- {file = "grpcio-1.67.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0f3e49c738396e93b7ba9016e153eb09e0778e776df6090c1b8c91877cc1c426"},
- {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:24e8a26dbfc5274d7474c27759b54486b8de23c709d76695237515bc8b5baeab"},
- {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b6c16489326d79ead41689c4b84bc40d522c9a7617219f4ad94bc7f448c5085"},
- {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e6a4dcf5af7bbc36fd9f81c9f372e8ae580870a9e4b6eafe948cd334b81cf3"},
- {file = "grpcio-1.67.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:95b5f2b857856ed78d72da93cd7d09b6db8ef30102e5e7fe0961fe4d9f7d48e8"},
- {file = "grpcio-1.67.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b49359977c6ec9f5d0573ea4e0071ad278ef905aa74e420acc73fd28ce39e9ce"},
- {file = "grpcio-1.67.1-cp38-cp38-win32.whl", hash = "sha256:f5b76ff64aaac53fede0cc93abf57894ab2a7362986ba22243d06218b93efe46"},
- {file = "grpcio-1.67.1-cp38-cp38-win_amd64.whl", hash = "sha256:804c6457c3cd3ec04fe6006c739579b8d35c86ae3298ffca8de57b493524b771"},
- {file = "grpcio-1.67.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:a25bdea92b13ff4d7790962190bf6bf5c4639876e01c0f3dda70fc2769616335"},
- {file = "grpcio-1.67.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cdc491ae35a13535fd9196acb5afe1af37c8237df2e54427be3eecda3653127e"},
- {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:85f862069b86a305497e74d0dc43c02de3d1d184fc2c180993aa8aa86fbd19b8"},
- {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec74ef02010186185de82cc594058a3ccd8d86821842bbac9873fd4a2cf8be8d"},
- {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01f616a964e540638af5130469451cf580ba8c7329f45ca998ab66e0c7dcdb04"},
- {file = "grpcio-1.67.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:299b3d8c4f790c6bcca485f9963b4846dd92cf6f1b65d3697145d005c80f9fe8"},
- {file = "grpcio-1.67.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:60336bff760fbb47d7e86165408126f1dded184448e9a4c892189eb7c9d3f90f"},
- {file = "grpcio-1.67.1-cp39-cp39-win32.whl", hash = "sha256:5ed601c4c6008429e3d247ddb367fe8c7259c355757448d7c1ef7bd4a6739e8e"},
- {file = "grpcio-1.67.1-cp39-cp39-win_amd64.whl", hash = "sha256:5db70d32d6703b89912af16d6d45d78406374a8b8ef0d28140351dd0ec610e98"},
- {file = "grpcio-1.67.1.tar.gz", hash = "sha256:3dc2ed4cabea4dc14d5e708c2b426205956077cc5de419b4d4079315017e9732"},
+ {file = "grpcio-1.68.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:619b5d0f29f4f5351440e9343224c3e19912c21aeda44e0c49d0d147a8d01544"},
+ {file = "grpcio-1.68.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:a59f5822f9459bed098ffbceb2713abbf7c6fd13f2b9243461da5c338d0cd6c3"},
+ {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:c03d89df516128febc5a7e760d675b478ba25802447624edf7aa13b1e7b11e2a"},
+ {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44bcbebb24363d587472089b89e2ea0ab2e2b4df0e4856ba4c0b087c82412121"},
+ {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79f81b7fbfb136247b70465bd836fa1733043fdee539cd6031cb499e9608a110"},
+ {file = "grpcio-1.68.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:88fb2925789cfe6daa20900260ef0a1d0a61283dfb2d2fffe6194396a354c618"},
+ {file = "grpcio-1.68.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:99f06232b5c9138593ae6f2e355054318717d32a9c09cdc5a2885540835067a1"},
+ {file = "grpcio-1.68.0-cp310-cp310-win32.whl", hash = "sha256:a6213d2f7a22c3c30a479fb5e249b6b7e648e17f364598ff64d08a5136fe488b"},
+ {file = "grpcio-1.68.0-cp310-cp310-win_amd64.whl", hash = "sha256:15327ab81131ef9b94cb9f45b5bd98803a179c7c61205c8c0ac9aff9d6c4e82a"},
+ {file = "grpcio-1.68.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:3b2b559beb2d433129441783e5f42e3be40a9e1a89ec906efabf26591c5cd415"},
+ {file = "grpcio-1.68.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e46541de8425a4d6829ac6c5d9b16c03c292105fe9ebf78cb1c31e8d242f9155"},
+ {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:c1245651f3c9ea92a2db4f95d37b7597db6b246d5892bca6ee8c0e90d76fb73c"},
+ {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f1931c7aa85be0fa6cea6af388e576f3bf6baee9e5d481c586980c774debcb4"},
+ {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b0ff09c81e3aded7a183bc6473639b46b6caa9c1901d6f5e2cba24b95e59e30"},
+ {file = "grpcio-1.68.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8c73f9fbbaee1a132487e31585aa83987ddf626426d703ebcb9a528cf231c9b1"},
+ {file = "grpcio-1.68.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6b2f98165ea2790ea159393a2246b56f580d24d7da0d0342c18a085299c40a75"},
+ {file = "grpcio-1.68.0-cp311-cp311-win32.whl", hash = "sha256:e1e7ed311afb351ff0d0e583a66fcb39675be112d61e7cfd6c8269884a98afbc"},
+ {file = "grpcio-1.68.0-cp311-cp311-win_amd64.whl", hash = "sha256:e0d2f68eaa0a755edd9a47d40e50dba6df2bceda66960dee1218da81a2834d27"},
+ {file = "grpcio-1.68.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8af6137cc4ae8e421690d276e7627cfc726d4293f6607acf9ea7260bd8fc3d7d"},
+ {file = "grpcio-1.68.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4028b8e9a3bff6f377698587d642e24bd221810c06579a18420a17688e421af7"},
+ {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f60fa2adf281fd73ae3a50677572521edca34ba373a45b457b5ebe87c2d01e1d"},
+ {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e18589e747c1e70b60fab6767ff99b2d0c359ea1db8a2cb524477f93cdbedf5b"},
+ {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0d30f3fee9372796f54d3100b31ee70972eaadcc87314be369360248a3dcffe"},
+ {file = "grpcio-1.68.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7e0a3e72c0e9a1acab77bef14a73a416630b7fd2cbd893c0a873edc47c42c8cd"},
+ {file = "grpcio-1.68.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a831dcc343440969aaa812004685ed322cdb526cd197112d0db303b0da1e8659"},
+ {file = "grpcio-1.68.0-cp312-cp312-win32.whl", hash = "sha256:5a180328e92b9a0050958ced34dddcb86fec5a8b332f5a229e353dafc16cd332"},
+ {file = "grpcio-1.68.0-cp312-cp312-win_amd64.whl", hash = "sha256:2bddd04a790b69f7a7385f6a112f46ea0b34c4746f361ebafe9ca0be567c78e9"},
+ {file = "grpcio-1.68.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:fc05759ffbd7875e0ff2bd877be1438dfe97c9312bbc558c8284a9afa1d0f40e"},
+ {file = "grpcio-1.68.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:15fa1fe25d365a13bc6d52fcac0e3ee1f9baebdde2c9b3b2425f8a4979fccea1"},
+ {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:32a9cb4686eb2e89d97022ecb9e1606d132f85c444354c17a7dbde4a455e4a3b"},
+ {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dba037ff8d284c8e7ea9a510c8ae0f5b016004f13c3648f72411c464b67ff2fb"},
+ {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0efbbd849867e0e569af09e165363ade75cf84f5229b2698d53cf22c7a4f9e21"},
+ {file = "grpcio-1.68.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:4e300e6978df0b65cc2d100c54e097c10dfc7018b9bd890bbbf08022d47f766d"},
+ {file = "grpcio-1.68.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:6f9c7ad1a23e1047f827385f4713b5b8c6c7d325705be1dd3e31fb00dcb2f665"},
+ {file = "grpcio-1.68.0-cp313-cp313-win32.whl", hash = "sha256:3ac7f10850fd0487fcce169c3c55509101c3bde2a3b454869639df2176b60a03"},
+ {file = "grpcio-1.68.0-cp313-cp313-win_amd64.whl", hash = "sha256:afbf45a62ba85a720491bfe9b2642f8761ff348006f5ef67e4622621f116b04a"},
+ {file = "grpcio-1.68.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:f8f695d9576ce836eab27ba7401c60acaf9ef6cf2f70dfe5462055ba3df02cc3"},
+ {file = "grpcio-1.68.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9fe1b141cda52f2ca73e17d2d3c6a9f3f3a0c255c216b50ce616e9dca7e3441d"},
+ {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:4df81d78fd1646bf94ced4fb4cd0a7fe2e91608089c522ef17bc7db26e64effd"},
+ {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46a2d74d4dd8993151c6cd585594c082abe74112c8e4175ddda4106f2ceb022f"},
+ {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a17278d977746472698460c63abf333e1d806bd41f2224f90dbe9460101c9796"},
+ {file = "grpcio-1.68.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:15377bce516b1c861c35e18eaa1c280692bf563264836cece693c0f169b48829"},
+ {file = "grpcio-1.68.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cc5f0a4f5904b8c25729a0498886b797feb817d1fd3812554ffa39551112c161"},
+ {file = "grpcio-1.68.0-cp38-cp38-win32.whl", hash = "sha256:def1a60a111d24376e4b753db39705adbe9483ef4ca4761f825639d884d5da78"},
+ {file = "grpcio-1.68.0-cp38-cp38-win_amd64.whl", hash = "sha256:55d3b52fd41ec5772a953612db4e70ae741a6d6ed640c4c89a64f017a1ac02b5"},
+ {file = "grpcio-1.68.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:0d230852ba97654453d290e98d6aa61cb48fa5fafb474fb4c4298d8721809354"},
+ {file = "grpcio-1.68.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:50992f214264e207e07222703c17d9cfdcc2c46ed5a1ea86843d440148ebbe10"},
+ {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:14331e5c27ed3545360464a139ed279aa09db088f6e9502e95ad4bfa852bb116"},
+ {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f84890b205692ea813653ece4ac9afa2139eae136e419231b0eec7c39fdbe4c2"},
+ {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0cf343c6f4f6aa44863e13ec9ddfe299e0be68f87d68e777328bff785897b05"},
+ {file = "grpcio-1.68.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fd2c2d47969daa0e27eadaf15c13b5e92605c5e5953d23c06d0b5239a2f176d3"},
+ {file = "grpcio-1.68.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:18668e36e7f4045820f069997834e94e8275910b1f03e078a6020bd464cb2363"},
+ {file = "grpcio-1.68.0-cp39-cp39-win32.whl", hash = "sha256:2af76ab7c427aaa26aa9187c3e3c42f38d3771f91a20f99657d992afada2294a"},
+ {file = "grpcio-1.68.0-cp39-cp39-win_amd64.whl", hash = "sha256:e694b5928b7b33ca2d3b4d5f9bf8b5888906f181daff6b406f4938f3a997a490"},
+ {file = "grpcio-1.68.0.tar.gz", hash = "sha256:7e7483d39b4a4fddb9906671e9ea21aaad4f031cdfc349fec76bdfa1e404543a"},
]
[package.extras]
-protobuf = ["grpcio-tools (>=1.67.1)"]
+protobuf = ["grpcio-tools (>=1.68.0)"]
[[package]]
name = "grpcio-status"
@@ -3996,13 +3968,13 @@ lxml = ["lxml"]
[[package]]
name = "httpcore"
-version = "1.0.6"
+version = "1.0.7"
description = "A minimal low-level HTTP client."
optional = false
python-versions = ">=3.8"
files = [
- {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"},
- {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"},
+ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"},
+ {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"},
]
[package.dependencies]
@@ -4303,84 +4275,84 @@ i18n = ["Babel (>=2.7)"]
[[package]]
name = "jiter"
-version = "0.6.1"
+version = "0.7.1"
description = "Fast iterable JSON parser."
optional = false
python-versions = ">=3.8"
files = [
- {file = "jiter-0.6.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d08510593cb57296851080018006dfc394070178d238b767b1879dc1013b106c"},
- {file = "jiter-0.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adef59d5e2394ebbad13b7ed5e0306cceb1df92e2de688824232a91588e77aa7"},
- {file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3e02f7a27f2bcc15b7d455c9df05df8ffffcc596a2a541eeda9a3110326e7a3"},
- {file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed69a7971d67b08f152c17c638f0e8c2aa207e9dd3a5fcd3cba294d39b5a8d2d"},
- {file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2019d966e98f7c6df24b3b8363998575f47d26471bfb14aade37630fae836a1"},
- {file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36c0b51a285b68311e207a76c385650322734c8717d16c2eb8af75c9d69506e7"},
- {file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:220e0963b4fb507c525c8f58cde3da6b1be0bfddb7ffd6798fb8f2531226cdb1"},
- {file = "jiter-0.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aa25c7a9bf7875a141182b9c95aed487add635da01942ef7ca726e42a0c09058"},
- {file = "jiter-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e90552109ca8ccd07f47ca99c8a1509ced93920d271bb81780a973279974c5ab"},
- {file = "jiter-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:67723a011964971864e0b484b0ecfee6a14de1533cff7ffd71189e92103b38a8"},
- {file = "jiter-0.6.1-cp310-none-win32.whl", hash = "sha256:33af2b7d2bf310fdfec2da0177eab2fedab8679d1538d5b86a633ebfbbac4edd"},
- {file = "jiter-0.6.1-cp310-none-win_amd64.whl", hash = "sha256:7cea41c4c673353799906d940eee8f2d8fd1d9561d734aa921ae0f75cb9732f4"},
- {file = "jiter-0.6.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b03c24e7da7e75b170c7b2b172d9c5e463aa4b5c95696a368d52c295b3f6847f"},
- {file = "jiter-0.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:47fee1be677b25d0ef79d687e238dc6ac91a8e553e1a68d0839f38c69e0ee491"},
- {file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25f0d2f6e01a8a0fb0eab6d0e469058dab2be46ff3139ed2d1543475b5a1d8e7"},
- {file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b809e39e342c346df454b29bfcc7bca3d957f5d7b60e33dae42b0e5ec13e027"},
- {file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e9ac7c2f092f231f5620bef23ce2e530bd218fc046098747cc390b21b8738a7a"},
- {file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e51a2d80d5fe0ffb10ed2c82b6004458be4a3f2b9c7d09ed85baa2fbf033f54b"},
- {file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3343d4706a2b7140e8bd49b6c8b0a82abf9194b3f0f5925a78fc69359f8fc33c"},
- {file = "jiter-0.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82521000d18c71e41c96960cb36e915a357bc83d63a8bed63154b89d95d05ad1"},
- {file = "jiter-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3c843e7c1633470708a3987e8ce617ee2979ee18542d6eb25ae92861af3f1d62"},
- {file = "jiter-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a2e861658c3fe849efc39b06ebb98d042e4a4c51a8d7d1c3ddc3b1ea091d0784"},
- {file = "jiter-0.6.1-cp311-none-win32.whl", hash = "sha256:7d72fc86474862c9c6d1f87b921b70c362f2b7e8b2e3c798bb7d58e419a6bc0f"},
- {file = "jiter-0.6.1-cp311-none-win_amd64.whl", hash = "sha256:3e36a320634f33a07794bb15b8da995dccb94f944d298c8cfe2bd99b1b8a574a"},
- {file = "jiter-0.6.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1fad93654d5a7dcce0809aff66e883c98e2618b86656aeb2129db2cd6f26f867"},
- {file = "jiter-0.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4e6e340e8cd92edab7f6a3a904dbbc8137e7f4b347c49a27da9814015cc0420c"},
- {file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:691352e5653af84ed71763c3c427cff05e4d658c508172e01e9c956dfe004aba"},
- {file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:defee3949313c1f5b55e18be45089970cdb936eb2a0063f5020c4185db1b63c9"},
- {file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26d2bdd5da097e624081c6b5d416d3ee73e5b13f1703bcdadbb1881f0caa1933"},
- {file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18aa9d1626b61c0734b973ed7088f8a3d690d0b7f5384a5270cd04f4d9f26c86"},
- {file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a3567c8228afa5ddcce950631c6b17397ed178003dc9ee7e567c4c4dcae9fa0"},
- {file = "jiter-0.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e5c0507131c922defe3f04c527d6838932fcdfd69facebafd7d3574fa3395314"},
- {file = "jiter-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:540fcb224d7dc1bcf82f90f2ffb652df96f2851c031adca3c8741cb91877143b"},
- {file = "jiter-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e7b75436d4fa2032b2530ad989e4cb0ca74c655975e3ff49f91a1a3d7f4e1df2"},
- {file = "jiter-0.6.1-cp312-none-win32.whl", hash = "sha256:883d2ced7c21bf06874fdeecab15014c1c6d82216765ca6deef08e335fa719e0"},
- {file = "jiter-0.6.1-cp312-none-win_amd64.whl", hash = "sha256:91e63273563401aadc6c52cca64a7921c50b29372441adc104127b910e98a5b6"},
- {file = "jiter-0.6.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:852508a54fe3228432e56019da8b69208ea622a3069458252f725d634e955b31"},
- {file = "jiter-0.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f491cc69ff44e5a1e8bc6bf2b94c1f98d179e1aaf4a554493c171a5b2316b701"},
- {file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc56c8f0b2a28ad4d8047f3ae62d25d0e9ae01b99940ec0283263a04724de1f3"},
- {file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:51b58f7a0d9e084a43b28b23da2b09fc5e8df6aa2b6a27de43f991293cab85fd"},
- {file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f79ce15099154c90ef900d69c6b4c686b64dfe23b0114e0971f2fecd306ec6c"},
- {file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:03a025b52009f47e53ea619175d17e4ded7c035c6fbd44935cb3ada11e1fd592"},
- {file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c74a8d93718137c021d9295248a87c2f9fdc0dcafead12d2930bc459ad40f885"},
- {file = "jiter-0.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40b03b75f903975f68199fc4ec73d546150919cb7e534f3b51e727c4d6ccca5a"},
- {file = "jiter-0.6.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:825651a3f04cf92a661d22cad61fc913400e33aa89b3e3ad9a6aa9dc8a1f5a71"},
- {file = "jiter-0.6.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:928bf25eb69ddb292ab8177fe69d3fbf76c7feab5fce1c09265a7dccf25d3991"},
- {file = "jiter-0.6.1-cp313-none-win32.whl", hash = "sha256:352cd24121e80d3d053fab1cc9806258cad27c53cad99b7a3cac57cf934b12e4"},
- {file = "jiter-0.6.1-cp313-none-win_amd64.whl", hash = "sha256:be7503dd6f4bf02c2a9bacb5cc9335bc59132e7eee9d3e931b13d76fd80d7fda"},
- {file = "jiter-0.6.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:31d8e00e1fb4c277df8ab6f31a671f509ebc791a80e5c61fdc6bc8696aaa297c"},
- {file = "jiter-0.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77c296d65003cd7ee5d7b0965f6acbe6cffaf9d1fa420ea751f60ef24e85fed5"},
- {file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeeb0c0325ef96c12a48ea7e23e2e86fe4838e6e0a995f464cf4c79fa791ceeb"},
- {file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a31c6fcbe7d6c25d6f1cc6bb1cba576251d32795d09c09961174fe461a1fb5bd"},
- {file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59e2b37f3b9401fc9e619f4d4badcab2e8643a721838bcf695c2318a0475ae42"},
- {file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bae5ae4853cb9644144e9d0755854ce5108d470d31541d83f70ca7ecdc2d1637"},
- {file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9df588e9c830b72d8db1dd7d0175af6706b0904f682ea9b1ca8b46028e54d6e9"},
- {file = "jiter-0.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15f8395e835cf561c85c1adee72d899abf2733d9df72e9798e6d667c9b5c1f30"},
- {file = "jiter-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a99d4e0b5fc3b05ea732d67eb2092fe894e95a90e6e413f2ea91387e228a307"},
- {file = "jiter-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a311df1fa6be0ccd64c12abcd85458383d96e542531bafbfc0a16ff6feda588f"},
- {file = "jiter-0.6.1-cp38-none-win32.whl", hash = "sha256:81116a6c272a11347b199f0e16b6bd63f4c9d9b52bc108991397dd80d3c78aba"},
- {file = "jiter-0.6.1-cp38-none-win_amd64.whl", hash = "sha256:13f9084e3e871a7c0b6e710db54444088b1dd9fbefa54d449b630d5e73bb95d0"},
- {file = "jiter-0.6.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f1c53615fcfec3b11527c08d19cff6bc870da567ce4e57676c059a3102d3a082"},
- {file = "jiter-0.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f791b6a4da23238c17a81f44f5b55d08a420c5692c1fda84e301a4b036744eb1"},
- {file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c97e90fec2da1d5f68ef121444c2c4fa72eabf3240829ad95cf6bbeca42a301"},
- {file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3cbc1a66b4e41511209e97a2866898733c0110b7245791ac604117b7fb3fedb7"},
- {file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4e85f9e12cd8418ab10e1fcf0e335ae5bb3da26c4d13a0fd9e6a17a674783b6"},
- {file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08be33db6dcc374c9cc19d3633af5e47961a7b10d4c61710bd39e48d52a35824"},
- {file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:677be9550004f5e010d673d3b2a2b815a8ea07a71484a57d3f85dde7f14cf132"},
- {file = "jiter-0.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e8bd065be46c2eecc328e419d6557bbc37844c88bb07b7a8d2d6c91c7c4dedc9"},
- {file = "jiter-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bd95375ce3609ec079a97c5d165afdd25693302c071ca60c7ae1cf826eb32022"},
- {file = "jiter-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db459ed22d0208940d87f614e1f0ea5a946d29a3cfef71f7e1aab59b6c6b2afb"},
- {file = "jiter-0.6.1-cp39-none-win32.whl", hash = "sha256:d71c962f0971347bd552940ab96aa42ceefcd51b88c4ced8a27398182efa8d80"},
- {file = "jiter-0.6.1-cp39-none-win_amd64.whl", hash = "sha256:d465db62d2d10b489b7e7a33027c4ae3a64374425d757e963f86df5b5f2e7fc5"},
- {file = "jiter-0.6.1.tar.gz", hash = "sha256:e19cd21221fc139fb032e4112986656cb2739e9fe6d84c13956ab30ccc7d4449"},
+ {file = "jiter-0.7.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:262e96d06696b673fad6f257e6a0abb6e873dc22818ca0e0600f4a1189eb334f"},
+ {file = "jiter-0.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be6de02939aac5be97eb437f45cfd279b1dc9de358b13ea6e040e63a3221c40d"},
+ {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935f10b802bc1ce2b2f61843e498c7720aa7f4e4bb7797aa8121eab017293c3d"},
+ {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9cd3cccccabf5064e4bb3099c87bf67db94f805c1e62d1aefd2b7476e90e0ee2"},
+ {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4aa919ebfc5f7b027cc368fe3964c0015e1963b92e1db382419dadb098a05192"},
+ {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ae2d01e82c94491ce4d6f461a837f63b6c4e6dd5bb082553a70c509034ff3d4"},
+ {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f9568cd66dbbdab67ae1b4c99f3f7da1228c5682d65913e3f5f95586b3cb9a9"},
+ {file = "jiter-0.7.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9ecbf4e20ec2c26512736284dc1a3f8ed79b6ca7188e3b99032757ad48db97dc"},
+ {file = "jiter-0.7.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b1a0508fddc70ce00b872e463b387d49308ef02b0787992ca471c8d4ba1c0fa1"},
+ {file = "jiter-0.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f84c9996664c460f24213ff1e5881530abd8fafd82058d39af3682d5fd2d6316"},
+ {file = "jiter-0.7.1-cp310-none-win32.whl", hash = "sha256:c915e1a1960976ba4dfe06551ea87063b2d5b4d30759012210099e712a414d9f"},
+ {file = "jiter-0.7.1-cp310-none-win_amd64.whl", hash = "sha256:75bf3b7fdc5c0faa6ffffcf8028a1f974d126bac86d96490d1b51b3210aa0f3f"},
+ {file = "jiter-0.7.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ad04a23a91f3d10d69d6c87a5f4471b61c2c5cd6e112e85136594a02043f462c"},
+ {file = "jiter-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e47a554de88dff701226bb5722b7f1b6bccd0b98f1748459b7e56acac2707a5"},
+ {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e44fff69c814a2e96a20b4ecee3e2365e9b15cf5fe4e00869d18396daa91dab"},
+ {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df0a1d05081541b45743c965436f8b5a1048d6fd726e4a030113a2699a6046ea"},
+ {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f22cf8f236a645cb6d8ffe2a64edb5d2b66fb148bf7c75eea0cb36d17014a7bc"},
+ {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8589f50b728ea4bf22e0632eefa125c8aa9c38ed202a5ee6ca371f05eeb3ff"},
+ {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f20de711224f2ca2dbb166a8d512f6ff48c9c38cc06b51f796520eb4722cc2ce"},
+ {file = "jiter-0.7.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a9803396032117b85ec8cbf008a54590644a062fedd0425cbdb95e4b2b60479"},
+ {file = "jiter-0.7.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3d8bae77c82741032e9d89a4026479061aba6e646de3bf5f2fc1ae2bbd9d06e0"},
+ {file = "jiter-0.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3dc9939e576bbc68c813fc82f6620353ed68c194c7bcf3d58dc822591ec12490"},
+ {file = "jiter-0.7.1-cp311-none-win32.whl", hash = "sha256:f7605d24cd6fab156ec89e7924578e21604feee9c4f1e9da34d8b67f63e54892"},
+ {file = "jiter-0.7.1-cp311-none-win_amd64.whl", hash = "sha256:f3ea649e7751a1a29ea5ecc03c4ada0a833846c59c6da75d747899f9b48b7282"},
+ {file = "jiter-0.7.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ad36a1155cbd92e7a084a568f7dc6023497df781adf2390c345dd77a120905ca"},
+ {file = "jiter-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7ba52e6aaed2dc5c81a3d9b5e4ab95b039c4592c66ac973879ba57c3506492bb"},
+ {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b7de0b6f6728b678540c7927587e23f715284596724be203af952418acb8a2d"},
+ {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9463b62bd53c2fb85529c700c6a3beb2ee54fde8bef714b150601616dcb184a6"},
+ {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:627164ec01d28af56e1f549da84caf0fe06da3880ebc7b7ee1ca15df106ae172"},
+ {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:25d0e5bf64e368b0aa9e0a559c3ab2f9b67e35fe7269e8a0d81f48bbd10e8963"},
+ {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c244261306f08f8008b3087059601997016549cb8bb23cf4317a4827f07b7d74"},
+ {file = "jiter-0.7.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7ded4e4b75b68b843b7cea5cd7c55f738c20e1394c68c2cb10adb655526c5f1b"},
+ {file = "jiter-0.7.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:80dae4f1889b9d09e5f4de6b58c490d9c8ce7730e35e0b8643ab62b1538f095c"},
+ {file = "jiter-0.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5970cf8ec943b51bce7f4b98d2e1ed3ada170c2a789e2db3cb484486591a176a"},
+ {file = "jiter-0.7.1-cp312-none-win32.whl", hash = "sha256:701d90220d6ecb3125d46853c8ca8a5bc158de8c49af60fd706475a49fee157e"},
+ {file = "jiter-0.7.1-cp312-none-win_amd64.whl", hash = "sha256:7824c3ecf9ecf3321c37f4e4d4411aad49c666ee5bc2a937071bdd80917e4533"},
+ {file = "jiter-0.7.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:097676a37778ba3c80cb53f34abd6943ceb0848263c21bf423ae98b090f6c6ba"},
+ {file = "jiter-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3298af506d4271257c0a8f48668b0f47048d69351675dd8500f22420d4eec378"},
+ {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12fd88cfe6067e2199964839c19bd2b422ca3fd792949b8f44bb8a4e7d21946a"},
+ {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dacca921efcd21939123c8ea8883a54b9fa7f6545c8019ffcf4f762985b6d0c8"},
+ {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de3674a5fe1f6713a746d25ad9c32cd32fadc824e64b9d6159b3b34fd9134143"},
+ {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65df9dbae6d67e0788a05b4bad5706ad40f6f911e0137eb416b9eead6ba6f044"},
+ {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ba9a358d59a0a55cccaa4957e6ae10b1a25ffdabda863c0343c51817610501d"},
+ {file = "jiter-0.7.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:576eb0f0c6207e9ede2b11ec01d9c2182973986514f9c60bc3b3b5d5798c8f50"},
+ {file = "jiter-0.7.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:e550e29cdf3577d2c970a18f3959e6b8646fd60ef1b0507e5947dc73703b5627"},
+ {file = "jiter-0.7.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:81d968dbf3ce0db2e0e4dec6b0a0d5d94f846ee84caf779b07cab49f5325ae43"},
+ {file = "jiter-0.7.1-cp313-none-win32.whl", hash = "sha256:f892e547e6e79a1506eb571a676cf2f480a4533675f834e9ae98de84f9b941ac"},
+ {file = "jiter-0.7.1-cp313-none-win_amd64.whl", hash = "sha256:0302f0940b1455b2a7fb0409b8d5b31183db70d2b07fd177906d83bf941385d1"},
+ {file = "jiter-0.7.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c65a3ce72b679958b79d556473f192a4dfc5895e8cc1030c9f4e434690906076"},
+ {file = "jiter-0.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e80052d3db39f9bb8eb86d207a1be3d9ecee5e05fdec31380817f9609ad38e60"},
+ {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70a497859c4f3f7acd71c8bd89a6f9cf753ebacacf5e3e799138b8e1843084e3"},
+ {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c1288bc22b9e36854a0536ba83666c3b1fb066b811019d7b682c9cf0269cdf9f"},
+ {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b096ca72dd38ef35675e1d3b01785874315182243ef7aea9752cb62266ad516f"},
+ {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8dbbd52c50b605af13dbee1a08373c520e6fcc6b5d32f17738875847fea4e2cd"},
+ {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af29c5c6eb2517e71ffa15c7ae9509fa5e833ec2a99319ac88cc271eca865519"},
+ {file = "jiter-0.7.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f114a4df1e40c03c0efbf974b376ed57756a1141eb27d04baee0680c5af3d424"},
+ {file = "jiter-0.7.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:191fbaee7cf46a9dd9b817547bf556facde50f83199d07fc48ebeff4082f9df4"},
+ {file = "jiter-0.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0e2b445e5ee627fb4ee6bbceeb486251e60a0c881a8e12398dfdff47c56f0723"},
+ {file = "jiter-0.7.1-cp38-none-win32.whl", hash = "sha256:47ac4c3cf8135c83e64755b7276339b26cd3c7ddadf9e67306ace4832b283edf"},
+ {file = "jiter-0.7.1-cp38-none-win_amd64.whl", hash = "sha256:60b49c245cd90cde4794f5c30f123ee06ccf42fb8730a019a2870cd005653ebd"},
+ {file = "jiter-0.7.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8f212eeacc7203256f526f550d105d8efa24605828382cd7d296b703181ff11d"},
+ {file = "jiter-0.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d9e247079d88c00e75e297e6cb3a18a039ebcd79fefc43be9ba4eb7fb43eb726"},
+ {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0aacaa56360139c53dcf352992b0331f4057a0373bbffd43f64ba0c32d2d155"},
+ {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc1b55314ca97dbb6c48d9144323896e9c1a25d41c65bcb9550b3e0c270ca560"},
+ {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f281aae41b47e90deb70e7386558e877a8e62e1693e0086f37d015fa1c102289"},
+ {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:93c20d2730a84d43f7c0b6fb2579dc54335db742a59cf9776d0b80e99d587382"},
+ {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e81ccccd8069110e150613496deafa10da2f6ff322a707cbec2b0d52a87b9671"},
+ {file = "jiter-0.7.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a7d5e85766eff4c9be481d77e2226b4c259999cb6862ccac5ef6621d3c8dcce"},
+ {file = "jiter-0.7.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f52ce5799df5b6975439ecb16b1e879d7655e1685b6e3758c9b1b97696313bfb"},
+ {file = "jiter-0.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0c91a0304373fdf97d56f88356a010bba442e6d995eb7773cbe32885b71cdd8"},
+ {file = "jiter-0.7.1-cp39-none-win32.whl", hash = "sha256:5c08adf93e41ce2755970e8aa95262298afe2bf58897fb9653c47cd93c3c6cdc"},
+ {file = "jiter-0.7.1-cp39-none-win_amd64.whl", hash = "sha256:6592f4067c74176e5f369228fb2995ed01400c9e8e1225fb73417183a5e635f0"},
+ {file = "jiter-0.7.1.tar.gz", hash = "sha256:448cf4f74f7363c34cdef26214da527e8eeffd88ba06d0b80b485ad0667baf5d"},
]
[[package]]
@@ -4718,13 +4690,13 @@ openai = ["openai (>=0.27.8)"]
[[package]]
name = "langsmith"
-version = "0.1.138"
+version = "0.1.143"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
- {file = "langsmith-0.1.138-py3-none-any.whl", hash = "sha256:5c2bd5c11c75f7b3d06a0f06b115186e7326ca969fd26d66ffc65a0669012aee"},
- {file = "langsmith-0.1.138.tar.gz", hash = "sha256:1ecf613bb52f6bf17f1510e24ad8b70d4b0259bc9d3dbfd69b648c66d4644f0b"},
+ {file = "langsmith-0.1.143-py3-none-any.whl", hash = "sha256:ba0d827269e9b03a90fababe41fa3e4e3f833300b95add10184f7e67167dde6f"},
+ {file = "langsmith-0.1.143.tar.gz", hash = "sha256:4c5159e5cd84b3f8499433009e72d2076dd2daf6c044ac8a3611b30d0d0161c5"},
]
[package.dependencies]
@@ -5136,13 +5108,13 @@ files = [
[[package]]
name = "marshmallow"
-version = "3.23.0"
+version = "3.23.1"
description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
optional = false
python-versions = ">=3.9"
files = [
- {file = "marshmallow-3.23.0-py3-none-any.whl", hash = "sha256:82f20a2397834fe6d9611b241f2f7e7b680ed89c49f84728a1ad937be6b4bdf4"},
- {file = "marshmallow-3.23.0.tar.gz", hash = "sha256:98d8827a9f10c03d44ead298d2e99c6aea8197df18ccfad360dae7f89a50da2e"},
+ {file = "marshmallow-3.23.1-py3-none-any.whl", hash = "sha256:fece2eb2c941180ea1b7fcbd4a83c51bfdd50093fdd3ad2585ee5e1df2508491"},
+ {file = "marshmallow-3.23.1.tar.gz", hash = "sha256:3a8dfda6edd8dcdbf216c0ede1d1e78d230a6dc9c5a088f58c4083b974a0d468"},
]
[package.dependencies]
@@ -5150,7 +5122,7 @@ packaging = ">=17.0"
[package.extras]
dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"]
-docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.1.3)", "sphinx-issues (==5.0.0)", "sphinx-version-warning (==1.1.2)"]
+docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.14)", "sphinx (==8.1.3)", "sphinx-issues (==5.0.0)", "sphinx-version-warning (==1.1.2)"]
tests = ["pytest", "simplejson"]
[[package]]
@@ -5913,36 +5885,32 @@ tests = ["pytest", "pytest-cov"]
[[package]]
name = "onnxruntime"
-version = "1.19.2"
+version = "1.20.0"
description = "ONNX Runtime is a runtime accelerator for Machine Learning models"
optional = false
python-versions = "*"
files = [
- {file = "onnxruntime-1.19.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:84fa57369c06cadd3c2a538ae2a26d76d583e7c34bdecd5769d71ca5c0fc750e"},
- {file = "onnxruntime-1.19.2-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bdc471a66df0c1cdef774accef69e9f2ca168c851ab5e4f2f3341512c7ef4666"},
- {file = "onnxruntime-1.19.2-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e3a4ce906105d99ebbe817f536d50a91ed8a4d1592553f49b3c23c4be2560ae6"},
- {file = "onnxruntime-1.19.2-cp310-cp310-win32.whl", hash = "sha256:4b3d723cc154c8ddeb9f6d0a8c0d6243774c6b5930847cc83170bfe4678fafb3"},
- {file = "onnxruntime-1.19.2-cp310-cp310-win_amd64.whl", hash = "sha256:17ed7382d2c58d4b7354fb2b301ff30b9bf308a1c7eac9546449cd122d21cae5"},
- {file = "onnxruntime-1.19.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:d863e8acdc7232d705d49e41087e10b274c42f09e259016a46f32c34e06dc4fd"},
- {file = "onnxruntime-1.19.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c1dfe4f660a71b31caa81fc298a25f9612815215a47b286236e61d540350d7b6"},
- {file = "onnxruntime-1.19.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a36511dc07c5c964b916697e42e366fa43c48cdb3d3503578d78cef30417cb84"},
- {file = "onnxruntime-1.19.2-cp311-cp311-win32.whl", hash = "sha256:50cbb8dc69d6befad4746a69760e5b00cc3ff0a59c6c3fb27f8afa20e2cab7e7"},
- {file = "onnxruntime-1.19.2-cp311-cp311-win_amd64.whl", hash = "sha256:1c3e5d415b78337fa0b1b75291e9ea9fb2a4c1f148eb5811e7212fed02cfffa8"},
- {file = "onnxruntime-1.19.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:68e7051bef9cfefcbb858d2d2646536829894d72a4130c24019219442b1dd2ed"},
- {file = "onnxruntime-1.19.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d2d366fbcc205ce68a8a3bde2185fd15c604d9645888703785b61ef174265168"},
- {file = "onnxruntime-1.19.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:477b93df4db467e9cbf34051662a4b27c18e131fa1836e05974eae0d6e4cf29b"},
- {file = "onnxruntime-1.19.2-cp312-cp312-win32.whl", hash = "sha256:9a174073dc5608fad05f7cf7f320b52e8035e73d80b0a23c80f840e5a97c0147"},
- {file = "onnxruntime-1.19.2-cp312-cp312-win_amd64.whl", hash = "sha256:190103273ea4507638ffc31d66a980594b237874b65379e273125150eb044857"},
- {file = "onnxruntime-1.19.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:636bc1d4cc051d40bc52e1f9da87fbb9c57d9d47164695dfb1c41646ea51ea66"},
- {file = "onnxruntime-1.19.2-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5bd8b875757ea941cbcfe01582970cc299893d1b65bd56731e326a8333f638a3"},
- {file = "onnxruntime-1.19.2-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b2046fc9560f97947bbc1acbe4c6d48585ef0f12742744307d3364b131ac5778"},
- {file = "onnxruntime-1.19.2-cp38-cp38-win32.whl", hash = "sha256:31c12840b1cde4ac1f7d27d540c44e13e34f2345cf3642762d2a3333621abb6a"},
- {file = "onnxruntime-1.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:016229660adea180e9a32ce218b95f8f84860a200f0f13b50070d7d90e92956c"},
- {file = "onnxruntime-1.19.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:006c8d326835c017a9e9f74c9c77ebb570a71174a1e89fe078b29a557d9c3848"},
- {file = "onnxruntime-1.19.2-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df2a94179a42d530b936f154615b54748239c2908ee44f0d722cb4df10670f68"},
- {file = "onnxruntime-1.19.2-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fae4b4de45894b9ce7ae418c5484cbf0341db6813effec01bb2216091c52f7fb"},
- {file = "onnxruntime-1.19.2-cp39-cp39-win32.whl", hash = "sha256:dc5430f473e8706fff837ae01323be9dcfddd3ea471c900a91fa7c9b807ec5d3"},
- {file = "onnxruntime-1.19.2-cp39-cp39-win_amd64.whl", hash = "sha256:38475e29a95c5f6c62c2c603d69fc7d4c6ccbf4df602bd567b86ae1138881c49"},
+ {file = "onnxruntime-1.20.0-cp310-cp310-macosx_13_0_universal2.whl", hash = "sha256:2ac38bc6cbf7bb8527ded58711af6ef2c8c59d070f0fde58f83824422526922a"},
+ {file = "onnxruntime-1.20.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5cfd5a22abc11b273ec76fa773e22db19b749e27bf1ed05dd50d207f1817aae1"},
+ {file = "onnxruntime-1.20.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6b5daee2d03909b589f1a9ab24c325cc3c33ab7f736228158784fb1a97a92308"},
+ {file = "onnxruntime-1.20.0-cp310-cp310-win32.whl", hash = "sha256:e1eb08c13f91f830eb8df4f4e17a2a2652d1165f50bbed4f28f2afbf425c55d7"},
+ {file = "onnxruntime-1.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:cfcc1d21a12076bcc213441b405c48e1f21dedb36943e31eb93cb7a12b34678e"},
+ {file = "onnxruntime-1.20.0-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:3398354e9145c68edc09dbc72265401150027e76716ae758e8d9b52e6a7ddca0"},
+ {file = "onnxruntime-1.20.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a831b720d0a7be8241a230cb06f592e8bb66652d7cea54ce02d83769651fdee"},
+ {file = "onnxruntime-1.20.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:041fefe60af844ebd90f344c84f908201490555cd0a6d78dd0a7acdc27b59972"},
+ {file = "onnxruntime-1.20.0-cp311-cp311-win32.whl", hash = "sha256:83da64d2824809d0f6977db8bfc5091f742c26f09dfd66a3934e673780f5f87a"},
+ {file = "onnxruntime-1.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:bfa390046332f5fca6f8af8c9d17164621ac52e66b11518e187278b19364800c"},
+ {file = "onnxruntime-1.20.0-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:97c2b91bfea063f9c3457422d28a336bfd2859001cd880645adfa7184e29dd79"},
+ {file = "onnxruntime-1.20.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51e7b34e398089c4ed8d0f50722d7a64a4d5f11b38c4a42576458a03c6dbc72e"},
+ {file = "onnxruntime-1.20.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e259378ff2843321e0bf4552adcbee48822c91d77d42dde78b87dcdf10ad01f"},
+ {file = "onnxruntime-1.20.0-cp312-cp312-win32.whl", hash = "sha256:428abc1f7d8eb425887e2b7726044f2af7b5a098359455e7d2d92343f04ad0ff"},
+ {file = "onnxruntime-1.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:d5f23cbfeb546e16ffea81c28d2e796a53197fdc6c92540648e2aa53a7c7a637"},
+ {file = "onnxruntime-1.20.0-cp313-cp313-macosx_13_0_universal2.whl", hash = "sha256:95b91126bc3e1754868da1d3d2d08a7a10279b8ff5cea5e34e92fbe3fd691dcf"},
+ {file = "onnxruntime-1.20.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d57c10d7729347d6663f32b3f569f33d69a95e150d37ff6af4be9b9ab1ffdc25"},
+ {file = "onnxruntime-1.20.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b9c38735dac127d0eeb957ec312c8f1ae90ecae2779a55b2fa279aa7bd116cbd"},
+ {file = "onnxruntime-1.20.0-cp313-cp313-win_amd64.whl", hash = "sha256:25514cec4ea251d492aa1e38a7395d8801e64a4c940a154aef84cfad97ae4628"},
+ {file = "onnxruntime-1.20.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:640ad9ea72d322f0325a51544eddb54f4fa843c4348573c88a9cb44f46678f3f"},
+ {file = "onnxruntime-1.20.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dc4e7c10c98c1f407835448c26a7e14ebff3234f131e1fbc53bd9500c828df89"},
]
[package.dependencies]
@@ -6076,120 +6044,109 @@ kerberos = ["requests-kerberos"]
[[package]]
name = "opentelemetry-api"
-version = "1.27.0"
+version = "1.28.1"
description = "OpenTelemetry Python API"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7"},
- {file = "opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342"},
+ {file = "opentelemetry_api-1.28.1-py3-none-any.whl", hash = "sha256:bfe86c95576cf19a914497f439fd79c9553a38de0adbdc26f7cfc46b0c00b16c"},
+ {file = "opentelemetry_api-1.28.1.tar.gz", hash = "sha256:6fa7295a12c707f5aebef82da3d9ec5afe6992f3e42bfe7bec0339a44b3518e7"},
]
[package.dependencies]
deprecated = ">=1.2.6"
-importlib-metadata = ">=6.0,<=8.4.0"
-
-[[package]]
-name = "opentelemetry-exporter-otlp-proto-common"
-version = "1.27.0"
-description = "OpenTelemetry Protobuf encoding"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "opentelemetry_exporter_otlp_proto_common-1.27.0-py3-none-any.whl", hash = "sha256:675db7fffcb60946f3a5c43e17d1168a3307a94a930ecf8d2ea1f286f3d4f79a"},
- {file = "opentelemetry_exporter_otlp_proto_common-1.27.0.tar.gz", hash = "sha256:159d27cf49f359e3798c4c3eb8da6ef4020e292571bd8c5604a2a573231dd5c8"},
-]
-
-[package.dependencies]
-opentelemetry-proto = "1.27.0"
+importlib-metadata = ">=6.0,<=8.5.0"
[[package]]
name = "opentelemetry-exporter-otlp-proto-grpc"
-version = "1.27.0"
+version = "1.15.0"
description = "OpenTelemetry Collector Protobuf over gRPC Exporter"
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.7"
files = [
- {file = "opentelemetry_exporter_otlp_proto_grpc-1.27.0-py3-none-any.whl", hash = "sha256:56b5bbd5d61aab05e300d9d62a6b3c134827bbd28d0b12f2649c2da368006c9e"},
- {file = "opentelemetry_exporter_otlp_proto_grpc-1.27.0.tar.gz", hash = "sha256:af6f72f76bcf425dfb5ad11c1a6d6eca2863b91e63575f89bb7b4b55099d968f"},
+ {file = "opentelemetry_exporter_otlp_proto_grpc-1.15.0-py3-none-any.whl", hash = "sha256:c2a5492ba7d140109968135d641d06ce3c5bd73c50665f787526065d57d7fd1d"},
+ {file = "opentelemetry_exporter_otlp_proto_grpc-1.15.0.tar.gz", hash = "sha256:844f2a4bb9bcda34e4eb6fe36765e5031aacb36dc60ed88c90fc246942ea26e7"},
]
[package.dependencies]
-deprecated = ">=1.2.6"
+backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""}
googleapis-common-protos = ">=1.52,<2.0"
grpcio = ">=1.0.0,<2.0.0"
-opentelemetry-api = ">=1.15,<2.0"
-opentelemetry-exporter-otlp-proto-common = "1.27.0"
-opentelemetry-proto = "1.27.0"
-opentelemetry-sdk = ">=1.27.0,<1.28.0"
+opentelemetry-api = ">=1.12,<2.0"
+opentelemetry-proto = "1.15.0"
+opentelemetry-sdk = ">=1.12,<2.0"
+
+[package.extras]
+test = ["pytest-grpc"]
[[package]]
name = "opentelemetry-instrumentation"
-version = "0.48b0"
+version = "0.49b1"
description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_instrumentation-0.48b0-py3-none-any.whl", hash = "sha256:a69750dc4ba6a5c3eb67986a337185a25b739966d80479befe37b546fc870b44"},
- {file = "opentelemetry_instrumentation-0.48b0.tar.gz", hash = "sha256:94929685d906380743a71c3970f76b5f07476eea1834abd5dd9d17abfe23cc35"},
+ {file = "opentelemetry_instrumentation-0.49b1-py3-none-any.whl", hash = "sha256:0a9d3821736104013693ef3b8a9d29b41f2f3a81ee2d8c9288b52d62bae5747c"},
+ {file = "opentelemetry_instrumentation-0.49b1.tar.gz", hash = "sha256:2d0e41181b7957ba061bb436b969ad90545ac3eba65f290830009b4264d2824e"},
]
[package.dependencies]
opentelemetry-api = ">=1.4,<2.0"
-setuptools = ">=16.0"
+opentelemetry-semantic-conventions = "0.49b1"
+packaging = ">=18.0"
wrapt = ">=1.0.0,<2.0.0"
[[package]]
name = "opentelemetry-instrumentation-asgi"
-version = "0.48b0"
+version = "0.49b1"
description = "ASGI instrumentation for OpenTelemetry"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_instrumentation_asgi-0.48b0-py3-none-any.whl", hash = "sha256:ddb1b5fc800ae66e85a4e2eca4d9ecd66367a8c7b556169d9e7b57e10676e44d"},
- {file = "opentelemetry_instrumentation_asgi-0.48b0.tar.gz", hash = "sha256:04c32174b23c7fa72ddfe192dad874954968a6a924608079af9952964ecdf785"},
+ {file = "opentelemetry_instrumentation_asgi-0.49b1-py3-none-any.whl", hash = "sha256:8dcbc438cb138789fcb20ae38b6e7f23088e066d77b54bae205c5744856603c6"},
+ {file = "opentelemetry_instrumentation_asgi-0.49b1.tar.gz", hash = "sha256:d1a2b4cb76490be28bcad3c0f562c4b3c84157148c922ca298bb04ed9e36c005"},
]
[package.dependencies]
asgiref = ">=3.0,<4.0"
opentelemetry-api = ">=1.12,<2.0"
-opentelemetry-instrumentation = "0.48b0"
-opentelemetry-semantic-conventions = "0.48b0"
-opentelemetry-util-http = "0.48b0"
+opentelemetry-instrumentation = "0.49b1"
+opentelemetry-semantic-conventions = "0.49b1"
+opentelemetry-util-http = "0.49b1"
[package.extras]
instruments = ["asgiref (>=3.0,<4.0)"]
[[package]]
name = "opentelemetry-instrumentation-fastapi"
-version = "0.48b0"
+version = "0.49b1"
description = "OpenTelemetry FastAPI Instrumentation"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_instrumentation_fastapi-0.48b0-py3-none-any.whl", hash = "sha256:afeb820a59e139d3e5d96619600f11ce0187658b8ae9e3480857dd790bc024f2"},
- {file = "opentelemetry_instrumentation_fastapi-0.48b0.tar.gz", hash = "sha256:21a72563ea412c0b535815aeed75fc580240f1f02ebc72381cfab672648637a2"},
+ {file = "opentelemetry_instrumentation_fastapi-0.49b1-py3-none-any.whl", hash = "sha256:3398940102c8ef613b9c55fc4f179cc92413de456f6bec6eeb1995270de2b087"},
+ {file = "opentelemetry_instrumentation_fastapi-0.49b1.tar.gz", hash = "sha256:13d9d4d70b4bb831468b8e40807353731cad7fbfaeedde0070d93bcb2c417b07"},
]
[package.dependencies]
opentelemetry-api = ">=1.12,<2.0"
-opentelemetry-instrumentation = "0.48b0"
-opentelemetry-instrumentation-asgi = "0.48b0"
-opentelemetry-semantic-conventions = "0.48b0"
-opentelemetry-util-http = "0.48b0"
+opentelemetry-instrumentation = "0.49b1"
+opentelemetry-instrumentation-asgi = "0.49b1"
+opentelemetry-semantic-conventions = "0.49b1"
+opentelemetry-util-http = "0.49b1"
[package.extras]
instruments = ["fastapi (>=0.58,<1.0)"]
[[package]]
name = "opentelemetry-proto"
-version = "1.27.0"
+version = "1.15.0"
description = "OpenTelemetry Python Proto"
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.7"
files = [
- {file = "opentelemetry_proto-1.27.0-py3-none-any.whl", hash = "sha256:b133873de5581a50063e1e4b29cdcf0c5e253a8c2d8dc1229add20a4c3830ace"},
- {file = "opentelemetry_proto-1.27.0.tar.gz", hash = "sha256:33c9345d91dafd8a74fc3d7576c5a38f18b7fdf8d02983ac67485386132aedd6"},
+ {file = "opentelemetry_proto-1.15.0-py3-none-any.whl", hash = "sha256:044b6d044b4d10530f250856f933442b8753a17f94ae37c207607f733fb9a844"},
+ {file = "opentelemetry_proto-1.15.0.tar.gz", hash = "sha256:9c4008e40ac8cab359daac283fbe7002c5c29c77ea2674ad5626a249e64e0101"},
]
[package.dependencies]
@@ -6197,44 +6154,44 @@ protobuf = ">=3.19,<5.0"
[[package]]
name = "opentelemetry-sdk"
-version = "1.27.0"
+version = "1.28.1"
description = "OpenTelemetry Python SDK"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d"},
- {file = "opentelemetry_sdk-1.27.0.tar.gz", hash = "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f"},
+ {file = "opentelemetry_sdk-1.28.1-py3-none-any.whl", hash = "sha256:72aad7f5fcbe37113c4ab4899f6cdeb6ac77ed3e62f25a85e3627b12583dad0f"},
+ {file = "opentelemetry_sdk-1.28.1.tar.gz", hash = "sha256:100fa371b2046ffba6a340c18f0b2a0463acad7461e5177e126693b613a6ca57"},
]
[package.dependencies]
-opentelemetry-api = "1.27.0"
-opentelemetry-semantic-conventions = "0.48b0"
+opentelemetry-api = "1.28.1"
+opentelemetry-semantic-conventions = "0.49b1"
typing-extensions = ">=3.7.4"
[[package]]
name = "opentelemetry-semantic-conventions"
-version = "0.48b0"
+version = "0.49b1"
description = "OpenTelemetry Semantic Conventions"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f"},
- {file = "opentelemetry_semantic_conventions-0.48b0.tar.gz", hash = "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a"},
+ {file = "opentelemetry_semantic_conventions-0.49b1-py3-none-any.whl", hash = "sha256:dd6f3ac8169d2198c752e1a63f827e5f5e110ae9b0ce33f2aad9a3baf0739743"},
+ {file = "opentelemetry_semantic_conventions-0.49b1.tar.gz", hash = "sha256:91817883b159ffb94c2ca9548509c4fe0aafce7c24f437aa6ac3fc613aa9a758"},
]
[package.dependencies]
deprecated = ">=1.2.6"
-opentelemetry-api = "1.27.0"
+opentelemetry-api = "1.28.1"
[[package]]
name = "opentelemetry-util-http"
-version = "0.48b0"
+version = "0.49b1"
description = "Web util for OpenTelemetry"
optional = false
python-versions = ">=3.8"
files = [
- {file = "opentelemetry_util_http-0.48b0-py3-none-any.whl", hash = "sha256:76f598af93aab50328d2a69c786beaedc8b6a7770f7a818cc307eb353debfffb"},
- {file = "opentelemetry_util_http-0.48b0.tar.gz", hash = "sha256:60312015153580cc20f322e5cdc3d3ecad80a71743235bdb77716e742814623c"},
+ {file = "opentelemetry_util_http-0.49b1-py3-none-any.whl", hash = "sha256:0290b942f7888b6310df6803e52e12f4043b8f224db0659f62dc7b70059eb94f"},
+ {file = "opentelemetry_util_http-0.49b1.tar.gz", hash = "sha256:6c2bc6f7e20e286dbdfcccb9d895fa290ec9d7c596cdf2e06bf1d8e434b2edd0"},
]
[[package]]
@@ -6282,69 +6239,69 @@ cryptography = ">=3.2.1"
[[package]]
name = "orjson"
-version = "3.10.10"
+version = "3.10.11"
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
optional = false
python-versions = ">=3.8"
files = [
- {file = "orjson-3.10.10-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b788a579b113acf1c57e0a68e558be71d5d09aa67f62ca1f68e01117e550a998"},
- {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:804b18e2b88022c8905bb79bd2cbe59c0cd014b9328f43da8d3b28441995cda4"},
- {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9972572a1d042ec9ee421b6da69f7cc823da5962237563fa548ab17f152f0b9b"},
- {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc6993ab1c2ae7dd0711161e303f1db69062955ac2668181bfdf2dd410e65258"},
- {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d78e4cacced5781b01d9bc0f0cd8b70b906a0e109825cb41c1b03f9c41e4ce86"},
- {file = "orjson-3.10.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6eb2598df518281ba0cbc30d24c5b06124ccf7e19169e883c14e0831217a0bc"},
- {file = "orjson-3.10.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23776265c5215ec532de6238a52707048401a568f0fa0d938008e92a147fe2c7"},
- {file = "orjson-3.10.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8cc2a654c08755cef90b468ff17c102e2def0edd62898b2486767204a7f5cc9c"},
- {file = "orjson-3.10.10-cp310-none-win32.whl", hash = "sha256:081b3fc6a86d72efeb67c13d0ea7c030017bd95f9868b1e329a376edc456153b"},
- {file = "orjson-3.10.10-cp310-none-win_amd64.whl", hash = "sha256:ff38c5fb749347768a603be1fb8a31856458af839f31f064c5aa74aca5be9efe"},
- {file = "orjson-3.10.10-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:879e99486c0fbb256266c7c6a67ff84f46035e4f8749ac6317cc83dacd7f993a"},
- {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:019481fa9ea5ff13b5d5d95e6fd5ab25ded0810c80b150c2c7b1cc8660b662a7"},
- {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0dd57eff09894938b4c86d4b871a479260f9e156fa7f12f8cad4b39ea8028bb5"},
- {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dbde6d70cd95ab4d11ea8ac5e738e30764e510fc54d777336eec09bb93b8576c"},
- {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2625cb37b8fb42e2147404e5ff7ef08712099197a9cd38895006d7053e69d6"},
- {file = "orjson-3.10.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbf3c20c6a7db69df58672a0d5815647ecf78c8e62a4d9bd284e8621c1fe5ccb"},
- {file = "orjson-3.10.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:75c38f5647e02d423807d252ce4528bf6a95bd776af999cb1fb48867ed01d1f6"},
- {file = "orjson-3.10.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:23458d31fa50ec18e0ec4b0b4343730928296b11111df5f547c75913714116b2"},
- {file = "orjson-3.10.10-cp311-none-win32.whl", hash = "sha256:2787cd9dedc591c989f3facd7e3e86508eafdc9536a26ec277699c0aa63c685b"},
- {file = "orjson-3.10.10-cp311-none-win_amd64.whl", hash = "sha256:6514449d2c202a75183f807bc755167713297c69f1db57a89a1ef4a0170ee269"},
- {file = "orjson-3.10.10-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8564f48f3620861f5ef1e080ce7cd122ee89d7d6dacf25fcae675ff63b4d6e05"},
- {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5bf161a32b479034098c5b81f2608f09167ad2fa1c06abd4e527ea6bf4837a9"},
- {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68b65c93617bcafa7f04b74ae8bc2cc214bd5cb45168a953256ff83015c6747d"},
- {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8e28406f97fc2ea0c6150f4c1b6e8261453318930b334abc419214c82314f85"},
- {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4d0d9fe174cc7a5bdce2e6c378bcdb4c49b2bf522a8f996aa586020e1b96cee"},
- {file = "orjson-3.10.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3be81c42f1242cbed03cbb3973501fcaa2675a0af638f8be494eaf37143d999"},
- {file = "orjson-3.10.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:65f9886d3bae65be026219c0a5f32dbbe91a9e6272f56d092ab22561ad0ea33b"},
- {file = "orjson-3.10.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:730ed5350147db7beb23ddaf072f490329e90a1d059711d364b49fe352ec987b"},
- {file = "orjson-3.10.10-cp312-none-win32.whl", hash = "sha256:a8f4bf5f1c85bea2170800020d53a8877812892697f9c2de73d576c9307a8a5f"},
- {file = "orjson-3.10.10-cp312-none-win_amd64.whl", hash = "sha256:384cd13579a1b4cd689d218e329f459eb9ddc504fa48c5a83ef4889db7fd7a4f"},
- {file = "orjson-3.10.10-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44bffae68c291f94ff5a9b4149fe9d1bdd4cd0ff0fb575bcea8351d48db629a1"},
- {file = "orjson-3.10.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e27b4c6437315df3024f0835887127dac2a0a3ff643500ec27088d2588fa5ae1"},
- {file = "orjson-3.10.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca84df16d6b49325a4084fd8b2fe2229cb415e15c46c529f868c3387bb1339d"},
- {file = "orjson-3.10.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c14ce70e8f39bd71f9f80423801b5d10bf93d1dceffdecd04df0f64d2c69bc01"},
- {file = "orjson-3.10.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:24ac62336da9bda1bd93c0491eff0613003b48d3cb5d01470842e7b52a40d5b4"},
- {file = "orjson-3.10.10-cp313-none-win32.whl", hash = "sha256:eb0a42831372ec2b05acc9ee45af77bcaccbd91257345f93780a8e654efc75db"},
- {file = "orjson-3.10.10-cp313-none-win_amd64.whl", hash = "sha256:f0c4f37f8bf3f1075c6cc8dd8a9f843689a4b618628f8812d0a71e6968b95ffd"},
- {file = "orjson-3.10.10-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:829700cc18503efc0cf502d630f612884258020d98a317679cd2054af0259568"},
- {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0ceb5e0e8c4f010ac787d29ae6299846935044686509e2f0f06ed441c1ca949"},
- {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0c25908eb86968613216f3db4d3003f1c45d78eb9046b71056ca327ff92bdbd4"},
- {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:218cb0bc03340144b6328a9ff78f0932e642199ac184dd74b01ad691f42f93ff"},
- {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2277ec2cea3775640dc81ab5195bb5b2ada2fe0ea6eee4677474edc75ea6785"},
- {file = "orjson-3.10.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:848ea3b55ab5ccc9d7bbd420d69432628b691fba3ca8ae3148c35156cbd282aa"},
- {file = "orjson-3.10.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e3e67b537ac0c835b25b5f7d40d83816abd2d3f4c0b0866ee981a045287a54f3"},
- {file = "orjson-3.10.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:7948cfb909353fce2135dcdbe4521a5e7e1159484e0bb024c1722f272488f2b8"},
- {file = "orjson-3.10.10-cp38-none-win32.whl", hash = "sha256:78bee66a988f1a333dc0b6257503d63553b1957889c17b2c4ed72385cd1b96ae"},
- {file = "orjson-3.10.10-cp38-none-win_amd64.whl", hash = "sha256:f1d647ca8d62afeb774340a343c7fc023efacfd3a39f70c798991063f0c681dd"},
- {file = "orjson-3.10.10-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5a059afddbaa6dd733b5a2d76a90dbc8af790b993b1b5cb97a1176ca713b5df8"},
- {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f9b5c59f7e2a1a410f971c5ebc68f1995822837cd10905ee255f96074537ee6"},
- {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d5ef198bafdef4aa9d49a4165ba53ffdc0a9e1c7b6f76178572ab33118afea25"},
- {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf29ce0bb5d3320824ec3d1508652421000ba466abd63bdd52c64bcce9eb1fa"},
- {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dddd5516bcc93e723d029c1633ae79c4417477b4f57dad9bfeeb6bc0315e654a"},
- {file = "orjson-3.10.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12f2003695b10817f0fa8b8fca982ed7f5761dcb0d93cff4f2f9f6709903fd7"},
- {file = "orjson-3.10.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:672f9874a8a8fb9bb1b771331d31ba27f57702c8106cdbadad8bda5d10bc1019"},
- {file = "orjson-3.10.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1dcbb0ca5fafb2b378b2c74419480ab2486326974826bbf6588f4dc62137570a"},
- {file = "orjson-3.10.10-cp39-none-win32.whl", hash = "sha256:d9bbd3a4b92256875cb058c3381b782649b9a3c68a4aa9a2fff020c2f9cfc1be"},
- {file = "orjson-3.10.10-cp39-none-win_amd64.whl", hash = "sha256:766f21487a53aee8524b97ca9582d5c6541b03ab6210fbaf10142ae2f3ced2aa"},
- {file = "orjson-3.10.10.tar.gz", hash = "sha256:37949383c4df7b4337ce82ee35b6d7471e55195efa7dcb45ab8226ceadb0fe3b"},
+ {file = "orjson-3.10.11-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6dade64687f2bd7c090281652fe18f1151292d567a9302b34c2dbb92a3872f1f"},
+ {file = "orjson-3.10.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82f07c550a6ccd2b9290849b22316a609023ed851a87ea888c0456485a7d196a"},
+ {file = "orjson-3.10.11-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd9a187742d3ead9df2e49240234d728c67c356516cf4db018833a86f20ec18c"},
+ {file = "orjson-3.10.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77b0fed6f209d76c1c39f032a70df2d7acf24b1812ca3e6078fd04e8972685a3"},
+ {file = "orjson-3.10.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63fc9d5fe1d4e8868f6aae547a7b8ba0a2e592929245fff61d633f4caccdcdd6"},
+ {file = "orjson-3.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65cd3e3bb4fbb4eddc3c1e8dce10dc0b73e808fcb875f9fab40c81903dd9323e"},
+ {file = "orjson-3.10.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6f67c570602300c4befbda12d153113b8974a3340fdcf3d6de095ede86c06d92"},
+ {file = "orjson-3.10.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1f39728c7f7d766f1f5a769ce4d54b5aaa4c3f92d5b84817053cc9995b977acc"},
+ {file = "orjson-3.10.11-cp310-none-win32.whl", hash = "sha256:1789d9db7968d805f3d94aae2c25d04014aae3a2fa65b1443117cd462c6da647"},
+ {file = "orjson-3.10.11-cp310-none-win_amd64.whl", hash = "sha256:5576b1e5a53a5ba8f8df81872bb0878a112b3ebb1d392155f00f54dd86c83ff6"},
+ {file = "orjson-3.10.11-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:1444f9cb7c14055d595de1036f74ecd6ce15f04a715e73f33bb6326c9cef01b6"},
+ {file = "orjson-3.10.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdec57fe3b4bdebcc08a946db3365630332dbe575125ff3d80a3272ebd0ddafe"},
+ {file = "orjson-3.10.11-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4eed32f33a0ea6ef36ccc1d37f8d17f28a1d6e8eefae5928f76aff8f1df85e67"},
+ {file = "orjson-3.10.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80df27dd8697242b904f4ea54820e2d98d3f51f91e97e358fc13359721233e4b"},
+ {file = "orjson-3.10.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:705f03cee0cb797256d54de6695ef219e5bc8c8120b6654dd460848d57a9af3d"},
+ {file = "orjson-3.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03246774131701de8e7059b2e382597da43144a9a7400f178b2a32feafc54bd5"},
+ {file = "orjson-3.10.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8b5759063a6c940a69c728ea70d7c33583991c6982915a839c8da5f957e0103a"},
+ {file = "orjson-3.10.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:677f23e32491520eebb19c99bb34675daf5410c449c13416f7f0d93e2cf5f981"},
+ {file = "orjson-3.10.11-cp311-none-win32.whl", hash = "sha256:a11225d7b30468dcb099498296ffac36b4673a8398ca30fdaec1e6c20df6aa55"},
+ {file = "orjson-3.10.11-cp311-none-win_amd64.whl", hash = "sha256:df8c677df2f9f385fcc85ab859704045fa88d4668bc9991a527c86e710392bec"},
+ {file = "orjson-3.10.11-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:360a4e2c0943da7c21505e47cf6bd725588962ff1d739b99b14e2f7f3545ba51"},
+ {file = "orjson-3.10.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:496e2cb45de21c369079ef2d662670a4892c81573bcc143c4205cae98282ba97"},
+ {file = "orjson-3.10.11-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7dfa8db55c9792d53c5952900c6a919cfa377b4f4534c7a786484a6a4a350c19"},
+ {file = "orjson-3.10.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51f3382415747e0dbda9dade6f1e1a01a9d37f630d8c9049a8ed0e385b7a90c0"},
+ {file = "orjson-3.10.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f35a1b9f50a219f470e0e497ca30b285c9f34948d3c8160d5ad3a755d9299433"},
+ {file = "orjson-3.10.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2f3b7c5803138e67028dde33450e054c87e0703afbe730c105f1fcd873496d5"},
+ {file = "orjson-3.10.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f91d9eb554310472bd09f5347950b24442600594c2edc1421403d7610a0998fd"},
+ {file = "orjson-3.10.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dfbb2d460a855c9744bbc8e36f9c3a997c4b27d842f3d5559ed54326e6911f9b"},
+ {file = "orjson-3.10.11-cp312-none-win32.whl", hash = "sha256:d4a62c49c506d4d73f59514986cadebb7e8d186ad510c518f439176cf8d5359d"},
+ {file = "orjson-3.10.11-cp312-none-win_amd64.whl", hash = "sha256:f1eec3421a558ff7a9b010a6c7effcfa0ade65327a71bb9b02a1c3b77a247284"},
+ {file = "orjson-3.10.11-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c46294faa4e4d0eb73ab68f1a794d2cbf7bab33b1dda2ac2959ffb7c61591899"},
+ {file = "orjson-3.10.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52e5834d7d6e58a36846e059d00559cb9ed20410664f3ad156cd2cc239a11230"},
+ {file = "orjson-3.10.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2fc947e5350fdce548bfc94f434e8760d5cafa97fb9c495d2fef6757aa02ec0"},
+ {file = "orjson-3.10.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0efabbf839388a1dab5b72b5d3baedbd6039ac83f3b55736eb9934ea5494d258"},
+ {file = "orjson-3.10.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a3f29634260708c200c4fe148e42b4aae97d7b9fee417fbdd74f8cfc265f15b0"},
+ {file = "orjson-3.10.11-cp313-none-win32.whl", hash = "sha256:1a1222ffcee8a09476bbdd5d4f6f33d06d0d6642df2a3d78b7a195ca880d669b"},
+ {file = "orjson-3.10.11-cp313-none-win_amd64.whl", hash = "sha256:bc274ac261cc69260913b2d1610760e55d3c0801bb3457ba7b9004420b6b4270"},
+ {file = "orjson-3.10.11-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:19b3763e8bbf8ad797df6b6b5e0fc7c843ec2e2fc0621398534e0c6400098f87"},
+ {file = "orjson-3.10.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1be83a13312e5e58d633580c5eb8d0495ae61f180da2722f20562974188af205"},
+ {file = "orjson-3.10.11-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:afacfd1ab81f46dedd7f6001b6d4e8de23396e4884cd3c3436bd05defb1a6446"},
+ {file = "orjson-3.10.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cb4d0bea56bba596723d73f074c420aec3b2e5d7d30698bc56e6048066bd560c"},
+ {file = "orjson-3.10.11-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96ed1de70fcb15d5fed529a656df29f768187628727ee2788344e8a51e1c1350"},
+ {file = "orjson-3.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bfb30c891b530f3f80e801e3ad82ef150b964e5c38e1fb8482441c69c35c61c"},
+ {file = "orjson-3.10.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d496c74fc2b61341e3cefda7eec21b7854c5f672ee350bc55d9a4997a8a95204"},
+ {file = "orjson-3.10.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:655a493bac606655db9a47fe94d3d84fc7f3ad766d894197c94ccf0c5408e7d3"},
+ {file = "orjson-3.10.11-cp38-none-win32.whl", hash = "sha256:b9546b278c9fb5d45380f4809e11b4dd9844ca7aaf1134024503e134ed226161"},
+ {file = "orjson-3.10.11-cp38-none-win_amd64.whl", hash = "sha256:b592597fe551d518f42c5a2eb07422eb475aa8cfdc8c51e6da7054b836b26782"},
+ {file = "orjson-3.10.11-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c95f2ecafe709b4e5c733b5e2768ac569bed308623c85806c395d9cca00e08af"},
+ {file = "orjson-3.10.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80c00d4acded0c51c98754fe8218cb49cb854f0f7eb39ea4641b7f71732d2cb7"},
+ {file = "orjson-3.10.11-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:461311b693d3d0a060439aa669c74f3603264d4e7a08faa68c47ae5a863f352d"},
+ {file = "orjson-3.10.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52ca832f17d86a78cbab86cdc25f8c13756ebe182b6fc1a97d534051c18a08de"},
+ {file = "orjson-3.10.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c57ea78a753812f528178aa2f1c57da633754c91d2124cb28991dab4c79a54"},
+ {file = "orjson-3.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7fcfc6f7ca046383fb954ba528587e0f9336828b568282b27579c49f8e16aad"},
+ {file = "orjson-3.10.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:86b9dd983857970c29e4c71bb3e95ff085c07d3e83e7c46ebe959bac07ebd80b"},
+ {file = "orjson-3.10.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4d83f87582d223e54efb2242a79547611ba4ebae3af8bae1e80fa9a0af83bb7f"},
+ {file = "orjson-3.10.11-cp39-none-win32.whl", hash = "sha256:9fd0ad1c129bc9beb1154c2655f177620b5beaf9a11e0d10bac63ef3fce96950"},
+ {file = "orjson-3.10.11-cp39-none-win_amd64.whl", hash = "sha256:10f416b2a017c8bd17f325fb9dee1fb5cdd7a54e814284896b7c3f2763faa017"},
+ {file = "orjson-3.10.11.tar.gz", hash = "sha256:e35b6d730de6384d5b2dab5fd23f0d76fae8bbc8c353c2f78210aa5fa4beb3ef"},
]
[[package]]
@@ -6378,13 +6335,13 @@ files = [
[[package]]
name = "packaging"
-version = "24.1"
+version = "24.2"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.8"
files = [
- {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
- {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
+ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"},
+ {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"},
]
[[package]]
@@ -6501,12 +6458,12 @@ ppft = ">=1.7.6.9"
[[package]]
name = "peewee"
-version = "3.17.7"
+version = "3.17.8"
description = "a little orm"
optional = false
python-versions = "*"
files = [
- {file = "peewee-3.17.7.tar.gz", hash = "sha256:6aefc700bd530fc6ac23fa19c9c5b47041751d92985b799169c8e318e97eabaa"},
+ {file = "peewee-3.17.8.tar.gz", hash = "sha256:ce1d05db3438830b989a1b9d0d0aa4e7f6134d5f6fd57686eeaa26a3e6485a8c"},
]
[[package]]
@@ -6779,19 +6736,19 @@ dill = ["dill (>=0.3.9)"]
[[package]]
name = "primp"
-version = "0.6.5"
+version = "0.7.0"
description = "HTTP client that can impersonate web browsers, mimicking their headers and `TLS/JA3/JA4/HTTP2` fingerprints"
optional = false
python-versions = ">=3.8"
files = [
- {file = "primp-0.6.5-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b2bab0250d38c02a437c75ed94b99e3a8c03a281ba9a4c33780ccd04999c741b"},
- {file = "primp-0.6.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:0aedb33515d86df4c1f91b9d5772e1b74d1593dfe8978c258b136c171f8ab94c"},
- {file = "primp-0.6.5-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0e8850be30fbfefeb76c1eb5859a55c5f11c8c285a4a03ebf99c73fea964b2a"},
- {file = "primp-0.6.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9b71ac07a79cbb401390e2ee5a5767d0bf202a956a533fd084957020fcb2a64"},
- {file = "primp-0.6.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:79c65fcb07b36bd0f8c3966a4a18c4f6a6d624a33a0b0133b0f0cc8d0050c351"},
- {file = "primp-0.6.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5a55e450bb52a88f4a2891db50577c8f20b134d17d37e93361ee51de1a6fe8c8"},
- {file = "primp-0.6.5-cp38-abi3-win_amd64.whl", hash = "sha256:cbe584de5c177b9f0656b77e88721296ae6151b6c4565e2e0a342b6473990f27"},
- {file = "primp-0.6.5.tar.gz", hash = "sha256:abb46c579ae682f34c1f339faac38709c85ab76c056ec3711a26823334ab8124"},
+ {file = "primp-0.7.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8bb32497584610ca3082969ddc4c789d8e816f5a2f3f4aa0f194ed20047f5e16"},
+ {file = "primp-0.7.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:cca885c33171b3191fed91ae588031e79508a32799e15224f5143154769b27d7"},
+ {file = "primp-0.7.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b71ed550d393ca6cf28c04032dbd7ce8689b5b268f32ce569466f54a4212b3"},
+ {file = "primp-0.7.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3dcc4b0ded6bbaeec3dfe68406caf1aa8a090a6d4a0f1584268b77fb460874e8"},
+ {file = "primp-0.7.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:5fd2b78ef31c8492efff96ea9faebf1ae6635439454168138ee40b647fd5e97d"},
+ {file = "primp-0.7.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f8fdb7432fc28c71918964b3d8e4d204a8b06a1394813571e4cac4c1aab684b9"},
+ {file = "primp-0.7.0-cp38-abi3-win_amd64.whl", hash = "sha256:5d0523d457b6b2b40c525bc9dff641e00b01f1402492d1a98e77152e77f3ddad"},
+ {file = "primp-0.7.0.tar.gz", hash = "sha256:bef2c1f2e6386c4cc430758a5ddbaee7c5f730cea79e0c4fe69fd9b6a29d35d4"},
]
[package.extras]
@@ -7252,13 +7209,13 @@ semver = ["semver (>=3.0.2)"]
[[package]]
name = "pydantic-settings"
-version = "2.6.0"
+version = "2.6.1"
description = "Settings management using Pydantic"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pydantic_settings-2.6.0-py3-none-any.whl", hash = "sha256:4a819166f119b74d7f8c765196b165f95cc7487ce58ea27dec8a5a26be0970e0"},
- {file = "pydantic_settings-2.6.0.tar.gz", hash = "sha256:44a1804abffac9e6a30372bb45f6cafab945ef5af25e66b1c634c01dd39e0188"},
+ {file = "pydantic_settings-2.6.1-py3-none-any.whl", hash = "sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87"},
+ {file = "pydantic_settings-2.6.1.tar.gz", hash = "sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0"},
]
[package.dependencies]
@@ -7272,13 +7229,13 @@ yaml = ["pyyaml (>=6.0.1)"]
[[package]]
name = "pydash"
-version = "8.0.3"
+version = "8.0.4"
description = "The kitchen sink of Python utility libraries for doing \"stuff\" in a functional way. Based on the Lo-Dash Javascript library."
optional = false
python-versions = ">=3.8"
files = [
- {file = "pydash-8.0.3-py3-none-any.whl", hash = "sha256:c16871476822ee6b59b87e206dd27888240eff50a7b4cd72a4b80b43b6b994d7"},
- {file = "pydash-8.0.3.tar.gz", hash = "sha256:1b27cd3da05b72f0e5ff786c523afd82af796936462e631ffd1b228d91f8b9aa"},
+ {file = "pydash-8.0.4-py3-none-any.whl", hash = "sha256:59d0c9ca0d22b4f8bcfab01bfe2e89b49f4c9e9fa75961caf156094670260999"},
+ {file = "pydash-8.0.4.tar.gz", hash = "sha256:a33fb17b4b06c617da5c57c711605d2dc8723311ee5388c8371f87cd44bf4112"},
]
[package.dependencies]
@@ -7390,16 +7347,17 @@ rsa = ["cryptography"]
[[package]]
name = "pyobvector"
-version = "0.1.6"
+version = "0.1.13"
description = "A python SDK for OceanBase Vector Store, based on SQLAlchemy, compatible with Milvus API."
optional = false
python-versions = "<4.0,>=3.9"
files = [
- {file = "pyobvector-0.1.6-py3-none-any.whl", hash = "sha256:0d700e865a85b4716b9a03384189e49288cd9d5f3cef88aed4740bc82d5fd136"},
- {file = "pyobvector-0.1.6.tar.gz", hash = "sha256:05551addcac8c596992d5e38b480c83ca3481c6cfc6f56a1a1bddfb2e6ae037e"},
+ {file = "pyobvector-0.1.13-py3-none-any.whl", hash = "sha256:b6a9e7a4673aebeefe835e04f7474d2f2ef8b9c96982af41cf9ce6f3e3500fdb"},
+ {file = "pyobvector-0.1.13.tar.gz", hash = "sha256:e4b8f3ba3ad142cd7584b36278a38c0ef2fe7b6af142cdf5467d988e0737e03e"},
]
[package.dependencies]
+aiomysql = ">=0.2.0,<0.3.0"
numpy = ">=1.26.0,<2.0.0"
pymysql = ">=1.1.1,<2.0.0"
sqlalchemy = ">=2.0.32,<3.0.0"
@@ -7615,113 +7573,116 @@ dev = ["pre-commit", "pytest-asyncio", "tox"]
[[package]]
name = "python-calamine"
-version = "0.2.3"
+version = "0.3.1"
description = "Python binding for Rust's library for reading excel and odf file - calamine"
optional = false
python-versions = ">=3.8"
files = [
- {file = "python_calamine-0.2.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f292a03591b1cab1537424851b74baa33b0a55affc315248a7592ba3de1c3e83"},
- {file = "python_calamine-0.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6cfbd23d1147f53fd70fddfb38af2a98896ecad069c9a4120e77358a6fc43b39"},
- {file = "python_calamine-0.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:847373d0152bafd92b739c911de8c2d23e32ea93d9358bf32b58ed4ace382ae7"},
- {file = "python_calamine-0.2.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1e0dcdc796eb4b4907618392c4b71146812774ca30bf6162a711b63e54214912"},
- {file = "python_calamine-0.2.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2ee8250638ad174aa22a3776ebd41500cf88af62346f1c857505158d2685852"},
- {file = "python_calamine-0.2.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ac718eb8e9753b986f329aec5dea964005a79115c622a2671fccd0c563d345a"},
- {file = "python_calamine-0.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1baf404027779cb298d15939a5268eb3d477c86a7a8f4cad0734ea513876c2"},
- {file = "python_calamine-0.2.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc36a85f1a182e49fc318b3e91f06f390d3889ce8c843721cb03a68ca4c7e4ce"},
- {file = "python_calamine-0.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:11e2a74da47adc502c776e399972864802a20d358001a1cfaefb13c36a5116c0"},
- {file = "python_calamine-0.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f19c8eb9f2182cca54c274145b6c8409776b7c08ee5be8a61d44f0448dc55192"},
- {file = "python_calamine-0.2.3-cp310-none-win32.whl", hash = "sha256:37367f85282d87c0d9453cb3caec5a74f2720252bfbc1365d627e9fe12251e56"},
- {file = "python_calamine-0.2.3-cp310-none-win_amd64.whl", hash = "sha256:6d73ef3131b3a7c3894a533857b02fc50198fb65528cbf869742555d1497ee52"},
- {file = "python_calamine-0.2.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e5a36cca8b447295e9edddbe055857bdfdec56cb78554455a03bacd78e3c45a0"},
- {file = "python_calamine-0.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7b5b0803c70269d93b67c42f03e5711a7ba02166fd473a6cb89ef71632167154"},
- {file = "python_calamine-0.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73766349215f69854afb092ef891cb1ff253f4b6611342566c469b46516c6ada"},
- {file = "python_calamine-0.2.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3bf4cf41518541016b9442082360a83f3579955a872cfca5cec50acc3101cce5"},
- {file = "python_calamine-0.2.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f1f6dab7b44deed8cf7b45a6d6d2743b622ba5e21a8b73f52ef1064cc5e3638"},
- {file = "python_calamine-0.2.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1991261d40be3d577ce48c0884c6403aefd1cbef5dcc451e039746aa1d185931"},
- {file = "python_calamine-0.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f675e7f45d9e3f1430f3114701133432c279aba06442e743220f6b648023b5ee"},
- {file = "python_calamine-0.2.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bb7444454cff2c1ad44e7f1a1be776845cbad8f1210d868c7058d2183b3da74"},
- {file = "python_calamine-0.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7a604306cd5ceca720f0426deb49192f2ede5eedd1597b7ff4fa9659a36dc462"},
- {file = "python_calamine-0.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b95afd1a1cd3871d472aa117537b8731c1609756347874b251300cff152176a5"},
- {file = "python_calamine-0.2.3-cp311-none-win32.whl", hash = "sha256:a0ae5a740c9d97b2842d948a91f926a0fab278d247d816fe786219b94507c5a2"},
- {file = "python_calamine-0.2.3-cp311-none-win_amd64.whl", hash = "sha256:a32c64e74673fb0203ad877c6ba4832de7976fd31c79c637552b567d295ff6b5"},
- {file = "python_calamine-0.2.3-cp311-none-win_arm64.whl", hash = "sha256:f8c4c9e7ade09b4122c59e3e0da7e5fba872a0e47d3076702185a4ffdf99dec4"},
- {file = "python_calamine-0.2.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:40e5f75c4a7bb2105e3bd65e7b4656e085c6d86e46af1c56468a2f87c2ed639a"},
- {file = "python_calamine-0.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3557bdd36060db4929f42bf4c2c728a76af60ccc95d5c98f2110331d993a7299"},
- {file = "python_calamine-0.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa75b28686f9dc727d26a97b41c6a2a6ca1d2c679139b6199edbae2782e7c77"},
- {file = "python_calamine-0.2.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d2c8577b00e13f5f43b1c03a2eca01848c3b24467ebaf597729d1e483613c110"},
- {file = "python_calamine-0.2.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4639255202380251833a9ab75c077e687ebbef2120f54030b2dc46eb6ce43105"},
- {file = "python_calamine-0.2.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:583656c6a6e8efac8951cd72459e2d84eea5f2617214ebc7e1c96217b44a0fa1"},
- {file = "python_calamine-0.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68fc61b34a1d82d3eee2109d323268dd455107dfb639b027aa5c388e2781273c"},
- {file = "python_calamine-0.2.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64bb1f212275ed0288f578ee817e5cad4a063cfe5c38bf4c4dc6968957cb95b0"},
- {file = "python_calamine-0.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a7da299c1676dc34cd5f0adf93e92139afbfb832722d5d50a696ac180885aabb"},
- {file = "python_calamine-0.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:599752629ab0c5231159c5bea4f94795dd9b11a36c02dd5bd0613cf257ecd710"},
- {file = "python_calamine-0.2.3-cp312-none-win32.whl", hash = "sha256:fc73da2863c3251862583d64c0d07fe907f489a86a205e2b6ac94a39a1df1b42"},
- {file = "python_calamine-0.2.3-cp312-none-win_amd64.whl", hash = "sha256:a8d1662b4767f863c17ea4c1afc3c3fe3174d7b007ae77349d481e6792d142fe"},
- {file = "python_calamine-0.2.3-cp312-none-win_arm64.whl", hash = "sha256:87af11076364ade6f3da9e33993b6f55ec8dfd5f017129de688fd6d94d7bc24a"},
- {file = "python_calamine-0.2.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1ae98e1db1d3e74df08291f66d872bf7a4c47d96d39f8f589bff5dab873fbd13"},
- {file = "python_calamine-0.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bc270e8827191e7125600c97b61b3c78ec17d394820c2607c801f93c3475a0aa"},
- {file = "python_calamine-0.2.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c25b18eca7976aac0748fc122fa5109be66801d94b77a7676125fb825a8b67b9"},
- {file = "python_calamine-0.2.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:484330c0a917879afc615dc15e5ad925953a726f1a839ce3c35504a5befdae0c"},
- {file = "python_calamine-0.2.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c15ccb20f49eb6f824664ca8ec741edf09679977c2d41d13a02f0532f71a318b"},
- {file = "python_calamine-0.2.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19421a1b8a808333c39b03e007b74c85220700ceed1229449a21d51803d0671b"},
- {file = "python_calamine-0.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0cd8e3069c57a26eea5e6d3addb3dab812cc39b70f0cd11246d6f6592b7f293"},
- {file = "python_calamine-0.2.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d13822a6669a00da497394719a1fa63033ab79858fd653d330a6a7a681a5f6ce"},
- {file = "python_calamine-0.2.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:767db722eeb9c4d3847a87e4c3c4c9cc3e48938efaed4c507a5dd538a6bc5910"},
- {file = "python_calamine-0.2.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:4cac4095c25c64ef091fd994f62c5169f3ab0eec39c5bdbd0f319cac633b8183"},
- {file = "python_calamine-0.2.3-cp313-none-win32.whl", hash = "sha256:79aab3dc2c54525896b24002756e12fe09ec573efc2787285c244520bc17c39f"},
- {file = "python_calamine-0.2.3-cp313-none-win_amd64.whl", hash = "sha256:bd6606c893493eb555db5e63aef85b87fd806e6a0aa59bad0dbb591b88db2a0d"},
- {file = "python_calamine-0.2.3-cp313-none-win_arm64.whl", hash = "sha256:9f7b93851c941efba8387bb3c004437541230e8253230868204a079f1dacc21a"},
- {file = "python_calamine-0.2.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5fa0395816ecff641b5df7ee3a2a953fb0f449a88f780e1c8b762b94578fdb9c"},
- {file = "python_calamine-0.2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7397213b734e71434be06c3391ba9c23660215dc5e1c5601b8141f9f623fef84"},
- {file = "python_calamine-0.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be628b380f190b4140801731786f14d59d5a25c54398a724543181e6f46e71d3"},
- {file = "python_calamine-0.2.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d7fc182ebd15dd629d5c355207b125fd2301f109bc6cd2d91b1e67626fdbec1f"},
- {file = "python_calamine-0.2.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ae983b57379225f44102e0ff2f3724428174d0156ac42b1b69ed7f63ce105b1"},
- {file = "python_calamine-0.2.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98592f79f46cd2d74cd7f4e69ef2031a51138159a5852efe56fa5bc289c106b4"},
- {file = "python_calamine-0.2.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660347ae698f63f4a495b60411e913cfa448b149e7f51434934782559df6158f"},
- {file = "python_calamine-0.2.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fef87aa0b533c15e22ddb1bd6c257b3de9616c7a4ed3ca00c3c19e4cd8825d08"},
- {file = "python_calamine-0.2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:06ab4232827eed11f6a40ddca5dd9015fe73a10c1cf71a4ab2aa26e63f3d1ffb"},
- {file = "python_calamine-0.2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a6f64365bfc2cf6acefc3a618c7f25f64c317be3187d50dba3a2ccdbf405f911"},
- {file = "python_calamine-0.2.3-cp38-none-win32.whl", hash = "sha256:08b4b35d5943574ab44e87e4ccc2250f14ce7e8b34ad437ff95c1ae845823d0e"},
- {file = "python_calamine-0.2.3-cp38-none-win_amd64.whl", hash = "sha256:cd9b57326453be8ab52807cde90f3a61a008ed22a69489b41e9edbf66fb86a68"},
- {file = "python_calamine-0.2.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b439270ac6283a2e00abaae167ed35dececaa73f394bf5be8bf8631f3c9757fc"},
- {file = "python_calamine-0.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:38b6d1c315feaacfa95336f7d8d82bdc9fc75854ceae3dd003f075a4cf943582"},
- {file = "python_calamine-0.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:411812b0ffcf042be71408ae82b6fcc8dd70e2ee9ba8e8024a70242f7bce305e"},
- {file = "python_calamine-0.2.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4086c857d2cd1bf388bab6f18ca6ae453fb6618b8f3547e76447dc759b9a3a2a"},
- {file = "python_calamine-0.2.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6b43b8d0b556cb6e9fa9280cc6a61945fcef0005622590c45fa1471705476b5"},
- {file = "python_calamine-0.2.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce29ebf7b8bd978ef7aaf7755489f67f056327a53ef112a9b24c7a90970f9467"},
- {file = "python_calamine-0.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:042385ce2ba386ef72bd678ed44ee6d4a5de20c9561c3cd1ecd2a57bfdc874cc"},
- {file = "python_calamine-0.2.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e55fd471afd1c50ad88b442ef20c57d7efd38c7c300992708aa2cff943a29b9"},
- {file = "python_calamine-0.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4972a653bd54a4513e9419c26576429b391cdb4b417e7afa46469089ee7c10ee"},
- {file = "python_calamine-0.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:206524d140eb7d2999791afd4dfd62ceed531af3cfa487ff2b8b8fdc4b7c2b50"},
- {file = "python_calamine-0.2.3-cp39-none-win32.whl", hash = "sha256:e5a2c540d631343ba9f16be2afbb7b9fa187b3ced1b292ecc4cfcd51b8859bef"},
- {file = "python_calamine-0.2.3-cp39-none-win_amd64.whl", hash = "sha256:af65a13551d6575468d7cfcc61028df5d4218796dc4886419049e136148694e6"},
- {file = "python_calamine-0.2.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:10f28b56fb84bd622e23f32881fd17b07ab039e7f2cacdfb6101dce702e77970"},
- {file = "python_calamine-0.2.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d00cef2e12e4b6660b5fab13f936194263e7e11f707f7951b1867995278051df"},
- {file = "python_calamine-0.2.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7aebcbd105e49516dd1831f05a0ffca7c9b85f855bf3a9c68f9bc509a212e381"},
- {file = "python_calamine-0.2.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d5a9182590f5ad12e08a0ba9b72dfe0e6b1780ff95153926e2f4564a6018a14"},
- {file = "python_calamine-0.2.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2af3805806088acc7b4d766b58b03d08947a7100e1ef26e55509161adbb36201"},
- {file = "python_calamine-0.2.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5283e049cc36a0e2442f72d0c2c156dc1e7dc7ca48cba02d52c5cb223525b5c3"},
- {file = "python_calamine-0.2.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:9b7d0ef322f073099ea69e4a3db8c31ff4c4f7cdf4cd333f0577ab0c9320eaf5"},
- {file = "python_calamine-0.2.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0bcd07be6953efb08340ccb19b9ae0732b104a9e672edf1ffd2d6b3cc226d815"},
- {file = "python_calamine-0.2.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a8b12de6e2329643dd6b0a56570b853b94149ca7b1b323db3f69a06f61ec1e2"},
- {file = "python_calamine-0.2.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:cad27b0e491060dc72653ccd9288301120b23261e3e374f2401cc133547615d4"},
- {file = "python_calamine-0.2.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:303e2f2a1bdfaf428db7aca50d954667078c0cdf1b585ff090dfca2fac9107d7"},
- {file = "python_calamine-0.2.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a21187b6ebcdabdfe2113df11c2a522b9adc02bcf54bd3ba424ca8c6762cd9b"},
- {file = "python_calamine-0.2.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2773094cc62602f6bcc2acd8e905b3e2292daf6a6c24ddbc85f41065604fd9d4"},
- {file = "python_calamine-0.2.3-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6de5646a9ec3d24b5089ed174f4dcee13620e65e20dc463097c00e803c81f86f"},
- {file = "python_calamine-0.2.3-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e976c948ab18e9fee589994b68878381e1e393d870362babf9634258deb4f13b"},
- {file = "python_calamine-0.2.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:00fdfd24d13d8b04619dd933be4888bc6a70427e217fb179f3a1f71f2e377219"},
- {file = "python_calamine-0.2.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ab7d60482520508ebf00476cde1b97011084a2e73ac49b2ca32003547e7444c9"},
- {file = "python_calamine-0.2.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00c915fc67b0b4e1ddd000d374bd808d947f2ecb0f6051a4669a77abada4b7b8"},
- {file = "python_calamine-0.2.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c869fe1b568a2a970b13dd59a58a13a81a667aff2f365a95a577555585ff14bc"},
- {file = "python_calamine-0.2.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:602ebad70b176a41f22547d6bb99a6d32a531a11dbf74720f3984e6bf98c94ab"},
- {file = "python_calamine-0.2.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f6a7c4eb79803ee7cdfd00a0b8267c60c33f25da8bb9275f6168a4dd1a54db76"},
- {file = "python_calamine-0.2.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:68275fed9dcbe90a9185c9919980933e4feea925db178461f0cdb336a2587021"},
- {file = "python_calamine-0.2.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5efc667fd002db9482a7b9f2c70b41fa69c86e18206132be1a0adcad3c998c17"},
- {file = "python_calamine-0.2.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d2d845cbcd767c7b85c616849f0c6cd619662adb98d86af2a3fd8630d6acc48d"},
- {file = "python_calamine-0.2.3.tar.gz", hash = "sha256:d6b3858c3756629d9b4a166de0facfa6c8033fa0b73dcddd3d82144f3170c0dc"},
+ {file = "python_calamine-0.3.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2822c39ad52f289732981cee59b4985388624b54e124e41436bb37565ed32f15"},
+ {file = "python_calamine-0.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f2786751cfe4e81f9170b843741b39a325cf9f49db8d51fc3cd16d6139e0ac60"},
+ {file = "python_calamine-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086fc992232207164277fd0f1e463f59097637c849470890f903037fde4bf02d"},
+ {file = "python_calamine-0.3.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f42795617d23bb87b16761286c07e8407a9044823c972da5dea922f71a98445"},
+ {file = "python_calamine-0.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8dc27a41ebca543e5a0181b3edc223b83839c49063589583927de922887898a"},
+ {file = "python_calamine-0.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:400fd6e650bfedf1a9d79821e32f13aceb0362bbdaa2f37611177eb09cf77056"},
+ {file = "python_calamine-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6aec96ea676ec41789a6348137895b3827745d135c3c7f37769f75d417fb867"},
+ {file = "python_calamine-0.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:98808c087bbbfe4e858043fc0b953d326c8c70e73d0cd695c29a9bc7b3b0622b"},
+ {file = "python_calamine-0.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:78cd352976ba7324a2e7ab59188b3fac978b5f80d25e753b255dfec2d24076d9"},
+ {file = "python_calamine-0.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e1bfb191b5da6136887ca64deff427cae185d4d59333d1f1a8637db10ce8c3e"},
+ {file = "python_calamine-0.3.1-cp310-none-win32.whl", hash = "sha256:bd9616b355f47326ff4ae970f0a91a17976f316877a56ce3ef376ce58505e66c"},
+ {file = "python_calamine-0.3.1-cp310-none-win_amd64.whl", hash = "sha256:40354b04fb68e63659bb5f423534fe6f0b3e709be322c25c60158ac332b85ed3"},
+ {file = "python_calamine-0.3.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:7fee7306d015e2cb89bd69dc7b928bd947b65415e2cd72deb59a72c5603d0adb"},
+ {file = "python_calamine-0.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c860d5dc649b6be49a94ba07b1673f8dc9be0a89bc33cf13a5ea58998facdb12"},
+ {file = "python_calamine-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1df7ae7c29f96b6714cfebfd41666462970583b92ceb179b5ddd0d4556ea21ec"},
+ {file = "python_calamine-0.3.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:84bac53ba4872b795f808d1d30b51c74eac4a57dc8e4f96bba8140ccdeb320da"},
+ {file = "python_calamine-0.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e93ebe06fee0f10d43ede04691e80ab63366b00edc5eb873742779fdabe626e3"},
+ {file = "python_calamine-0.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23d04d73d2028c7171c63179f3b4d5679aa057db46e1e0058341b5af047474c4"},
+ {file = "python_calamine-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2005a4bd693dbaa74c96fdaa71a868c149ad376d309c4ad32fe80145216ad2"},
+ {file = "python_calamine-0.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c7ab2e26f124483308f1c0f580b01e3ad474ce3eb6a3acf0e0273247ea7b8b"},
+ {file = "python_calamine-0.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:86466870c1898b75503e752f7ea7b7a045253f1e106db9555071d225af4a1de8"},
+ {file = "python_calamine-0.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e6a4ef2435715694eeaea537f9578e33a90f68a5e9e697d98ae14d2aacf302cf"},
+ {file = "python_calamine-0.3.1-cp311-none-win32.whl", hash = "sha256:545c0cd8bc72a3341f81f9c46f12cad2ec9f3281360d2893a88a4a4a48f364dc"},
+ {file = "python_calamine-0.3.1-cp311-none-win_amd64.whl", hash = "sha256:90e848bb9a062185cdc697b93798e67475956ce466c122b477e34fc4548e2906"},
+ {file = "python_calamine-0.3.1-cp311-none-win_arm64.whl", hash = "sha256:455e813450eb03bbe3fc09c1324fbb5c367edf4ec0c7a58f81169e5f2008f27d"},
+ {file = "python_calamine-0.3.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:eacea175ba67dd04c0d718bcca0488261bd9eefff3b46ae68249e14d705e14a0"},
+ {file = "python_calamine-0.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c2a9a8021d7256e2a21949886d0fe5c67ae805d4b5f9a4d93b2ef971262e64d4"},
+ {file = "python_calamine-0.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65d0f0e7a3e554ba5672b9bd5f77b22dd3fc011fd30157c4e377c49b3d95d6d1"},
+ {file = "python_calamine-0.3.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1c3be559acbcec19d79ba07ae81276bbb8fadd474c790db14119a09fb36427fb"},
+ {file = "python_calamine-0.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:af0226e6826000d83a4ac34d81ae5217cc2baa54aecd76aac07091388bf739a1"},
+ {file = "python_calamine-0.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02b90d7e2e11c7449331d2cb744075fb47949d4e039983d6e6d9085950ad2642"},
+ {file = "python_calamine-0.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:732bb98dd393db80de1cd8a90e7d47dced929c7dea56194394d0fb7baf873fa7"},
+ {file = "python_calamine-0.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56bb60bf663c04e0a4cc801dfd5da3351820a002b4aea72a427603011633d35c"},
+ {file = "python_calamine-0.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b8974ee473e6337c9b52d6cab03a202dbe57e1500eb100d96adb6b0dfbff7390"},
+ {file = "python_calamine-0.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:90876d9b77429c8168d0e4c3ebe1dcf996130c5b0aecb3c6283f85645d4dd29a"},
+ {file = "python_calamine-0.3.1-cp312-none-win32.whl", hash = "sha256:17ab4ba8955206eba4a87c6bc0a805ffa9051f831c9f3d17a463d8a844beb197"},
+ {file = "python_calamine-0.3.1-cp312-none-win_amd64.whl", hash = "sha256:33ff20f6603fb3434630a00190022020102dc26b6def519d19a19a58a487a514"},
+ {file = "python_calamine-0.3.1-cp312-none-win_arm64.whl", hash = "sha256:808ff13261826b64b8313a53a83873cf46df4522cbca98fb66a85b543de68949"},
+ {file = "python_calamine-0.3.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:9401be43100552fb3a0d9a7392e207e4b4dfa0bc99c3f97613c0d703db0b191b"},
+ {file = "python_calamine-0.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b76875209d89227ea0666c346b5e007fa2ac9cc65b95b91551c4b715d9e6c7be"},
+ {file = "python_calamine-0.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2079ae2c434e28f1c9d17a2f4ea50d92e27d1373fc5908f1fd0c159f387e5b9"},
+ {file = "python_calamine-0.3.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:65c8986318846728d66ac2ce5dc017e79e6409ef17a48ca284d45f7d68a8ead0"},
+ {file = "python_calamine-0.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9504e43f4852265ab55044eb2835c270fda137a1ea35d5e4b7d3581d4ac830f4"},
+ {file = "python_calamine-0.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8eab37611b39cc8093e5671e5f8f8fc7f427459eabc21497f71659be61d5723"},
+ {file = "python_calamine-0.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28e226ff25510e62b57443029e5061dd42b551907a0a983f0e07e6c5e1facb4d"},
+ {file = "python_calamine-0.3.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:721a7bfe0d17c12dcf82886a17c9d1025983cfe61fade8c0d2a1b04bb4bd9980"},
+ {file = "python_calamine-0.3.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:1258cb82689ded64b73816fbcb3f02d139c8fd29676e9d451c0f81bb689a7076"},
+ {file = "python_calamine-0.3.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:919fe66fd3d3031585c4373a1dd0b128d3ceb0d79d21c8d0878e9ddee4d6b78a"},
+ {file = "python_calamine-0.3.1-cp313-none-win32.whl", hash = "sha256:a9df3902b279cb743baf857f29c1c7ed242caa7143c4fdf3a79f553801a662d9"},
+ {file = "python_calamine-0.3.1-cp313-none-win_amd64.whl", hash = "sha256:9f96654bceeb10e9ea9624eda857790e1a601593212fc174cb84d1568f12b5e4"},
+ {file = "python_calamine-0.3.1-cp313-none-win_arm64.whl", hash = "sha256:e83bd84617400bbca9907f0a44c6eccaeca7bd011791950c181e402992b8cc26"},
+ {file = "python_calamine-0.3.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b37166dcf7d7706e0ca3cd6e21a138120f69f1697ea5c9e22b29daac36d02f1b"},
+ {file = "python_calamine-0.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:885c668ad97c637a76b18d63d242cafe16629ed4912044c508a2a34e12c08892"},
+ {file = "python_calamine-0.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50c8462add6488c196925ceee73c11775bf7323c88dbf3be6591f49c5e179d71"},
+ {file = "python_calamine-0.3.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfe3ae2e73de00310835495166d16a8de27e49b846923e04f3462d100b964d2f"},
+ {file = "python_calamine-0.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a46c344077da8709163c75441ab61b5833e5f83e2586c4d63ad525987032c314"},
+ {file = "python_calamine-0.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8abd68d8d79da7b5316214c9b065c790538a3e0278b7bc278b5395a41330b6a"},
+ {file = "python_calamine-0.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11727fd075f0c184ef7655659794fc060a138c9ff4d2c5ac66e0d067aa8526f0"},
+ {file = "python_calamine-0.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fd08e245a7c2676887e548d7a86a909bdc167a3c582f10937f2f55e7216a7305"},
+ {file = "python_calamine-0.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6ad2ae302ec13c1610170f0953b6c7accf7b26384b0a3813987e16ec78b59982"},
+ {file = "python_calamine-0.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38975ba9b8b8bbe86ab9d384500b0555af6ede8bd328af77cc3d99484bd0e303"},
+ {file = "python_calamine-0.3.1-cp38-none-win32.whl", hash = "sha256:68b8cd5f6aceb56e5eb424830493210d478926e36951ccafe2dad15b440da167"},
+ {file = "python_calamine-0.3.1-cp38-none-win_amd64.whl", hash = "sha256:f2d270c4eb15971eb5e2e87183470f7eafb1307d6df15253a6cff7c5649ffe04"},
+ {file = "python_calamine-0.3.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:dcf5ffd5a63b806de03629c2f25585e455aa245d6e0fd78e7a85dff79d16b6e7"},
+ {file = "python_calamine-0.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:38c5ff0b9696fe4deec98e8135f33eeee49e302bcfa2ffcc4abe15cb1f8e8054"},
+ {file = "python_calamine-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58a3331cc70e7496592c9c559fa89a7451db56a200d754e416edb51b9e888a41"},
+ {file = "python_calamine-0.3.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6493fb0fbf766a380d0741c11c6c52b1a06d1917a8e7551f9d560324ca757b82"},
+ {file = "python_calamine-0.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94f9cd55efdda69352de6679d737d41dfcb1fdb5b8e5c512e0b83fe6b5f3796c"},
+ {file = "python_calamine-0.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79b2a774944b4ed084d9a677cbf88372f725449a260fd134ab2b3422ef2d4a5d"},
+ {file = "python_calamine-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c96c5cfd02f4a20a2df51ff6732839d3f4a4a781e9e904a85191aaeb8fce2870"},
+ {file = "python_calamine-0.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:edf8d83c7369b18d4d42e9e2ccc52143bdbf27a326877bf3fc6fc56c78655372"},
+ {file = "python_calamine-0.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2252b25e8fd992f10e916fb4eddc504a58839a1e67f32238bba803ecf16ce7c4"},
+ {file = "python_calamine-0.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:26d870656fbe1c3767483f3162359765738adab58915157c55afff4dfc32e9e9"},
+ {file = "python_calamine-0.3.1-cp39-none-win32.whl", hash = "sha256:28c1394a00bd218ce4223f8f8019fd2c1878f1a01ad47be289964d281fef4dac"},
+ {file = "python_calamine-0.3.1-cp39-none-win_amd64.whl", hash = "sha256:529c36520924b16f398e25e78fcd4ea14fdcd95f383db360d107e075628d6941"},
+ {file = "python_calamine-0.3.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:4dbe8d5f27889dfcd03d9ad99a9f392b6c0af41dbc287ac4738804c31c99750a"},
+ {file = "python_calamine-0.3.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9bc553349095b3104708cd1eb345445426400de105df7ede3d5054b0ecfa74e9"},
+ {file = "python_calamine-0.3.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f12fa42ea6c7750f994a1a9674414dfd25adb3e61ad570382c05a84e4e8e949e"},
+ {file = "python_calamine-0.3.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbac293a3c4c98e988e564f13820874c6ac02114cef5698a03b8146bd9566ef7"},
+ {file = "python_calamine-0.3.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a571bab6528504cdb99187f4e6a5a64c7ccb065ee1416b9e10c1f416d331aae5"},
+ {file = "python_calamine-0.3.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:49d2acf3def8ecbabb132b537501bb639ca9d52548fd7058d5da7fa9fdbd1b45"},
+ {file = "python_calamine-0.3.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e71ee834988033d3f8254713423ce5232ffe964f1bb2fdc3383f407b8a52dab9"},
+ {file = "python_calamine-0.3.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:139afbf6a23c33c55ce0144e15e89e03e333a59b4864a2e1e0c764cd33390414"},
+ {file = "python_calamine-0.3.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ea28ebd4d347c13c6acc787dba1fb0f626188469f861a2fa9cd057fa689161e2"},
+ {file = "python_calamine-0.3.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:39d51754c4375b34b58b6036780ee022db80b54a29fbfc577c785da8dfc358f8"},
+ {file = "python_calamine-0.3.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df72ff860dbd9e659a2a3e77a76a89356eea4ebaaa44b6fc4b84cab76e8e5313"},
+ {file = "python_calamine-0.3.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cef919d074235843c5b27f493a645457c0edd9c4f19de3d3187d5cbfad3cf849"},
+ {file = "python_calamine-0.3.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7aa81f93809e1a0b7ad289168444878ffd0c72ffe500efca7ea7a2778df812d4"},
+ {file = "python_calamine-0.3.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:853aab3a19015c49e24892c145646b59719eeb3c71c0582e0af83379d84977a6"},
+ {file = "python_calamine-0.3.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:79d8f506b917e5c1ec75e3b595181416ebe1cc809addf952a23e170606984709"},
+ {file = "python_calamine-0.3.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a4497c11d412e6df3b85a1fde2110f797ff5b2d739ff79fc50ef62476620a27c"},
+ {file = "python_calamine-0.3.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dba960d7668ea7c699f5d68f0a8f7c3f9573fbec26a9db4219cb976c8b751384"},
+ {file = "python_calamine-0.3.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:41a204b59696cae066f399d7a69637e89d1bd34562d411c96108e3675ab57521"},
+ {file = "python_calamine-0.3.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07ef8398036a411896edc6de30eb71a0dcbad61657238525c1c875c089e2a275"},
+ {file = "python_calamine-0.3.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21775f97acbfe40182bb17c256e2f8ce0c787a30b86f09a6786bc4530b17c94b"},
+ {file = "python_calamine-0.3.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d4cdd57ebb563e9bc97501b4eaa7ed3545628d8a0ac482e8903894d80332d506"},
+ {file = "python_calamine-0.3.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:af1e60a711d41e24a24917fe41f98ab36adbcb6f5f85af8a0c895defb5de654f"},
+ {file = "python_calamine-0.3.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1faf371a69da8e364d1391cca3a58e46b3aa181e7202ac6452d09f37d3b99f97"},
+ {file = "python_calamine-0.3.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d5ddc96b67f805b3cb27e21d070cee6d269d9fd3a3cb6d6f2a30bc44f848d0f7"},
+ {file = "python_calamine-0.3.1.tar.gz", hash = "sha256:4171fadf4a2db1b1ed84536fb2f16ea14bde894d690ff321a85e27df26286b37"},
]
+[package.dependencies]
+packaging = ">=24.1,<25.0"
+
[[package]]
name = "python-dateutil"
version = "2.8.2"
@@ -8175,105 +8136,105 @@ rpds-py = ">=0.7.0"
[[package]]
name = "regex"
-version = "2024.9.11"
+version = "2024.11.6"
description = "Alternative regular expression module, to replace re."
optional = false
python-versions = ">=3.8"
files = [
- {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1494fa8725c285a81d01dc8c06b55287a1ee5e0e382d8413adc0a9197aac6408"},
- {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0e12c481ad92d129c78f13a2a3662317e46ee7ef96c94fd332e1c29131875b7d"},
- {file = "regex-2024.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16e13a7929791ac1216afde26f712802e3df7bf0360b32e4914dca3ab8baeea5"},
- {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46989629904bad940bbec2106528140a218b4a36bb3042d8406980be1941429c"},
- {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a906ed5e47a0ce5f04b2c981af1c9acf9e8696066900bf03b9d7879a6f679fc8"},
- {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a091b0550b3b0207784a7d6d0f1a00d1d1c8a11699c1a4d93db3fbefc3ad35"},
- {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ddcd9a179c0a6fa8add279a4444015acddcd7f232a49071ae57fa6e278f1f71"},
- {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b41e1adc61fa347662b09398e31ad446afadff932a24807d3ceb955ed865cc8"},
- {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ced479f601cd2f8ca1fd7b23925a7e0ad512a56d6e9476f79b8f381d9d37090a"},
- {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:635a1d96665f84b292e401c3d62775851aedc31d4f8784117b3c68c4fcd4118d"},
- {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c0256beda696edcf7d97ef16b2a33a8e5a875affd6fa6567b54f7c577b30a137"},
- {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ce4f1185db3fbde8ed8aa223fc9620f276c58de8b0d4f8cc86fd1360829edb6"},
- {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:09d77559e80dcc9d24570da3745ab859a9cf91953062e4ab126ba9d5993688ca"},
- {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a22ccefd4db3f12b526eccb129390942fe874a3a9fdbdd24cf55773a1faab1a"},
- {file = "regex-2024.9.11-cp310-cp310-win32.whl", hash = "sha256:f745ec09bc1b0bd15cfc73df6fa4f726dcc26bb16c23a03f9e3367d357eeedd0"},
- {file = "regex-2024.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:01c2acb51f8a7d6494c8c5eafe3d8e06d76563d8a8a4643b37e9b2dd8a2ff623"},
- {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2cce2449e5927a0bf084d346da6cd5eb016b2beca10d0013ab50e3c226ffc0df"},
- {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b37fa423beefa44919e009745ccbf353d8c981516e807995b2bd11c2c77d268"},
- {file = "regex-2024.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64ce2799bd75039b480cc0360907c4fb2f50022f030bf9e7a8705b636e408fad"},
- {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4cc92bb6db56ab0c1cbd17294e14f5e9224f0cc6521167ef388332604e92679"},
- {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d05ac6fa06959c4172eccd99a222e1fbf17b5670c4d596cb1e5cde99600674c4"},
- {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:040562757795eeea356394a7fb13076ad4f99d3c62ab0f8bdfb21f99a1f85664"},
- {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6113c008a7780792efc80f9dfe10ba0cd043cbf8dc9a76ef757850f51b4edc50"},
- {file = "regex-2024.9.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e5fb5f77c8745a60105403a774fe2c1759b71d3e7b4ca237a5e67ad066c7199"},
- {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:54d9ff35d4515debf14bc27f1e3b38bfc453eff3220f5bce159642fa762fe5d4"},
- {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:df5cbb1fbc74a8305b6065d4ade43b993be03dbe0f8b30032cced0d7740994bd"},
- {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7fb89ee5d106e4a7a51bce305ac4efb981536301895f7bdcf93ec92ae0d91c7f"},
- {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a738b937d512b30bf75995c0159c0ddf9eec0775c9d72ac0202076c72f24aa96"},
- {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e28f9faeb14b6f23ac55bfbbfd3643f5c7c18ede093977f1df249f73fd22c7b1"},
- {file = "regex-2024.9.11-cp311-cp311-win32.whl", hash = "sha256:18e707ce6c92d7282dfce370cd205098384b8ee21544e7cb29b8aab955b66fa9"},
- {file = "regex-2024.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:313ea15e5ff2a8cbbad96ccef6be638393041b0a7863183c2d31e0c6116688cf"},
- {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b0d0a6c64fcc4ef9c69bd5b3b3626cc3776520a1637d8abaa62b9edc147a58f7"},
- {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:49b0e06786ea663f933f3710a51e9385ce0cba0ea56b67107fd841a55d56a231"},
- {file = "regex-2024.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5b513b6997a0b2f10e4fd3a1313568e373926e8c252bd76c960f96fd039cd28d"},
- {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee439691d8c23e76f9802c42a95cfeebf9d47cf4ffd06f18489122dbb0a7ad64"},
- {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8f877c89719d759e52783f7fe6e1c67121076b87b40542966c02de5503ace42"},
- {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23b30c62d0f16827f2ae9f2bb87619bc4fba2044911e2e6c2eb1af0161cdb766"},
- {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ab7824093d8f10d44330fe1e6493f756f252d145323dd17ab6b48733ff6c0a"},
- {file = "regex-2024.9.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8dee5b4810a89447151999428fe096977346cf2f29f4d5e29609d2e19e0199c9"},
- {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98eeee2f2e63edae2181c886d7911ce502e1292794f4c5ee71e60e23e8d26b5d"},
- {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:57fdd2e0b2694ce6fc2e5ccf189789c3e2962916fb38779d3e3521ff8fe7a822"},
- {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d552c78411f60b1fdaafd117a1fca2f02e562e309223b9d44b7de8be451ec5e0"},
- {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a0b2b80321c2ed3fcf0385ec9e51a12253c50f146fddb2abbb10f033fe3d049a"},
- {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:18406efb2f5a0e57e3a5881cd9354c1512d3bb4f5c45d96d110a66114d84d23a"},
- {file = "regex-2024.9.11-cp312-cp312-win32.whl", hash = "sha256:e464b467f1588e2c42d26814231edecbcfe77f5ac414d92cbf4e7b55b2c2a776"},
- {file = "regex-2024.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:9e8719792ca63c6b8340380352c24dcb8cd7ec49dae36e963742a275dfae6009"},
- {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c157bb447303070f256e084668b702073db99bbb61d44f85d811025fcf38f784"},
- {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4db21ece84dfeefc5d8a3863f101995de646c6cb0536952c321a2650aa202c36"},
- {file = "regex-2024.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:220e92a30b426daf23bb67a7962900ed4613589bab80382be09b48896d211e92"},
- {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1ae19e64c14c7ec1995f40bd932448713d3c73509e82d8cd7744dc00e29e86"},
- {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f47cd43a5bfa48f86925fe26fbdd0a488ff15b62468abb5d2a1e092a4fb10e85"},
- {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d4a76b96f398697fe01117093613166e6aa8195d63f1b4ec3f21ab637632963"},
- {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ea51dcc0835eea2ea31d66456210a4e01a076d820e9039b04ae8d17ac11dee6"},
- {file = "regex-2024.9.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7aaa315101c6567a9a45d2839322c51c8d6e81f67683d529512f5bcfb99c802"},
- {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c57d08ad67aba97af57a7263c2d9006d5c404d721c5f7542f077f109ec2a4a29"},
- {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8404bf61298bb6f8224bb9176c1424548ee1181130818fcd2cbffddc768bed8"},
- {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dd4490a33eb909ef5078ab20f5f000087afa2a4daa27b4c072ccb3cb3050ad84"},
- {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:eee9130eaad130649fd73e5cd92f60e55708952260ede70da64de420cdcad554"},
- {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a2644a93da36c784e546de579ec1806bfd2763ef47babc1b03d765fe560c9f8"},
- {file = "regex-2024.9.11-cp313-cp313-win32.whl", hash = "sha256:e997fd30430c57138adc06bba4c7c2968fb13d101e57dd5bb9355bf8ce3fa7e8"},
- {file = "regex-2024.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:042c55879cfeb21a8adacc84ea347721d3d83a159da6acdf1116859e2427c43f"},
- {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:35f4a6f96aa6cb3f2f7247027b07b15a374f0d5b912c0001418d1d55024d5cb4"},
- {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:55b96e7ce3a69a8449a66984c268062fbaa0d8ae437b285428e12797baefce7e"},
- {file = "regex-2024.9.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb130fccd1a37ed894824b8c046321540263013da72745d755f2d35114b81a60"},
- {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:323c1f04be6b2968944d730e5c2091c8c89767903ecaa135203eec4565ed2b2b"},
- {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be1c8ed48c4c4065ecb19d882a0ce1afe0745dfad8ce48c49586b90a55f02366"},
- {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5b029322e6e7b94fff16cd120ab35a253236a5f99a79fb04fda7ae71ca20ae8"},
- {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6fff13ef6b5f29221d6904aa816c34701462956aa72a77f1f151a8ec4f56aeb"},
- {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:587d4af3979376652010e400accc30404e6c16b7df574048ab1f581af82065e4"},
- {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:079400a8269544b955ffa9e31f186f01d96829110a3bf79dc338e9910f794fca"},
- {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f9268774428ec173654985ce55fc6caf4c6d11ade0f6f914d48ef4719eb05ebb"},
- {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:23f9985c8784e544d53fc2930fc1ac1a7319f5d5332d228437acc9f418f2f168"},
- {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2941333154baff9838e88aa71c1d84f4438189ecc6021a12c7573728b5838e"},
- {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e93f1c331ca8e86fe877a48ad64e77882c0c4da0097f2212873a69bbfea95d0c"},
- {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:846bc79ee753acf93aef4184c040d709940c9d001029ceb7b7a52747b80ed2dd"},
- {file = "regex-2024.9.11-cp38-cp38-win32.whl", hash = "sha256:c94bb0a9f1db10a1d16c00880bdebd5f9faf267273b8f5bd1878126e0fbde771"},
- {file = "regex-2024.9.11-cp38-cp38-win_amd64.whl", hash = "sha256:2b08fce89fbd45664d3df6ad93e554b6c16933ffa9d55cb7e01182baaf971508"},
- {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:07f45f287469039ffc2c53caf6803cd506eb5f5f637f1d4acb37a738f71dd066"},
- {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4838e24ee015101d9f901988001038f7f0d90dc0c3b115541a1365fb439add62"},
- {file = "regex-2024.9.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6edd623bae6a737f10ce853ea076f56f507fd7726bee96a41ee3d68d347e4d16"},
- {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c69ada171c2d0e97a4b5aa78fbb835e0ffbb6b13fc5da968c09811346564f0d3"},
- {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02087ea0a03b4af1ed6ebab2c54d7118127fee8d71b26398e8e4b05b78963199"},
- {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69dee6a020693d12a3cf892aba4808fe168d2a4cef368eb9bf74f5398bfd4ee8"},
- {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297f54910247508e6e5cae669f2bc308985c60540a4edd1c77203ef19bfa63ca"},
- {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ecea58b43a67b1b79805f1a0255730edaf5191ecef84dbc4cc85eb30bc8b63b9"},
- {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eab4bb380f15e189d1313195b062a6aa908f5bd687a0ceccd47c8211e9cf0d4a"},
- {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0cbff728659ce4bbf4c30b2a1be040faafaa9eca6ecde40aaff86f7889f4ab39"},
- {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:54c4a097b8bc5bb0dfc83ae498061d53ad7b5762e00f4adaa23bee22b012e6ba"},
- {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:73d6d2f64f4d894c96626a75578b0bf7d9e56dcda8c3d037a2118fdfe9b1c664"},
- {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:e53b5fbab5d675aec9f0c501274c467c0f9a5d23696cfc94247e1fb56501ed89"},
- {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ffbcf9221e04502fc35e54d1ce9567541979c3fdfb93d2c554f0ca583a19b35"},
- {file = "regex-2024.9.11-cp39-cp39-win32.whl", hash = "sha256:e4c22e1ac1f1ec1e09f72e6c44d8f2244173db7eb9629cc3a346a8d7ccc31142"},
- {file = "regex-2024.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:faa3c142464efec496967359ca99696c896c591c56c53506bac1ad465f66e919"},
- {file = "regex-2024.9.11.tar.gz", hash = "sha256:6c188c307e8433bcb63dc1915022deb553b4203a70722fc542c363bf120a01fd"},
+ {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"},
+ {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"},
+ {file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"},
+ {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"},
+ {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"},
+ {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"},
+ {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"},
+ {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"},
+ {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"},
+ {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"},
+ {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"},
+ {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"},
+ {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"},
+ {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"},
+ {file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"},
+ {file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"},
+ {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"},
+ {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"},
+ {file = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"},
+ {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"},
+ {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"},
+ {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"},
+ {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"},
+ {file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"},
+ {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"},
+ {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"},
+ {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"},
+ {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"},
+ {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"},
+ {file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"},
+ {file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"},
+ {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"},
+ {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"},
+ {file = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"},
+ {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"},
+ {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"},
+ {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"},
+ {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"},
+ {file = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"},
+ {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"},
+ {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"},
+ {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"},
+ {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"},
+ {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"},
+ {file = "regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"},
+ {file = "regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"},
+ {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"},
+ {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"},
+ {file = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"},
+ {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"},
+ {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"},
+ {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"},
+ {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"},
+ {file = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"},
+ {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"},
+ {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"},
+ {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"},
+ {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"},
+ {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"},
+ {file = "regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"},
+ {file = "regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"},
+ {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a51ccc315653ba012774efca4f23d1d2a8a8f278a6072e29c7147eee7da446b"},
+ {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ad182d02e40de7459b73155deb8996bbd8e96852267879396fb274e8700190e3"},
+ {file = "regex-2024.11.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba9b72e5643641b7d41fa1f6d5abda2c9a263ae835b917348fc3c928182ad467"},
+ {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40291b1b89ca6ad8d3f2b82782cc33807f1406cf68c8d440861da6304d8ffbbd"},
+ {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf58d0e516ee426a48f7b2c03a332a4114420716d55769ff7108c37a09951bf"},
+ {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a36fdf2af13c2b14738f6e973aba563623cb77d753bbbd8d414d18bfaa3105dd"},
+ {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cee317bfc014c2419a76bcc87f071405e3966da434e03e13beb45f8aced1a6"},
+ {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50153825ee016b91549962f970d6a4442fa106832e14c918acd1c8e479916c4f"},
+ {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea1bfda2f7162605f6e8178223576856b3d791109f15ea99a9f95c16a7636fb5"},
+ {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:df951c5f4a1b1910f1a99ff42c473ff60f8225baa1cdd3539fe2819d9543e9df"},
+ {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:072623554418a9911446278f16ecb398fb3b540147a7828c06e2011fa531e773"},
+ {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f654882311409afb1d780b940234208a252322c24a93b442ca714d119e68086c"},
+ {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:89d75e7293d2b3e674db7d4d9b1bee7f8f3d1609428e293771d1a962617150cc"},
+ {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f65557897fc977a44ab205ea871b690adaef6b9da6afda4790a2484b04293a5f"},
+ {file = "regex-2024.11.6-cp38-cp38-win32.whl", hash = "sha256:6f44ec28b1f858c98d3036ad5d7d0bfc568bdd7a74f9c24e25f41ef1ebfd81a4"},
+ {file = "regex-2024.11.6-cp38-cp38-win_amd64.whl", hash = "sha256:bb8f74f2f10dbf13a0be8de623ba4f9491faf58c24064f32b65679b021ed0001"},
+ {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"},
+ {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"},
+ {file = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"},
+ {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"},
+ {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"},
+ {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"},
+ {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"},
+ {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"},
+ {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"},
+ {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"},
+ {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"},
+ {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"},
+ {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"},
+ {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"},
+ {file = "regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"},
+ {file = "regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"},
+ {file = "regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"},
]
[[package]]
@@ -8394,13 +8355,13 @@ py = ">=1.4.26,<2.0.0"
[[package]]
name = "rich"
-version = "13.9.3"
+version = "13.9.4"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
optional = false
python-versions = ">=3.8.0"
files = [
- {file = "rich-13.9.3-py3-none-any.whl", hash = "sha256:9836f5096eb2172c9e77df411c1b009bace4193d6a481d534fea75ebba758283"},
- {file = "rich-13.9.3.tar.gz", hash = "sha256:bc1e01b899537598cf02579d2b9f4a415104d3fc439313a7a2c165d76557a08e"},
+ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"},
+ {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"},
]
[package.dependencies]
@@ -8413,114 +8374,101 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]
[[package]]
name = "rpds-py"
-version = "0.20.0"
+version = "0.21.0"
description = "Python bindings to Rust's persistent data structures (rpds)"
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
files = [
- {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"},
- {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"},
- {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"},
- {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"},
- {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"},
- {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"},
- {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"},
- {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"},
- {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"},
- {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"},
- {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"},
- {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"},
- {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"},
- {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"},
- {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"},
- {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"},
- {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"},
- {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"},
- {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"},
- {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"},
- {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"},
- {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"},
- {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"},
- {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"},
- {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"},
- {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"},
- {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"},
- {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"},
- {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"},
- {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"},
- {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"},
- {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"},
- {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"},
- {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"},
- {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"},
- {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"},
- {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"},
- {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"},
- {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"},
- {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"},
- {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"},
- {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"},
- {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"},
- {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"},
- {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"},
- {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"},
- {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"},
- {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"},
- {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"},
- {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"},
- {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"},
- {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"},
- {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"},
- {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"},
- {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"},
- {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"},
- {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"},
- {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"},
- {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"},
- {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"},
- {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"},
- {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"},
- {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"},
- {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"},
- {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"},
- {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"},
- {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"},
- {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"},
- {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"},
- {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"},
- {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"},
- {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"},
- {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"},
- {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"},
- {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"},
- {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"},
- {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"},
- {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"},
- {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"},
- {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"},
- {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"},
- {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"},
- {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"},
- {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"},
- {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"},
- {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"},
- {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"},
- {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"},
- {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"},
- {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"},
- {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"},
- {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"},
- {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"},
- {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"},
- {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"},
- {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"},
- {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"},
- {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"},
- {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"},
- {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"},
- {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"},
- {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"},
- {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"},
+ {file = "rpds_py-0.21.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a017f813f24b9df929674d0332a374d40d7f0162b326562daae8066b502d0590"},
+ {file = "rpds_py-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:20cc1ed0bcc86d8e1a7e968cce15be45178fd16e2ff656a243145e0b439bd250"},
+ {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad116dda078d0bc4886cb7840e19811562acdc7a8e296ea6ec37e70326c1b41c"},
+ {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:808f1ac7cf3b44f81c9475475ceb221f982ef548e44e024ad5f9e7060649540e"},
+ {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de552f4a1916e520f2703ec474d2b4d3f86d41f353e7680b597512ffe7eac5d0"},
+ {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efec946f331349dfc4ae9d0e034c263ddde19414fe5128580f512619abed05f1"},
+ {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b80b4690bbff51a034bfde9c9f6bf9357f0a8c61f548942b80f7b66356508bf5"},
+ {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:085ed25baac88953d4283e5b5bd094b155075bb40d07c29c4f073e10623f9f2e"},
+ {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:daa8efac2a1273eed2354397a51216ae1e198ecbce9036fba4e7610b308b6153"},
+ {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:95a5bad1ac8a5c77b4e658671642e4af3707f095d2b78a1fdd08af0dfb647624"},
+ {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3e53861b29a13d5b70116ea4230b5f0f3547b2c222c5daa090eb7c9c82d7f664"},
+ {file = "rpds_py-0.21.0-cp310-none-win32.whl", hash = "sha256:ea3a6ac4d74820c98fcc9da4a57847ad2cc36475a8bd9683f32ab6d47a2bd682"},
+ {file = "rpds_py-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:b8f107395f2f1d151181880b69a2869c69e87ec079c49c0016ab96860b6acbe5"},
+ {file = "rpds_py-0.21.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5555db3e618a77034954b9dc547eae94166391a98eb867905ec8fcbce1308d95"},
+ {file = "rpds_py-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:97ef67d9bbc3e15584c2f3c74bcf064af36336c10d2e21a2131e123ce0f924c9"},
+ {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab2c2a26d2f69cdf833174f4d9d86118edc781ad9a8fa13970b527bf8236027"},
+ {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4e8921a259f54bfbc755c5bbd60c82bb2339ae0324163f32868f63f0ebb873d9"},
+ {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a7ff941004d74d55a47f916afc38494bd1cfd4b53c482b77c03147c91ac0ac3"},
+ {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5145282a7cd2ac16ea0dc46b82167754d5e103a05614b724457cffe614f25bd8"},
+ {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de609a6f1b682f70bb7163da745ee815d8f230d97276db049ab447767466a09d"},
+ {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40c91c6e34cf016fa8e6b59d75e3dbe354830777fcfd74c58b279dceb7975b75"},
+ {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d2132377f9deef0c4db89e65e8bb28644ff75a18df5293e132a8d67748397b9f"},
+ {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0a9e0759e7be10109645a9fddaaad0619d58c9bf30a3f248a2ea57a7c417173a"},
+ {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e20da3957bdf7824afdd4b6eeb29510e83e026473e04952dca565170cd1ecc8"},
+ {file = "rpds_py-0.21.0-cp311-none-win32.whl", hash = "sha256:f71009b0d5e94c0e86533c0b27ed7cacc1239cb51c178fd239c3cfefefb0400a"},
+ {file = "rpds_py-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:e168afe6bf6ab7ab46c8c375606298784ecbe3ba31c0980b7dcbb9631dcba97e"},
+ {file = "rpds_py-0.21.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:30b912c965b2aa76ba5168fd610087bad7fcde47f0a8367ee8f1876086ee6d1d"},
+ {file = "rpds_py-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca9989d5d9b1b300bc18e1801c67b9f6d2c66b8fd9621b36072ed1df2c977f72"},
+ {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f54e7106f0001244a5f4cf810ba8d3f9c542e2730821b16e969d6887b664266"},
+ {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fed5dfefdf384d6fe975cc026886aece4f292feaf69d0eeb716cfd3c5a4dd8be"},
+ {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590ef88db231c9c1eece44dcfefd7515d8bf0d986d64d0caf06a81998a9e8cab"},
+ {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f983e4c2f603c95dde63df633eec42955508eefd8d0f0e6d236d31a044c882d7"},
+ {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b229ce052ddf1a01c67d68166c19cb004fb3612424921b81c46e7ea7ccf7c3bf"},
+ {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ebf64e281a06c904a7636781d2e973d1f0926a5b8b480ac658dc0f556e7779f4"},
+ {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:998a8080c4495e4f72132f3d66ff91f5997d799e86cec6ee05342f8f3cda7dca"},
+ {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:98486337f7b4f3c324ab402e83453e25bb844f44418c066623db88e4c56b7c7b"},
+ {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a78d8b634c9df7f8d175451cfeac3810a702ccb85f98ec95797fa98b942cea11"},
+ {file = "rpds_py-0.21.0-cp312-none-win32.whl", hash = "sha256:a58ce66847711c4aa2ecfcfaff04cb0327f907fead8945ffc47d9407f41ff952"},
+ {file = "rpds_py-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:e860f065cc4ea6f256d6f411aba4b1251255366e48e972f8a347cf88077b24fd"},
+ {file = "rpds_py-0.21.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ee4eafd77cc98d355a0d02f263efc0d3ae3ce4a7c24740010a8b4012bbb24937"},
+ {file = "rpds_py-0.21.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:688c93b77e468d72579351a84b95f976bd7b3e84aa6686be6497045ba84be560"},
+ {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c38dbf31c57032667dd5a2f0568ccde66e868e8f78d5a0d27dcc56d70f3fcd3b"},
+ {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d6129137f43f7fa02d41542ffff4871d4aefa724a5fe38e2c31a4e0fd343fb0"},
+ {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520ed8b99b0bf86a176271f6fe23024323862ac674b1ce5b02a72bfeff3fff44"},
+ {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaeb25ccfb9b9014a10eaf70904ebf3f79faaa8e60e99e19eef9f478651b9b74"},
+ {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af04ac89c738e0f0f1b913918024c3eab6e3ace989518ea838807177d38a2e94"},
+ {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9b76e2afd585803c53c5b29e992ecd183f68285b62fe2668383a18e74abe7a3"},
+ {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5afb5efde74c54724e1a01118c6e5c15e54e642c42a1ba588ab1f03544ac8c7a"},
+ {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:52c041802a6efa625ea18027a0723676a778869481d16803481ef6cc02ea8cb3"},
+ {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee1e4fc267b437bb89990b2f2abf6c25765b89b72dd4a11e21934df449e0c976"},
+ {file = "rpds_py-0.21.0-cp313-none-win32.whl", hash = "sha256:0c025820b78817db6a76413fff6866790786c38f95ea3f3d3c93dbb73b632202"},
+ {file = "rpds_py-0.21.0-cp313-none-win_amd64.whl", hash = "sha256:320c808df533695326610a1b6a0a6e98f033e49de55d7dc36a13c8a30cfa756e"},
+ {file = "rpds_py-0.21.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2c51d99c30091f72a3c5d126fad26236c3f75716b8b5e5cf8effb18889ced928"},
+ {file = "rpds_py-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbd7504a10b0955ea287114f003b7ad62330c9e65ba012c6223dba646f6ffd05"},
+ {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6dcc4949be728ede49e6244eabd04064336012b37f5c2200e8ec8eb2988b209c"},
+ {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f414da5c51bf350e4b7960644617c130140423882305f7574b6cf65a3081cecb"},
+ {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9afe42102b40007f588666bc7de82451e10c6788f6f70984629db193849dced1"},
+ {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b929c2bb6e29ab31f12a1117c39f7e6d6450419ab7464a4ea9b0b417174f044"},
+ {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8404b3717da03cbf773a1d275d01fec84ea007754ed380f63dfc24fb76ce4592"},
+ {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e12bb09678f38b7597b8346983d2323a6482dcd59e423d9448108c1be37cac9d"},
+ {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:58a0e345be4b18e6b8501d3b0aa540dad90caeed814c515e5206bb2ec26736fd"},
+ {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c3761f62fcfccf0864cc4665b6e7c3f0c626f0380b41b8bd1ce322103fa3ef87"},
+ {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c2b2f71c6ad6c2e4fc9ed9401080badd1469fa9889657ec3abea42a3d6b2e1ed"},
+ {file = "rpds_py-0.21.0-cp39-none-win32.whl", hash = "sha256:b21747f79f360e790525e6f6438c7569ddbfb1b3197b9e65043f25c3c9b489d8"},
+ {file = "rpds_py-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:0626238a43152918f9e72ede9a3b6ccc9e299adc8ade0d67c5e142d564c9a83d"},
+ {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6b4ef7725386dc0762857097f6b7266a6cdd62bfd209664da6712cb26acef035"},
+ {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6bc0e697d4d79ab1aacbf20ee5f0df80359ecf55db33ff41481cf3e24f206919"},
+ {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da52d62a96e61c1c444f3998c434e8b263c384f6d68aca8274d2e08d1906325c"},
+ {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:98e4fe5db40db87ce1c65031463a760ec7906ab230ad2249b4572c2fc3ef1f9f"},
+ {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30bdc973f10d28e0337f71d202ff29345320f8bc49a31c90e6c257e1ccef4333"},
+ {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:faa5e8496c530f9c71f2b4e1c49758b06e5f4055e17144906245c99fa6d45356"},
+ {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32eb88c30b6a4f0605508023b7141d043a79b14acb3b969aa0b4f99b25bc7d4a"},
+ {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a89a8ce9e4e75aeb7fa5d8ad0f3fecdee813802592f4f46a15754dcb2fd6b061"},
+ {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:241e6c125568493f553c3d0fdbb38c74babf54b45cef86439d4cd97ff8feb34d"},
+ {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:3b766a9f57663396e4f34f5140b3595b233a7b146e94777b97a8413a1da1be18"},
+ {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:af4a644bf890f56e41e74be7d34e9511e4954894d544ec6b8efe1e21a1a8da6c"},
+ {file = "rpds_py-0.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3e30a69a706e8ea20444b98a49f386c17b26f860aa9245329bab0851ed100677"},
+ {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:031819f906bb146561af051c7cef4ba2003d28cff07efacef59da973ff7969ba"},
+ {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b876f2bc27ab5954e2fd88890c071bd0ed18b9c50f6ec3de3c50a5ece612f7a6"},
+ {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc5695c321e518d9f03b7ea6abb5ea3af4567766f9852ad1560f501b17588c7b"},
+ {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b4de1da871b5c0fd5537b26a6fc6814c3cc05cabe0c941db6e9044ffbb12f04a"},
+ {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:878f6fea96621fda5303a2867887686d7a198d9e0f8a40be100a63f5d60c88c9"},
+ {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8eeec67590e94189f434c6d11c426892e396ae59e4801d17a93ac96b8c02a6c"},
+ {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ff2eba7f6c0cb523d7e9cff0903f2fe1feff8f0b2ceb6bd71c0e20a4dcee271"},
+ {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a429b99337062877d7875e4ff1a51fe788424d522bd64a8c0a20ef3021fdb6ed"},
+ {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d167e4dbbdac48bd58893c7e446684ad5d425b407f9336e04ab52e8b9194e2ed"},
+ {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4eb2de8a147ffe0626bfdc275fc6563aa7bf4b6db59cf0d44f0ccd6ca625a24e"},
+ {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e78868e98f34f34a88e23ee9ccaeeec460e4eaf6db16d51d7a9b883e5e785a5e"},
+ {file = "rpds_py-0.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4991ca61656e3160cdaca4851151fd3f4a92e9eba5c7a530ab030d6aee96ec89"},
+ {file = "rpds_py-0.21.0.tar.gz", hash = "sha256:ed6378c9d66d0de903763e7706383d60c33829581f0adff47b6535f1802fa6db"},
]
[[package]]
@@ -8539,29 +8487,29 @@ pyasn1 = ">=0.1.3"
[[package]]
name = "ruff"
-version = "0.7.3"
+version = "0.7.4"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
- {file = "ruff-0.7.3-py3-none-linux_armv6l.whl", hash = "sha256:34f2339dc22687ec7e7002792d1f50712bf84a13d5152e75712ac08be565d344"},
- {file = "ruff-0.7.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:fb397332a1879b9764a3455a0bb1087bda876c2db8aca3a3cbb67b3dbce8cda0"},
- {file = "ruff-0.7.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:37d0b619546103274e7f62643d14e1adcbccb242efda4e4bdb9544d7764782e9"},
- {file = "ruff-0.7.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d59f0c3ee4d1a6787614e7135b72e21024875266101142a09a61439cb6e38a5"},
- {file = "ruff-0.7.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:44eb93c2499a169d49fafd07bc62ac89b1bc800b197e50ff4633aed212569299"},
- {file = "ruff-0.7.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d0242ce53f3a576c35ee32d907475a8d569944c0407f91d207c8af5be5dae4e"},
- {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6b6224af8b5e09772c2ecb8dc9f3f344c1aa48201c7f07e7315367f6dd90ac29"},
- {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c50f95a82b94421c964fae4c27c0242890a20fe67d203d127e84fbb8013855f5"},
- {file = "ruff-0.7.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f3eff9961b5d2644bcf1616c606e93baa2d6b349e8aa8b035f654df252c8c67"},
- {file = "ruff-0.7.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8963cab06d130c4df2fd52c84e9f10d297826d2e8169ae0c798b6221be1d1d2"},
- {file = "ruff-0.7.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:61b46049d6edc0e4317fb14b33bd693245281a3007288b68a3f5b74a22a0746d"},
- {file = "ruff-0.7.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:10ebce7696afe4644e8c1a23b3cf8c0f2193a310c18387c06e583ae9ef284de2"},
- {file = "ruff-0.7.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3f36d56326b3aef8eeee150b700e519880d1aab92f471eefdef656fd57492aa2"},
- {file = "ruff-0.7.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5d024301109a0007b78d57ab0ba190087b43dce852e552734ebf0b0b85e4fb16"},
- {file = "ruff-0.7.3-py3-none-win32.whl", hash = "sha256:4ba81a5f0c5478aa61674c5a2194de8b02652f17addf8dfc40c8937e6e7d79fc"},
- {file = "ruff-0.7.3-py3-none-win_amd64.whl", hash = "sha256:588a9ff2fecf01025ed065fe28809cd5a53b43505f48b69a1ac7707b1b7e4088"},
- {file = "ruff-0.7.3-py3-none-win_arm64.whl", hash = "sha256:1713e2c5545863cdbfe2cbce21f69ffaf37b813bfd1fb3b90dc9a6f1963f5a8c"},
- {file = "ruff-0.7.3.tar.gz", hash = "sha256:e1d1ba2e40b6e71a61b063354d04be669ab0d39c352461f3d789cac68b54a313"},
+ {file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"},
+ {file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"},
+ {file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"},
+ {file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"},
+ {file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"},
+ {file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"},
+ {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"},
+ {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"},
+ {file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"},
+ {file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"},
+ {file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"},
+ {file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"},
+ {file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"},
+ {file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"},
+ {file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"},
+ {file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"},
+ {file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"},
+ {file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"},
]
[[package]]
@@ -8758,13 +8706,13 @@ test = ["accelerate (>=0.24.1,<=0.27.0)", "apache-airflow (==2.9.3)", "apache-ai
[[package]]
name = "sagemaker-core"
-version = "1.0.11"
+version = "1.0.14"
description = "An python package for sagemaker core functionalities"
optional = false
python-versions = ">=3.8"
files = [
- {file = "sagemaker_core-1.0.11-py3-none-any.whl", hash = "sha256:d8ee3db83759073aa8c9f2bd4899113088a7c2acf340597e76cf9934e384d915"},
- {file = "sagemaker_core-1.0.11.tar.gz", hash = "sha256:fb48a5dcb859a54de7461c71cf58562a3be259294dcd39c317020a9b018f5016"},
+ {file = "sagemaker_core-1.0.14-py3-none-any.whl", hash = "sha256:b47804d56a5b29967e6f29510e978d8ed541536c44e5aecef4fdecfafaba6aaa"},
+ {file = "sagemaker_core-1.0.14.tar.gz", hash = "sha256:e73b8adfb1ae2f82c948f4a976222acc9e13f0d051129b332a034d3e05908857"},
]
[package.dependencies]
@@ -8943,23 +8891,23 @@ tornado = ["tornado (>=5)"]
[[package]]
name = "setuptools"
-version = "75.3.0"
+version = "75.5.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
files = [
- {file = "setuptools-75.3.0-py3-none-any.whl", hash = "sha256:f2504966861356aa38616760c0f66568e535562374995367b4e69c7143cf6bcd"},
- {file = "setuptools-75.3.0.tar.gz", hash = "sha256:fba5dd4d766e97be1b1681d98712680ae8f2f26d7881245f2ce9e40714f1a686"},
+ {file = "setuptools-75.5.0-py3-none-any.whl", hash = "sha256:87cb777c3b96d638ca02031192d40390e0ad97737e27b6b4fa831bea86f2f829"},
+ {file = "setuptools-75.5.0.tar.gz", hash = "sha256:5c4ccb41111392671f02bb5f8436dfc5a9a7185e80500531b133f5775c4163ef"},
]
[package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"]
-core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"]
+core = ["importlib-metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
enabler = ["pytest-enabler (>=2.2)"]
-test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
-type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.12.*)", "pytest-mypy"]
+test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
+type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"]
[[package]]
name = "sgmllib3k"
@@ -9210,13 +9158,13 @@ sqlcipher = ["sqlcipher3_binary"]
[[package]]
name = "sqlparse"
-version = "0.5.1"
+version = "0.5.2"
description = "A non-validating SQL parser."
optional = false
python-versions = ">=3.8"
files = [
- {file = "sqlparse-0.5.1-py3-none-any.whl", hash = "sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4"},
- {file = "sqlparse-0.5.1.tar.gz", hash = "sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e"},
+ {file = "sqlparse-0.5.2-py3-none-any.whl", hash = "sha256:e99bc85c78160918c3e1d9230834ab8d80fc06c59d03f8db2618f65f65dda55e"},
+ {file = "sqlparse-0.5.2.tar.gz", hash = "sha256:9e37b35e16d1cc652a2545f0997c1deb23ea28fa1f3eefe609eee3063c3b105f"},
]
[package.extras]
@@ -9394,13 +9342,13 @@ test = ["pytest", "tornado (>=4.5)", "typeguard"]
[[package]]
name = "tencentcloud-sdk-python-common"
-version = "3.0.1257"
+version = "3.0.1266"
description = "Tencent Cloud Common SDK for Python"
optional = false
python-versions = "*"
files = [
- {file = "tencentcloud-sdk-python-common-3.0.1257.tar.gz", hash = "sha256:e10b155d598a60c43a491be10f40f7dae5774a2187d55f2da83bdb559434f3c4"},
- {file = "tencentcloud_sdk_python_common-3.0.1257-py2.py3-none-any.whl", hash = "sha256:f474a2969f3cbff91f45780f18bfbb90ab53f66c0085c4e9b4e07c2fcf0e71d9"},
+ {file = "tencentcloud-sdk-python-common-3.0.1266.tar.gz", hash = "sha256:3b1733a74138b66696c19263e6f579ac4bd7fc6048ffe7cb7d1774ecd09720f6"},
+ {file = "tencentcloud_sdk_python_common-3.0.1266-py2.py3-none-any.whl", hash = "sha256:f6d89ee5f2c71cd701e2f55b4bd3cf4ed69619a7514eee66a7f79fe9ac65d02a"},
]
[package.dependencies]
@@ -9408,17 +9356,17 @@ requests = ">=2.16.0"
[[package]]
name = "tencentcloud-sdk-python-hunyuan"
-version = "3.0.1257"
+version = "3.0.1266"
description = "Tencent Cloud Hunyuan SDK for Python"
optional = false
python-versions = "*"
files = [
- {file = "tencentcloud-sdk-python-hunyuan-3.0.1257.tar.gz", hash = "sha256:4d38505089bed70dda1f806f8c4835f8a8c520efa86dcecfef444045c21b695d"},
- {file = "tencentcloud_sdk_python_hunyuan-3.0.1257-py2.py3-none-any.whl", hash = "sha256:c9089d3e49304c9c20e7465c82372b2cd234e67f63efdffb6798a4093b3a97c6"},
+ {file = "tencentcloud-sdk-python-hunyuan-3.0.1266.tar.gz", hash = "sha256:dcff322290fd4e7c40067c4e80ac9bf19867601c195d6a505f2f3fa1b97cc2ec"},
+ {file = "tencentcloud_sdk_python_hunyuan-3.0.1266-py2.py3-none-any.whl", hash = "sha256:e2f8d156df33e157fc93b70a0574a2da5d2bbb8f98f0b3e7a5783d6c8c072c2b"},
]
[package.dependencies]
-tencentcloud-sdk-python-common = "3.0.1257"
+tencentcloud-sdk-python-common = "3.0.1266"
[[package]]
name = "termcolor"
@@ -9521,13 +9469,13 @@ files = [
[[package]]
name = "tldextract"
-version = "5.1.2"
+version = "5.1.3"
description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well."
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
files = [
- {file = "tldextract-5.1.2-py3-none-any.whl", hash = "sha256:4dfc4c277b6b97fa053899fcdb892d2dc27295851ab5fac4e07797b6a21b2e46"},
- {file = "tldextract-5.1.2.tar.gz", hash = "sha256:c9e17f756f05afb5abac04fe8f766e7e70f9fe387adb1859f0f52408ee060200"},
+ {file = "tldextract-5.1.3-py3-none-any.whl", hash = "sha256:78de310cc2ca018692de5ddf320f9d6bd7c5cf857d0fd4f2175f0cdf4440ea75"},
+ {file = "tldextract-5.1.3.tar.gz", hash = "sha256:d43c7284c23f5dc8a42fd0fee2abede2ff74cc622674e4cb07f514ab3330c338"},
]
[package.dependencies]
@@ -9538,7 +9486,7 @@ requests-file = ">=1.4"
[package.extras]
release = ["build", "twine"]
-testing = ["black", "mypy", "pytest", "pytest-gitignore", "pytest-mock", "responses", "ruff", "syrupy", "tox", "types-filelock", "types-requests"]
+testing = ["mypy", "pytest", "pytest-gitignore", "pytest-mock", "responses", "ruff", "syrupy", "tox", "tox-uv", "types-filelock", "types-requests"]
[[package]]
name = "tokenizers"
@@ -9680,13 +9628,13 @@ files = [
[[package]]
name = "tomli"
-version = "2.0.2"
+version = "2.1.0"
description = "A lil' TOML parser"
optional = false
python-versions = ">=3.8"
files = [
- {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"},
- {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"},
+ {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"},
+ {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"},
]
[[package]]
@@ -9708,13 +9656,13 @@ six = "*"
[[package]]
name = "tqdm"
-version = "4.66.6"
+version = "4.67.0"
description = "Fast, Extensible Progress Meter"
optional = false
python-versions = ">=3.7"
files = [
- {file = "tqdm-4.66.6-py3-none-any.whl", hash = "sha256:223e8b5359c2efc4b30555531f09e9f2f3589bcd7fdd389271191031b49b7a63"},
- {file = "tqdm-4.66.6.tar.gz", hash = "sha256:4bdd694238bef1485ce839d67967ab50af8f9272aab687c0d7702a01da0be090"},
+ {file = "tqdm-4.67.0-py3-none-any.whl", hash = "sha256:0cd8af9d56911acab92182e88d763100d4788bdf421d251616040cc4d44863be"},
+ {file = "tqdm-4.67.0.tar.gz", hash = "sha256:fe5a6f95e6fe0b9755e9469b77b9c3cf850048224ecaa8293d7d2d31f97d869a"},
]
[package.dependencies]
@@ -9722,6 +9670,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""}
[package.extras]
dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"]
+discord = ["requests"]
notebook = ["ipywidgets (>=6)"]
slack = ["slack-sdk"]
telegram = ["requests"]
@@ -9813,13 +9762,13 @@ requests = ">=2.0.0"
[[package]]
name = "typer"
-version = "0.12.5"
+version = "0.13.0"
description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
optional = false
python-versions = ">=3.7"
files = [
- {file = "typer-0.12.5-py3-none-any.whl", hash = "sha256:62fe4e471711b147e3365034133904df3e235698399bc4de2b36c8579298d52b"},
- {file = "typer-0.12.5.tar.gz", hash = "sha256:f592f089bedcc8ec1b974125d64851029c3b1af145f04aca64d69410f0c9b722"},
+ {file = "typer-0.13.0-py3-none-any.whl", hash = "sha256:d85fe0b777b2517cc99c8055ed735452f2659cd45e451507c76f48ce5c1d00e2"},
+ {file = "typer-0.13.0.tar.gz", hash = "sha256:f1c7198347939361eec90139ffa0fd8b3df3a2259d5852a0f7400e476d95985c"},
]
[package.dependencies]
@@ -9968,13 +9917,13 @@ files = [
[[package]]
name = "unstructured"
-version = "0.16.3"
+version = "0.16.5"
description = "A library that prepares raw documents for downstream ML tasks."
optional = false
python-versions = "<3.13,>=3.9.0"
files = [
- {file = "unstructured-0.16.3-py3-none-any.whl", hash = "sha256:e0e3b56531b44e62154d17cbfdae7fd7fa1d795b7cf510fb654c6714d4257655"},
- {file = "unstructured-0.16.3.tar.gz", hash = "sha256:f9528636773c910a53c8a34e32d4733ea54b79cbd507d0e956e299ab1da3003f"},
+ {file = "unstructured-0.16.5-py3-none-any.whl", hash = "sha256:d867e6d5c002c159997bb44df82c43531570c32fa87a010a0aae8a7a0e22ec49"},
+ {file = "unstructured-0.16.5.tar.gz", hash = "sha256:2c36de777f88529e0f7c306eb8116b755963928d50d331bbfee56e2f61fe023f"},
]
[package.dependencies]
@@ -10027,13 +9976,13 @@ xlsx = ["networkx", "openpyxl", "pandas", "xlrd"]
[[package]]
name = "unstructured-client"
-version = "0.26.2"
+version = "0.27.0"
description = "Python Client SDK for Unstructured API"
optional = false
python-versions = "<4.0,>=3.8"
files = [
- {file = "unstructured_client-0.26.2-py3-none-any.whl", hash = "sha256:0adb22b7d175814f333ee2425a279005f253220a55f459fd5830a6779b679780"},
- {file = "unstructured_client-0.26.2.tar.gz", hash = "sha256:02f7183ab16db6ec48ad1ac75c01b05967c87c561a89e96d9ffb836baed902d7"},
+ {file = "unstructured_client-0.27.0-py3-none-any.whl", hash = "sha256:e6413df1e2a4de36b78701369da83456e3a0b7b7cb9d1995c04ef55a1c9a8def"},
+ {file = "unstructured_client-0.27.0.tar.gz", hash = "sha256:6eac20457205d61544d0cb8572a113d83ac61f8dda6fcec20a0a0c6259f256b6"},
]
[package.dependencies]
@@ -10042,7 +9991,7 @@ eval-type-backport = ">=0.2.0,<0.3.0"
httpx = ">=0.27.0"
jsonpath-python = ">=1.0.6,<2.0.0"
nest-asyncio = ">=1.6.0"
-pydantic = ">=2.9.0,<2.10.0"
+pydantic = ">=2.9.2,<2.10.0"
pypdf = ">=4.0"
python-dateutil = "2.8.2"
requests-toolbelt = ">=1.0.0"
@@ -10828,13 +10777,13 @@ multidict = ">=4.0"
[[package]]
name = "yfinance"
-version = "0.2.48"
+version = "0.2.49"
description = "Download market data from Yahoo! Finance API"
optional = false
python-versions = "*"
files = [
- {file = "yfinance-0.2.48-py2.py3-none-any.whl", hash = "sha256:eda797145faa4536595eb629f869d3616e58ed7e71de36856b19f1abaef71a5b"},
- {file = "yfinance-0.2.48.tar.gz", hash = "sha256:1434cd8bf22f345fa27ef1ed82bfdd291c1bb5b6fe3067118a94e256aa90c4eb"},
+ {file = "yfinance-0.2.49-py2.py3-none-any.whl", hash = "sha256:cc9c7d09826e7eaee96d179395e814b911e083fbfb325c2fe693cae019b47f38"},
+ {file = "yfinance-0.2.49.tar.gz", hash = "sha256:e6b45f8392feb11360450630f86f96a46dfa708d77c334d5376564a9eead952b"},
]
[package.dependencies]
@@ -10854,6 +10803,20 @@ requests = ">=2.31"
nospam = ["requests-cache (>=1.0)", "requests-ratelimiter (>=0.3.1)"]
repair = ["scipy (>=1.6.3)"]
+[[package]]
+name = "youtube-transcript-api"
+version = "0.6.2"
+description = "This is an python API which allows you to get the transcripts/subtitles for a given YouTube video. It also works for automatically generated subtitles, supports translating subtitles and it does not require a headless browser, like other selenium based solutions do!"
+optional = false
+python-versions = "*"
+files = [
+ {file = "youtube_transcript_api-0.6.2-py3-none-any.whl", hash = "sha256:019dbf265c6a68a0591c513fff25ed5a116ce6525832aefdfb34d4df5567121c"},
+ {file = "youtube_transcript_api-0.6.2.tar.gz", hash = "sha256:cad223d7620633cec44f657646bffc8bbc5598bd8e70b1ad2fa8277dec305eb7"},
+]
+
+[package.dependencies]
+requests = "*"
+
[[package]]
name = "zhipuai"
version = "2.1.5.20230904"
@@ -10874,13 +10837,13 @@ pyjwt = ">=2.8.0,<2.9.0"
[[package]]
name = "zipp"
-version = "3.20.2"
+version = "3.21.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
files = [
- {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"},
- {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"},
+ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"},
+ {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"},
]
[package.extras]
@@ -11078,4 +11041,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.13"
-content-hash = "2ba4b464eebc26598f290fa94713acc44c588f902176e6efa80622911d40f0ac"
+content-hash = "69a3f471f85dce9e5fb889f739e148a4a6d95aaf94081414503867c7157dba69"
diff --git a/api/pyproject.toml b/api/pyproject.toml
index 0633e9dd90..0d87c1b1c8 100644
--- a/api/pyproject.toml
+++ b/api/pyproject.toml
@@ -35,6 +35,7 @@ select = [
"S506", # unsafe-yaml-load
"SIM", # flake8-simplify rules
"TRY400", # error-instead-of-exception
+ "TRY401", # verbose-log-message
"UP", # pyupgrade rules
"W191", # tab-indentation
"W605", # invalid-escape-sequence
@@ -186,6 +187,7 @@ websocket-client = "~1.7.0"
werkzeug = "~3.0.1"
xinference-client = "0.15.2"
yarl = "~1.9.4"
+youtube-transcript-api = "~0.6.2"
zhipuai = "~2.1.5"
# Before adding new dependency, consider place it in alphabet order (a-z) and suitable group.
diff --git a/api/schedule/clean_messages.py b/api/schedule/clean_messages.py
new file mode 100644
index 0000000000..72ee2a8901
--- /dev/null
+++ b/api/schedule/clean_messages.py
@@ -0,0 +1,79 @@
+import datetime
+import time
+
+import click
+from werkzeug.exceptions import NotFound
+
+import app
+from configs import dify_config
+from extensions.ext_database import db
+from extensions.ext_redis import redis_client
+from models.model import (
+ App,
+ Message,
+ MessageAgentThought,
+ MessageAnnotation,
+ MessageChain,
+ MessageFeedback,
+ MessageFile,
+)
+from models.web import SavedMessage
+from services.feature_service import FeatureService
+
+
+@app.celery.task(queue="dataset")
+def clean_messages():
+ click.echo(click.style("Start clean messages.", fg="green"))
+ start_at = time.perf_counter()
+ plan_sandbox_clean_message_day = datetime.datetime.now() - datetime.timedelta(
+ days=dify_config.PLAN_SANDBOX_CLEAN_MESSAGE_DAY_SETTING
+ )
+ page = 1
+ while True:
+ try:
+ # Main query with join and filter
+ messages = (
+ db.session.query(Message)
+ .filter(Message.created_at < plan_sandbox_clean_message_day)
+ .order_by(Message.created_at.desc())
+ .paginate(page=page, per_page=100)
+ )
+
+ except NotFound:
+ break
+ if messages.items is None or len(messages.items) == 0:
+ break
+ for message in messages.items:
+            message_app = App.query.filter_by(id=message.app_id).first()
+            features_cache_key = f"features:{message_app.tenant_id}"
+            plan_cache = redis_client.get(features_cache_key)
+            if plan_cache is None:
+                features = FeatureService.get_features(message_app.tenant_id)
+ redis_client.setex(features_cache_key, 600, features.billing.subscription.plan)
+ plan = features.billing.subscription.plan
+ else:
+ plan = plan_cache.decode()
+ if plan == "sandbox":
+ # clean related message
+ db.session.query(MessageFeedback).filter(MessageFeedback.message_id == message.id).delete(
+ synchronize_session=False
+ )
+ db.session.query(MessageAnnotation).filter(MessageAnnotation.message_id == message.id).delete(
+ synchronize_session=False
+ )
+ db.session.query(MessageChain).filter(MessageChain.message_id == message.id).delete(
+ synchronize_session=False
+ )
+ db.session.query(MessageAgentThought).filter(MessageAgentThought.message_id == message.id).delete(
+ synchronize_session=False
+ )
+ db.session.query(MessageFile).filter(MessageFile.message_id == message.id).delete(
+ synchronize_session=False
+ )
+ db.session.query(SavedMessage).filter(SavedMessage.message_id == message.id).delete(
+ synchronize_session=False
+ )
+ db.session.query(Message).filter(Message.id == message.id).delete()
+ db.session.commit()
+ end_at = time.perf_counter()
+    click.echo(click.style("Cleaned messages from db success latency: {}".format(end_at - start_at), fg="green"))
diff --git a/api/schedule/clean_unused_datasets_task.py b/api/schedule/clean_unused_datasets_task.py
index 100fd8dfab..e12be649e4 100644
--- a/api/schedule/clean_unused_datasets_task.py
+++ b/api/schedule/clean_unused_datasets_task.py
@@ -22,7 +22,6 @@ def clean_unused_datasets_task():
start_at = time.perf_counter()
plan_sandbox_clean_day = datetime.datetime.now() - datetime.timedelta(days=plan_sandbox_clean_day_setting)
plan_pro_clean_day = datetime.datetime.now() - datetime.timedelta(days=plan_pro_clean_day_setting)
- page = 1
while True:
try:
# Subquery for counting new documents
@@ -62,14 +61,13 @@ def clean_unused_datasets_task():
func.coalesce(document_subquery_old.c.document_count, 0) > 0,
)
.order_by(Dataset.created_at.desc())
- .paginate(page=page, per_page=50)
+ .paginate(page=1, per_page=50)
)
except NotFound:
break
if datasets.items is None or len(datasets.items) == 0:
break
- page += 1
for dataset in datasets:
dataset_query = (
db.session.query(DatasetQuery)
@@ -92,7 +90,6 @@ def clean_unused_datasets_task():
click.echo(
click.style("clean dataset index error: {} {}".format(e.__class__.__name__, str(e)), fg="red")
)
- page = 1
while True:
try:
# Subquery for counting new documents
@@ -132,14 +129,13 @@ def clean_unused_datasets_task():
func.coalesce(document_subquery_old.c.document_count, 0) > 0,
)
.order_by(Dataset.created_at.desc())
- .paginate(page=page, per_page=50)
+ .paginate(page=1, per_page=50)
)
except NotFound:
break
if datasets.items is None or len(datasets.items) == 0:
break
- page += 1
for dataset in datasets:
dataset_query = (
db.session.query(DatasetQuery)
@@ -149,11 +145,13 @@ def clean_unused_datasets_task():
if not dataset_query or len(dataset_query) == 0:
try:
features_cache_key = f"features:{dataset.tenant_id}"
- plan = redis_client.get(features_cache_key)
- if plan is None:
+ plan_cache = redis_client.get(features_cache_key)
+ if plan_cache is None:
features = FeatureService.get_features(dataset.tenant_id)
redis_client.setex(features_cache_key, 600, features.billing.subscription.plan)
plan = features.billing.subscription.plan
+ else:
+ plan = plan_cache.decode()
if plan == "sandbox":
# remove index
index_processor = IndexProcessorFactory(dataset.doc_form).init_index_processor()
diff --git a/api/services/account_service.py b/api/services/account_service.py
index 963a055948..3d7f9e7dfb 100644
--- a/api/services/account_service.py
+++ b/api/services/account_service.py
@@ -198,9 +198,9 @@ class AccountService:
) -> Account:
"""create account"""
if not FeatureService.get_system_features().is_allow_register and not is_setup:
- from controllers.console.error import NotAllowedRegister
+ from controllers.console.error import AccountNotFound
- raise NotAllowedRegister()
+ raise AccountNotFound()
account = Account()
account.email = email
account.name = name
@@ -779,7 +779,7 @@ class RegisterService:
db.session.query(Tenant).delete()
db.session.commit()
- logging.exception(f"Setup failed: {e}")
+ logging.exception(f"Setup account failed, email: {email}, name: {name}")
raise ValueError(f"Setup failed: {e}")
@classmethod
@@ -821,7 +821,7 @@ class RegisterService:
db.session.rollback()
except Exception as e:
db.session.rollback()
- logging.exception(f"Register failed: {e}")
+ logging.exception("Register failed")
raise AccountRegisterError(f"Registration failed: {e}") from e
return account
diff --git a/api/services/app_service.py b/api/services/app_service.py
index ac45d623e8..620d0ac270 100644
--- a/api/services/app_service.py
+++ b/api/services/app_service.py
@@ -88,7 +88,7 @@ class AppService:
except (ProviderTokenNotInitError, LLMBadRequestError):
model_instance = None
except Exception as e:
- logging.exception(e)
+ logging.exception(f"Get default model instance failed, tenant_id: {tenant_id}")
model_instance = None
if model_instance:
diff --git a/api/services/feature_service.py b/api/services/feature_service.py
index c321393bc5..d0b04628cf 100644
--- a/api/services/feature_service.py
+++ b/api/services/feature_service.py
@@ -1,3 +1,5 @@
+from enum import Enum
+
from pydantic import BaseModel, ConfigDict
from configs import dify_config
@@ -20,6 +22,20 @@ class LimitationModel(BaseModel):
limit: int = 0
+class LicenseStatus(str, Enum):
+ NONE = "none"
+ INACTIVE = "inactive"
+ ACTIVE = "active"
+ EXPIRING = "expiring"
+ EXPIRED = "expired"
+ LOST = "lost"
+
+
+class LicenseModel(BaseModel):
+ status: LicenseStatus = LicenseStatus.NONE
+ expired_at: str = ""
+
+
class FeatureModel(BaseModel):
billing: BillingModel = BillingModel()
members: LimitationModel = LimitationModel(size=0, limit=1)
@@ -47,6 +63,7 @@ class SystemFeatureModel(BaseModel):
enable_social_oauth_login: bool = False
is_allow_register: bool = False
is_allow_create_workspace: bool = False
+ license: LicenseModel = LicenseModel()
class FeatureService:
@@ -131,17 +148,31 @@ class FeatureService:
if "sso_enforced_for_signin" in enterprise_info:
features.sso_enforced_for_signin = enterprise_info["sso_enforced_for_signin"]
+
if "sso_enforced_for_signin_protocol" in enterprise_info:
features.sso_enforced_for_signin_protocol = enterprise_info["sso_enforced_for_signin_protocol"]
+
if "sso_enforced_for_web" in enterprise_info:
features.sso_enforced_for_web = enterprise_info["sso_enforced_for_web"]
+
if "sso_enforced_for_web_protocol" in enterprise_info:
features.sso_enforced_for_web_protocol = enterprise_info["sso_enforced_for_web_protocol"]
+
if "enable_email_code_login" in enterprise_info:
features.enable_email_code_login = enterprise_info["enable_email_code_login"]
+
if "enable_email_password_login" in enterprise_info:
features.enable_email_password_login = enterprise_info["enable_email_password_login"]
+
if "is_allow_register" in enterprise_info:
features.is_allow_register = enterprise_info["is_allow_register"]
+
if "is_allow_create_workspace" in enterprise_info:
features.is_allow_create_workspace = enterprise_info["is_allow_create_workspace"]
+
+ if "license" in enterprise_info:
+ if "status" in enterprise_info["license"]:
+ features.license.status = enterprise_info["license"]["status"]
+
+ if "expired_at" in enterprise_info["license"]:
+ features.license.expired_at = enterprise_info["license"]["expired_at"]
diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py
index b6b0143fac..78a80f70ab 100644
--- a/api/services/tools/api_tools_manage_service.py
+++ b/api/services/tools/api_tools_manage_service.py
@@ -195,7 +195,7 @@ class ApiToolManageService:
# try to parse schema, avoid SSRF attack
ApiToolManageService.parser_api_schema(schema)
except Exception as e:
- logger.exception(f"parse api schema error: {str(e)}")
+ logger.exception("parse api schema error")
raise ValueError("invalid schema, please check the url you provided")
return {"schema": schema}
diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py
index e535ddb575..1befa11531 100644
--- a/api/services/tools/tools_transform_service.py
+++ b/api/services/tools/tools_transform_service.py
@@ -183,7 +183,7 @@ class ToolTransformService:
try:
username = db_provider.user.name
except Exception as e:
- logger.exception(f"failed to get user name for api provider {db_provider.id}: {str(e)}")
+ logger.exception(f"failed to get user name for api provider {db_provider.id}")
# add provider into providers
credentials = db_provider.credentials
result = UserToolProvider(
diff --git a/api/tasks/annotation/delete_annotation_index_task.py b/api/tasks/annotation/delete_annotation_index_task.py
index 5758db53de..f0f6b32b06 100644
--- a/api/tasks/annotation/delete_annotation_index_task.py
+++ b/api/tasks/annotation/delete_annotation_index_task.py
@@ -38,4 +38,4 @@ def delete_annotation_index_task(annotation_id: str, app_id: str, tenant_id: str
click.style("App annotations index deleted : {} latency: {}".format(app_id, end_at - start_at), fg="green")
)
except Exception as e:
- logging.exception("Annotation deleted index failed:{}".format(str(e)))
+ logging.exception("Annotation deleted index failed")
diff --git a/api/tasks/annotation/disable_annotation_reply_task.py b/api/tasks/annotation/disable_annotation_reply_task.py
index 0f83dfdbd4..a2f4913513 100644
--- a/api/tasks/annotation/disable_annotation_reply_task.py
+++ b/api/tasks/annotation/disable_annotation_reply_task.py
@@ -60,7 +60,7 @@ def disable_annotation_reply_task(job_id: str, app_id: str, tenant_id: str):
click.style("App annotations index deleted : {} latency: {}".format(app_id, end_at - start_at), fg="green")
)
except Exception as e:
- logging.exception("Annotation batch deleted index failed:{}".format(str(e)))
+ logging.exception("Annotation batch deleted index failed")
redis_client.setex(disable_app_annotation_job_key, 600, "error")
disable_app_annotation_error_key = "disable_app_annotation_error_{}".format(str(job_id))
redis_client.setex(disable_app_annotation_error_key, 600, str(e))
diff --git a/api/tasks/annotation/enable_annotation_reply_task.py b/api/tasks/annotation/enable_annotation_reply_task.py
index 82b70f6b71..e819bf3635 100644
--- a/api/tasks/annotation/enable_annotation_reply_task.py
+++ b/api/tasks/annotation/enable_annotation_reply_task.py
@@ -93,7 +93,7 @@ def enable_annotation_reply_task(
click.style("App annotations added to index: {} latency: {}".format(app_id, end_at - start_at), fg="green")
)
except Exception as e:
- logging.exception("Annotation batch created index failed:{}".format(str(e)))
+ logging.exception("Annotation batch created index failed")
redis_client.setex(enable_app_annotation_job_key, 600, "error")
enable_app_annotation_error_key = "enable_app_annotation_error_{}".format(str(job_id))
redis_client.setex(enable_app_annotation_error_key, 600, str(e))
diff --git a/api/tasks/batch_create_segment_to_index_task.py b/api/tasks/batch_create_segment_to_index_task.py
index d1b41f2675..5ee72c27fc 100644
--- a/api/tasks/batch_create_segment_to_index_task.py
+++ b/api/tasks/batch_create_segment_to_index_task.py
@@ -103,5 +103,5 @@ def batch_create_segment_to_index_task(
click.style("Segment batch created job: {} latency: {}".format(job_id, end_at - start_at), fg="green")
)
except Exception as e:
- logging.exception("Segments batch created index failed:{}".format(str(e)))
+ logging.exception("Segments batch created index failed")
redis_client.setex(indexing_cache_key, 600, "error")
diff --git a/api/tests/integration_tests/model_runtime/google/test_llm.py b/api/tests/integration_tests/model_runtime/google/test_llm.py
index 34d08f270a..2877fa1507 100644
--- a/api/tests/integration_tests/model_runtime/google/test_llm.py
+++ b/api/tests/integration_tests/model_runtime/google/test_llm.py
@@ -31,7 +31,7 @@ def test_invoke_model(setup_google_mock):
model = GoogleLargeLanguageModel()
response = model.invoke(
- model="gemini-pro",
+ model="gemini-1.5-pro",
credentials={"google_api_key": os.environ.get("GOOGLE_API_KEY")},
prompt_messages=[
SystemPromptMessage(
@@ -48,7 +48,7 @@ def test_invoke_model(setup_google_mock):
]
),
],
- model_parameters={"temperature": 0.5, "top_p": 1.0, "max_tokens_to_sample": 2048},
+ model_parameters={"temperature": 0.5, "top_p": 1.0, "max_output_tokens": 2048},
stop=["How"],
stream=False,
user="abc-123",
@@ -63,7 +63,7 @@ def test_invoke_stream_model(setup_google_mock):
model = GoogleLargeLanguageModel()
response = model.invoke(
- model="gemini-pro",
+ model="gemini-1.5-pro",
credentials={"google_api_key": os.environ.get("GOOGLE_API_KEY")},
prompt_messages=[
SystemPromptMessage(
@@ -80,7 +80,7 @@ def test_invoke_stream_model(setup_google_mock):
]
),
],
- model_parameters={"temperature": 0.2, "top_k": 5, "max_tokens_to_sample": 2048},
+ model_parameters={"temperature": 0.2, "top_k": 5, "max_tokens": 2048},
stream=True,
user="abc-123",
)
@@ -99,7 +99,7 @@ def test_invoke_chat_model_with_vision(setup_google_mock):
model = GoogleLargeLanguageModel()
result = model.invoke(
- model="gemini-pro-vision",
+ model="gemini-1.5-pro",
credentials={"google_api_key": os.environ.get("GOOGLE_API_KEY")},
prompt_messages=[
SystemPromptMessage(
@@ -128,7 +128,7 @@ def test_invoke_chat_model_with_vision_multi_pics(setup_google_mock):
model = GoogleLargeLanguageModel()
result = model.invoke(
- model="gemini-pro-vision",
+ model="gemini-1.5-pro",
credentials={"google_api_key": os.environ.get("GOOGLE_API_KEY")},
prompt_messages=[
SystemPromptMessage(content="You are a helpful AI assistant."),
@@ -164,7 +164,7 @@ def test_get_num_tokens():
model = GoogleLargeLanguageModel()
num_tokens = model.get_num_tokens(
- model="gemini-pro",
+ model="gemini-1.5-pro",
credentials={"google_api_key": os.environ.get("GOOGLE_API_KEY")},
prompt_messages=[
SystemPromptMessage(
diff --git a/api/tests/integration_tests/vdb/analyticdb/test_analyticdb.py b/api/tests/integration_tests/vdb/analyticdb/test_analyticdb.py
index 970b98edc3..4f44d2ffd6 100644
--- a/api/tests/integration_tests/vdb/analyticdb/test_analyticdb.py
+++ b/api/tests/integration_tests/vdb/analyticdb/test_analyticdb.py
@@ -1,27 +1,43 @@
from core.rag.datasource.vdb.analyticdb.analyticdb_vector import AnalyticdbConfig, AnalyticdbVector
+from core.rag.datasource.vdb.analyticdb.analyticdb_vector_openapi import AnalyticdbVectorOpenAPIConfig
+from core.rag.datasource.vdb.analyticdb.analyticdb_vector_sql import AnalyticdbVectorBySqlConfig
from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, setup_mock_redis
class AnalyticdbVectorTest(AbstractVectorTest):
- def __init__(self):
+ def __init__(self, config_type: str):
super().__init__()
# Analyticdb requires collection_name length less than 60.
# it's ok for normal usage.
self.collection_name = self.collection_name.replace("_test", "")
- self.vector = AnalyticdbVector(
- collection_name=self.collection_name,
- config=AnalyticdbConfig(
- access_key_id="test_key_id",
- access_key_secret="test_key_secret",
- region_id="test_region",
- instance_id="test_id",
- account="test_account",
- account_password="test_passwd",
- namespace="difytest_namespace",
- collection="difytest_collection",
- namespace_password="test_passwd",
- ),
- )
+ if config_type == "sql":
+ self.vector = AnalyticdbVector(
+ collection_name=self.collection_name,
+ sql_config=AnalyticdbVectorBySqlConfig(
+ host="test_host",
+ port=5432,
+ account="test_account",
+ account_password="test_passwd",
+ namespace="difytest_namespace",
+ ),
+ api_config=None,
+ )
+ else:
+ self.vector = AnalyticdbVector(
+ collection_name=self.collection_name,
+ sql_config=None,
+ api_config=AnalyticdbVectorOpenAPIConfig(
+ access_key_id="test_key_id",
+ access_key_secret="test_key_secret",
+ region_id="test_region",
+ instance_id="test_id",
+ account="test_account",
+ account_password="test_passwd",
+ namespace="difytest_namespace",
+ collection="difytest_collection",
+ namespace_password="test_passwd",
+ ),
+ )
def run_all_tests(self):
self.vector.delete()
@@ -29,4 +45,5 @@ class AnalyticdbVectorTest(AbstractVectorTest):
def test_chroma_vector(setup_mock_redis):
- AnalyticdbVectorTest().run_all_tests()
+ AnalyticdbVectorTest("api").run_all_tests()
+ AnalyticdbVectorTest("sql").run_all_tests()
diff --git a/api/tests/integration_tests/workflow/test_sync_workflow.py b/api/tests/integration_tests/workflow/test_sync_workflow.py
index df2ec95ebc..be270cdc49 100644
--- a/api/tests/integration_tests/workflow/test_sync_workflow.py
+++ b/api/tests/integration_tests/workflow/test_sync_workflow.py
@@ -27,8 +27,8 @@ NEW_VERSION_WORKFLOW_FEATURES = {
"file_upload": {
"enabled": True,
"allowed_file_types": ["image"],
- "allowed_extensions": [],
- "allowed_upload_methods": ["remote_url", "local_file"],
+ "allowed_file_extensions": [],
+ "allowed_file_upload_methods": ["remote_url", "local_file"],
"number_limits": 6,
},
"opening_statement": "",
diff --git a/api/tests/unit_tests/core/test_file.py b/api/tests/unit_tests/core/test_file.py
index aa61c1c6f7..4edbc01cc7 100644
--- a/api/tests/unit_tests/core/test_file.py
+++ b/api/tests/unit_tests/core/test_file.py
@@ -1,4 +1,7 @@
-from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod, FileType
+import json
+
+from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod, FileType, FileUploadConfig
+from models.workflow import Workflow
def test_file_loads_and_dumps():
@@ -38,3 +41,40 @@ def test_file_to_dict():
file_dict = file.to_dict()
assert "_extra_config" not in file_dict
assert "url" in file_dict
+
+
+def test_workflow_features_with_image():
+ # Create a feature dict that mimics the old structure with image config
+ features = {
+ "file_upload": {
+ "image": {"enabled": True, "number_limits": 5, "transfer_methods": ["remote_url", "local_file"]}
+ }
+ }
+
+ # Create a workflow instance with the features
+ workflow = Workflow(
+ tenant_id="tenant-1",
+ app_id="app-1",
+ type="chat",
+ version="1.0",
+ graph="{}",
+ features=json.dumps(features),
+ created_by="user-1",
+ environment_variables=[],
+ conversation_variables=[],
+ )
+
+ # Get the converted features through the property
+ converted_features = json.loads(workflow.features)
+
+ # Create FileUploadConfig from the converted features
+ file_upload_config = FileUploadConfig.model_validate(converted_features["file_upload"])
+
+ # Validate the config
+ assert file_upload_config.number_limits == 5
+ assert list(file_upload_config.allowed_file_types) == [FileType.IMAGE]
+ assert list(file_upload_config.allowed_file_upload_methods) == [
+ FileTransferMethod.REMOTE_URL,
+ FileTransferMethod.LOCAL_FILE,
+ ]
+ assert list(file_upload_config.allowed_file_extensions) == []
diff --git a/api/tests/unit_tests/core/test_model_manager.py b/api/tests/unit_tests/core/test_model_manager.py
index 2808b5b0fa..d98e9f6bad 100644
--- a/api/tests/unit_tests/core/test_model_manager.py
+++ b/api/tests/unit_tests/core/test_model_manager.py
@@ -1,10 +1,12 @@
-from unittest.mock import MagicMock
+from unittest.mock import MagicMock, patch
import pytest
+import redis
from core.entities.provider_entities import ModelLoadBalancingConfiguration
from core.model_manager import LBModelManager
from core.model_runtime.entities.model_entities import ModelType
+from extensions.ext_redis import redis_client
@pytest.fixture
@@ -38,6 +40,9 @@ def lb_model_manager():
def test_lb_model_manager_fetch_next(mocker, lb_model_manager):
+ # initialize redis client
+ redis_client.initialize(redis.Redis())
+
assert len(lb_model_manager._load_balancing_configs) == 3
config1 = lb_model_manager._load_balancing_configs[0]
@@ -55,12 +60,13 @@ def test_lb_model_manager_fetch_next(mocker, lb_model_manager):
start_index += 1
return start_index
- mocker.patch("redis.Redis.incr", side_effect=incr)
- mocker.patch("redis.Redis.set", return_value=None)
- mocker.patch("redis.Redis.expire", return_value=None)
+ with (
+ patch.object(redis_client, "incr", side_effect=incr),
+ patch.object(redis_client, "set", return_value=None),
+ patch.object(redis_client, "expire", return_value=None),
+ ):
+ config = lb_model_manager.fetch_next()
+ assert config == config2
- config = lb_model_manager.fetch_next()
- assert config == config2
-
- config = lb_model_manager.fetch_next()
- assert config == config3
+ config = lb_model_manager.fetch_next()
+ assert config == config3
diff --git a/api/tests/unit_tests/core/workflow/nodes/http_request/test_entities.py b/api/tests/unit_tests/core/workflow/nodes/http_request/test_entities.py
new file mode 100644
index 0000000000..0f6b7e4ab6
--- /dev/null
+++ b/api/tests/unit_tests/core/workflow/nodes/http_request/test_entities.py
@@ -0,0 +1,140 @@
+from unittest.mock import Mock, PropertyMock, patch
+
+import httpx
+import pytest
+
+from core.workflow.nodes.http_request.entities import Response
+
+
+@pytest.fixture
+def mock_response():
+ response = Mock(spec=httpx.Response)
+ response.headers = {}
+ return response
+
+
+def test_is_file_with_attachment_disposition(mock_response):
+ """Test is_file when content-disposition header contains 'attachment'"""
+ mock_response.headers = {"content-disposition": "attachment; filename=test.pdf", "content-type": "application/pdf"}
+ response = Response(mock_response)
+ assert response.is_file
+
+
+def test_is_file_with_filename_disposition(mock_response):
+ """Test is_file when content-disposition header contains filename parameter"""
+ mock_response.headers = {"content-disposition": "inline; filename=test.pdf", "content-type": "application/pdf"}
+ response = Response(mock_response)
+ assert response.is_file
+
+
+@pytest.mark.parametrize("content_type", ["application/pdf", "image/jpeg", "audio/mp3", "video/mp4"])
+def test_is_file_with_file_content_types(mock_response, content_type):
+ """Test is_file with various file content types"""
+ mock_response.headers = {"content-type": content_type}
+ # Mock binary content
+ type(mock_response).content = PropertyMock(return_value=bytes([0x00, 0xFF] * 512))
+ response = Response(mock_response)
+ assert response.is_file, f"Content type {content_type} should be identified as a file"
+
+
+@pytest.mark.parametrize(
+ "content_type",
+ [
+ "application/json",
+ "application/xml",
+ "application/javascript",
+ "application/x-www-form-urlencoded",
+ "application/yaml",
+ "application/graphql",
+ ],
+)
+def test_text_based_application_types(mock_response, content_type):
+ """Test common text-based application types are not identified as files"""
+ mock_response.headers = {"content-type": content_type}
+ response = Response(mock_response)
+ assert not response.is_file, f"Content type {content_type} should not be identified as a file"
+
+
+@pytest.mark.parametrize(
+ ("content", "content_type"),
+ [
+ (b'{"key": "value"}', "application/octet-stream"),
+ (b"[1, 2, 3]", "application/unknown"),
+ (b"function test() {}", "application/x-unknown"),
+ (b"test", "application/binary"),
+ (b"var x = 1;", "application/data"),
+ ],
+)
+def test_content_based_detection(mock_response, content, content_type):
+ """Test content-based detection for text-like content"""
+ mock_response.headers = {"content-type": content_type}
+ type(mock_response).content = PropertyMock(return_value=content)
+ response = Response(mock_response)
+ assert not response.is_file, f"Content {content} with type {content_type} should not be identified as a file"
+
+
+@pytest.mark.parametrize(
+ ("content", "content_type"),
+ [
+ (bytes([0x00, 0xFF] * 512), "application/octet-stream"),
+ (bytes([0x89, 0x50, 0x4E, 0x47]), "application/unknown"), # PNG magic numbers
+ (bytes([0xFF, 0xD8, 0xFF]), "application/binary"), # JPEG magic numbers
+ ],
+)
+def test_binary_content_detection(mock_response, content, content_type):
+ """Test content-based detection for binary content"""
+ mock_response.headers = {"content-type": content_type}
+ type(mock_response).content = PropertyMock(return_value=content)
+ response = Response(mock_response)
+ assert response.is_file, f"Binary content with type {content_type} should be identified as a file"
+
+
+@pytest.mark.parametrize(
+ ("content_type", "expected_main_type"),
+ [
+ ("x-world/x-vrml", "model"), # VRML 3D model
+ ("font/ttf", "application"), # TrueType font
+ ("text/csv", "text"), # CSV text file
+ ("unknown/xyz", None), # Unknown type
+ ],
+)
+def test_mimetype_based_detection(mock_response, content_type, expected_main_type):
+ """Test detection using mimetypes.guess_type for non-application content types"""
+ mock_response.headers = {"content-type": content_type}
+ type(mock_response).content = PropertyMock(return_value=bytes([0x00])) # Dummy content
+
+ with patch("core.workflow.nodes.http_request.entities.mimetypes.guess_type") as mock_guess_type:
+ # Mock the return value based on expected_main_type
+ if expected_main_type:
+ mock_guess_type.return_value = (f"{expected_main_type}/subtype", None)
+ else:
+ mock_guess_type.return_value = (None, None)
+
+ response = Response(mock_response)
+
+ # Check if the result matches our expectation
+ if expected_main_type in ("application", "image", "audio", "video"):
+ assert response.is_file, f"Content type {content_type} should be identified as a file"
+ else:
+ assert not response.is_file, f"Content type {content_type} should not be identified as a file"
+
+ # Verify that guess_type was called
+ mock_guess_type.assert_called_once()
+
+
+def test_is_file_with_inline_disposition(mock_response):
+ """Test is_file when content-disposition is 'inline'"""
+ mock_response.headers = {"content-disposition": "inline", "content-type": "application/pdf"}
+ # Mock binary content
+ type(mock_response).content = PropertyMock(return_value=bytes([0x00, 0xFF] * 512))
+ response = Response(mock_response)
+ assert response.is_file
+
+
+def test_is_file_with_no_content_disposition(mock_response):
+ """Test is_file when no content-disposition header is present"""
+ mock_response.headers = {"content-type": "application/pdf"}
+ # Mock binary content
+ type(mock_response).content = PropertyMock(return_value=bytes([0x00, 0xFF] * 512))
+ response = Response(mock_response)
+ assert response.is_file
diff --git a/api/tests/unit_tests/utils/test_text_processing.py b/api/tests/unit_tests/utils/test_text_processing.py
new file mode 100644
index 0000000000..f9d00d0b39
--- /dev/null
+++ b/api/tests/unit_tests/utils/test_text_processing.py
@@ -0,0 +1,20 @@
+from textwrap import dedent
+
+import pytest
+
+from core.tools.utils.text_processing_utils import remove_leading_symbols
+
+
+@pytest.mark.parametrize(
+ ("input_text", "expected_output"),
+ [
+ ("...Hello, World!", "Hello, World!"),
+ ("。测试中文标点", "测试中文标点"),
+ ("!@#Test symbols", "Test symbols"),
+ ("Hello, World!", "Hello, World!"),
+ ("", ""),
+ (" ", " "),
+ ],
+)
+def test_remove_leading_symbols(input_text, expected_output):
+ assert remove_leading_symbols(input_text) == expected_output
diff --git a/docker-legacy/docker-compose.yaml b/docker-legacy/docker-compose.yaml
index 9c2a1fe980..7bf2cd4708 100644
--- a/docker-legacy/docker-compose.yaml
+++ b/docker-legacy/docker-compose.yaml
@@ -2,7 +2,7 @@ version: '3'
services:
# API service
api:
- image: langgenius/dify-api:0.11.1
+ image: langgenius/dify-api:0.11.2
restart: always
environment:
# Startup mode, 'api' starts the API server.
@@ -227,7 +227,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
- image: langgenius/dify-api:0.11.1
+ image: langgenius/dify-api:0.11.2
restart: always
environment:
CONSOLE_WEB_URL: ''
@@ -397,7 +397,7 @@ services:
# Frontend web application.
web:
- image: langgenius/dify-web:0.11.1
+ image: langgenius/dify-web:0.11.2
restart: always
environment:
# The base URL of console application api server, refers to the Console base URL of WEB service if console domain is
diff --git a/docker/.env.example b/docker/.env.example
index cf09f72bce..d29c66535d 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -49,7 +49,7 @@ FILES_URL=
# Supported values are `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL`
LOG_LEVEL=INFO
# Log file path
-LOG_FILE=
+LOG_FILE=/app/logs/server.log
# Log file max size, the unit is MB
LOG_FILE_MAX_SIZE=20
# Log file max backup count
@@ -75,7 +75,8 @@ SECRET_KEY=sk-9f73s3ljTXVcMT3Blb3ljTqtsKiGHXVcMT3BlbkFJLK7U
# Password for admin user initialization.
# If left unset, admin user will not be prompted for a password
-# when creating the initial admin account.
+# when creating the initial admin account.
+# The length of the password cannot exceed 30 characters.
INIT_PASSWORD=
# Deployment environment.
@@ -239,6 +240,12 @@ REDIS_SENTINEL_USERNAME=
REDIS_SENTINEL_PASSWORD=
REDIS_SENTINEL_SOCKET_TIMEOUT=0.1
+# List of Redis Cluster nodes. If Cluster mode is enabled, provide at least one Cluster IP and port.
+# Format: `<host1>:<port1>,<host2>:<port2>,<host3>:<port3>`
+REDIS_USE_CLUSTERS=false
+REDIS_CLUSTERS=
+REDIS_CLUSTERS_PASSWORD=
+
# ------------------------------
# Celery Configuration
# ------------------------------
@@ -450,6 +457,10 @@ ANALYTICDB_ACCOUNT=testaccount
ANALYTICDB_PASSWORD=testpassword
ANALYTICDB_NAMESPACE=dify
ANALYTICDB_NAMESPACE_PASSWORD=difypassword
+ANALYTICDB_HOST=gp-test.aliyuncs.com
+ANALYTICDB_PORT=5432
+ANALYTICDB_MIN_CONNECTION=1
+ANALYTICDB_MAX_CONNECTION=5
# TiDB vector configurations, only available when VECTOR_STORE is `tidb`
TIDB_VECTOR_HOST=tidb
@@ -558,7 +569,7 @@ UPLOAD_FILE_SIZE_LIMIT=15
# The maximum number of files that can be uploaded at a time, default 5.
UPLOAD_FILE_BATCH_LIMIT=5
-# ETl type, support: `dify`, `Unstructured`
+# ETL type, support: `dify`, `Unstructured`
# `dify` Dify's proprietary file extraction scheme
# `Unstructured` Unstructured.io file extraction scheme
ETL_TYPE=dify
@@ -916,4 +927,4 @@ POSITION_PROVIDER_EXCLUDES=
CSP_WHITELIST=
# Enable or disable create tidb service job
-CREATE_TIDB_SERVICE_JOB_ENABLED=false
\ No newline at end of file
+CREATE_TIDB_SERVICE_JOB_ENABLED=false
diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml
index 0de68c5299..f49dfb2ff7 100644
--- a/docker/docker-compose.yaml
+++ b/docker/docker-compose.yaml
@@ -55,6 +55,9 @@ x-shared-env: &shared-api-worker-env
REDIS_SENTINEL_USERNAME: ${REDIS_SENTINEL_USERNAME:-}
REDIS_SENTINEL_PASSWORD: ${REDIS_SENTINEL_PASSWORD:-}
REDIS_SENTINEL_SOCKET_TIMEOUT: ${REDIS_SENTINEL_SOCKET_TIMEOUT:-0.1}
+ REDIS_CLUSTERS: ${REDIS_CLUSTERS:-}
+ REDIS_USE_CLUSTERS: ${REDIS_USE_CLUSTERS:-false}
+ REDIS_CLUSTERS_PASSWORD: ${REDIS_CLUSTERS_PASSWORD:-}
ACCESS_TOKEN_EXPIRE_MINUTES: ${ACCESS_TOKEN_EXPIRE_MINUTES:-60}
CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://:difyai123456@redis:6379/1}
BROKER_USE_SSL: ${BROKER_USE_SSL:-false}
@@ -185,6 +188,10 @@ x-shared-env: &shared-api-worker-env
ANALYTICDB_PASSWORD: ${ANALYTICDB_PASSWORD:-}
ANALYTICDB_NAMESPACE: ${ANALYTICDB_NAMESPACE:-dify}
ANALYTICDB_NAMESPACE_PASSWORD: ${ANALYTICDB_NAMESPACE_PASSWORD:-}
+ ANALYTICDB_HOST: ${ANALYTICDB_HOST:-}
+ ANALYTICDB_PORT: ${ANALYTICDB_PORT:-5432}
+ ANALYTICDB_MIN_CONNECTION: ${ANALYTICDB_MIN_CONNECTION:-1}
+ ANALYTICDB_MAX_CONNECTION: ${ANALYTICDB_MAX_CONNECTION:-5}
OPENSEARCH_HOST: ${OPENSEARCH_HOST:-opensearch}
OPENSEARCH_PORT: ${OPENSEARCH_PORT:-9200}
OPENSEARCH_USER: ${OPENSEARCH_USER:-admin}
@@ -283,7 +290,7 @@ x-shared-env: &shared-api-worker-env
services:
# API service
api:
- image: langgenius/dify-api:0.11.1
+ image: langgenius/dify-api:0.11.2
restart: always
environment:
# Use the shared environment variables.
@@ -303,7 +310,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
- image: langgenius/dify-api:0.11.1
+ image: langgenius/dify-api:0.11.2
restart: always
environment:
# Use the shared environment variables.
@@ -322,7 +329,7 @@ services:
# Frontend web application.
web:
- image: langgenius/dify-web:0.11.1
+ image: langgenius/dify-web:0.11.2
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
diff --git a/web/.gitignore b/web/.gitignore
index efcbf2bfcd..048c5f6485 100644
--- a/web/.gitignore
+++ b/web/.gitignore
@@ -50,3 +50,7 @@ package-lock.json
# storybook
/storybook-static
*storybook.log
+
+# mise
+mise.toml
+
diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/chartView.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/chartView.tsx
index b01bc1b856..b5d3462dfa 100644
--- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/chartView.tsx
+++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/chartView.tsx
@@ -7,7 +7,7 @@ import type { PeriodParams } from '@/app/components/app/overview/appChart'
import { AvgResponseTime, AvgSessionInteractions, AvgUserInteractions, ConversationsChart, CostChart, EndUsersChart, MessagesChart, TokenPerSecond, UserSatisfactionRate, WorkflowCostChart, WorkflowDailyTerminalsChart, WorkflowMessagesChart } from '@/app/components/app/overview/appChart'
import type { Item } from '@/app/components/base/select'
import { SimpleSelect } from '@/app/components/base/select'
-import { TIME_PERIOD_LIST } from '@/app/components/app/log/filter'
+import { TIME_PERIOD_MAPPING } from '@/app/components/app/log/filter'
import { useStore as useAppStore } from '@/app/components/app/store'
dayjs.extend(quarterOfYear)
@@ -28,7 +28,7 @@ export default function ChartView({ appId }: IChartViewProps) {
const [period, setPeriod] = useState({ name: t('appLog.filter.period.last7days'), query: { start: today.subtract(7, 'day').startOf('day').format(queryDateFormat), end: today.endOf('day').format(queryDateFormat) } })
const onSelect = (item: Item) => {
- if (item.value === 'all') {
+ if (item.value === '-1') {
setPeriod({ name: item.name, query: undefined })
}
else if (item.value === 0) {
@@ -49,10 +49,15 @@ export default function ChartView({ appId }: IChartViewProps) {
{t('appOverview.analysis.title')}
({ value: item.value, name: t(`appLog.filter.period.${item.name}`) }))}
+ items={Object.entries(TIME_PERIOD_MAPPING).map(([k, v]) => ({ value: k, name: t(`appLog.filter.period.${v.name}`) }))}
className='mt-0 !w-40'
- onSelect={onSelect}
- defaultValue={7}
+ onSelect={(item) => {
+ const id = item.value
+ const value = TIME_PERIOD_MAPPING[id]?.value || '-1'
+ const name = item.name || t('appLog.filter.period.allTime')
+ onSelect({ value, name })
+ }}
+ defaultValue={'2'}
/>
{!isWorkflow && (
diff --git a/web/app/(commonLayout)/datasets/Container.tsx b/web/app/(commonLayout)/datasets/Container.tsx
index c30cc18418..3be8b2a968 100644
--- a/web/app/(commonLayout)/datasets/Container.tsx
+++ b/web/app/(commonLayout)/datasets/Container.tsx
@@ -27,6 +27,7 @@ import { useTabSearchParams } from '@/hooks/use-tab-searchparams'
import { useStore as useTagStore } from '@/app/components/base/tag-management/store'
import { useAppContext } from '@/context/app-context'
import { useExternalApiPanel } from '@/context/external-api-panel-context'
+
import { useQuery } from '@tanstack/react-query'
const Container = () => {
@@ -49,7 +50,7 @@ const Container = () => {
const containerRef = useRef(null)
const { data } = useQuery(
{
- queryKey: ['datasetApiBaseInfo', activeTab],
+ queryKey: ['datasetApiBaseInfo'],
queryFn: () => fetchDatasetApiBaseUrl('/datasets/api-base-info'),
enabled: activeTab !== 'dataset',
},
diff --git a/web/app/(commonLayout)/datasets/template/template.en.mdx b/web/app/(commonLayout)/datasets/template/template.en.mdx
index 02e23429ce..d3dcfc4b24 100644
--- a/web/app/(commonLayout)/datasets/template/template.en.mdx
+++ b/web/app/(commonLayout)/datasets/template/template.en.mdx
@@ -329,7 +329,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
diff --git a/web/app/(commonLayout)/datasets/template/template.zh.mdx b/web/app/(commonLayout)/datasets/template/template.zh.mdx
index e5d5f56120..db15ede9fc 100644
--- a/web/app/(commonLayout)/datasets/template/template.zh.mdx
+++ b/web/app/(commonLayout)/datasets/template/template.zh.mdx
@@ -329,7 +329,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
diff --git a/web/app/components/app/log/filter.tsx b/web/app/components/app/log/filter.tsx
index 1d67627577..787b7405c1 100644
--- a/web/app/components/app/log/filter.tsx
+++ b/web/app/components/app/log/filter.tsx
@@ -15,19 +15,19 @@ dayjs.extend(quarterOfYear)
const today = dayjs()
-export const TIME_PERIOD_LIST = [
- { value: 0, name: 'today' },
- { value: 7, name: 'last7days' },
- { value: 28, name: 'last4weeks' },
- { value: today.diff(today.subtract(3, 'month'), 'day'), name: 'last3months' },
- { value: today.diff(today.subtract(12, 'month'), 'day'), name: 'last12months' },
- { value: today.diff(today.startOf('month'), 'day'), name: 'monthToDate' },
- { value: today.diff(today.startOf('quarter'), 'day'), name: 'quarterToDate' },
- { value: today.diff(today.startOf('year'), 'day'), name: 'yearToDate' },
- { value: 'all', name: 'allTime' },
-]
+export const TIME_PERIOD_MAPPING: { [key: string]: { value: number; name: string } } = {
+ 1: { value: 0, name: 'today' },
+ 2: { value: 7, name: 'last7days' },
+ 3: { value: 28, name: 'last4weeks' },
+ 4: { value: today.diff(today.subtract(3, 'month'), 'day'), name: 'last3months' },
+ 5: { value: today.diff(today.subtract(12, 'month'), 'day'), name: 'last12months' },
+ 6: { value: today.diff(today.startOf('month'), 'day'), name: 'monthToDate' },
+ 7: { value: today.diff(today.startOf('quarter'), 'day'), name: 'quarterToDate' },
+ 8: { value: today.diff(today.startOf('year'), 'day'), name: 'yearToDate' },
+ 9: { value: -1, name: 'allTime' },
+}
-interface IFilterProps {
+type IFilterProps = {
isChatMode?: boolean
appId: string
queryParams: QueryParam
@@ -45,12 +45,12 @@ const Filter: FC = ({ isChatMode, appId, queryParams, setQueryPara
className='min-w-[150px]'
panelClassName='w-[270px]'
leftIcon={}
- value={queryParams.period || 7}
+ value={queryParams.period}
onSelect={(item) => {
- setQueryParams({ ...queryParams, period: item.value as string })
+ setQueryParams({ ...queryParams, period: item.value })
}}
- onClear={() => setQueryParams({ ...queryParams, period: 7 })}
- items={TIME_PERIOD_LIST.map(item => ({ value: item.value, name: t(`appLog.filter.period.${item.name}`) }))}
+ onClear={() => setQueryParams({ ...queryParams, period: '9' })}
+ items={Object.entries(TIME_PERIOD_MAPPING).map(([k, v]) => ({ value: k, name: t(`appLog.filter.period.${v.name}`) }))}
/>
= ({ appUrl }) => {
const Logs: FC = ({ appDetail }) => {
const { t } = useTranslation()
const [queryParams, setQueryParams] = useState({
- period: 7,
+ period: '2',
annotation_status: 'all',
sort_by: '-created_at',
})
@@ -68,9 +68,9 @@ const Logs: FC = ({ appDetail }) => {
const query = {
page: currPage + 1,
limit: APP_PAGE_LIMIT,
- ...(debouncedQueryParams.period !== 'all'
+ ...((debouncedQueryParams.period !== '9')
? {
- start: dayjs().subtract(debouncedQueryParams.period as number, 'day').startOf('day').format('YYYY-MM-DD HH:mm'),
+ start: dayjs().subtract(TIME_PERIOD_MAPPING[debouncedQueryParams.period].value, 'day').startOf('day').format('YYYY-MM-DD HH:mm'),
end: dayjs().endOf('day').format('YYYY-MM-DD HH:mm'),
}
: {}),
@@ -130,7 +130,7 @@ const Logs: FC = ({ appDetail }) => {
{t('appLog.table.pagination.previous')}
-
+
= ({ logs, appDetail, onRefresh })
const [showDrawer, setShowDrawer] = useState(false) // Whether to display the chat details drawer
const [currentConversation, setCurrentConversation] = useState() // Currently selected conversation
const isChatMode = appDetail.mode !== 'completion' // Whether the app is a chat app
+ const { setShowPromptLogModal, setShowAgentLogModal } = useAppStore(useShallow(state => ({
+ setShowPromptLogModal: state.setShowPromptLogModal,
+ setShowAgentLogModal: state.setShowAgentLogModal,
+ })))
// Annotated data needs to be highlighted
const renderTdValue = (value: string | number | null, isEmptyStyle: boolean, isHighlight = false, annotation?: LogAnnotation) => {
@@ -700,6 +704,8 @@ const ConversationList: FC = ({ logs, appDetail, onRefresh })
onRefresh()
setShowDrawer(false)
setCurrentConversation(undefined)
+ setShowPromptLogModal(false)
+ setShowAgentLogModal(false)
}
if (!logs)
diff --git a/web/app/components/base/chat/chat/chat-input-area/index.tsx b/web/app/components/base/chat/chat/chat-input-area/index.tsx
index 32d841148a..5169e65a59 100644
--- a/web/app/components/base/chat/chat/chat-input-area/index.tsx
+++ b/web/app/components/base/chat/chat/chat-input-area/index.tsx
@@ -1,5 +1,6 @@
import {
useCallback,
+ useRef,
useState,
} from 'react'
import Textarea from 'rc-textarea'
@@ -73,7 +74,8 @@ const ChatInputArea = ({
isDragActive,
} = useFile(visionConfig!)
const { checkInputsForm } = useCheckInputsForms()
-
+ const historyRef = useRef([''])
+ const [currentIndex, setCurrentIndex] = useState(-1)
const handleSend = () => {
if (onSend) {
const { files, setFiles } = filesStore.getState()
@@ -92,13 +94,33 @@ const ChatInputArea = ({
}
}
}
-
const handleKeyDown = (e: React.KeyboardEvent) => {
if (e.key === 'Enter' && !e.shiftKey && !e.nativeEvent.isComposing) {
e.preventDefault()
setQuery(query.replace(/\n$/, ''))
+ historyRef.current.push(query)
+ setCurrentIndex(historyRef.current.length)
handleSend()
}
+ else if (e.key === 'ArrowUp' && !e.shiftKey && !e.nativeEvent.isComposing) {
+ // When the up key is pressed, output the previous element
+ if (currentIndex > 0) {
+ setCurrentIndex(currentIndex - 1)
+ setQuery(historyRef.current[currentIndex - 1])
+ }
+ }
+ else if (e.key === 'ArrowDown' && !e.shiftKey && !e.nativeEvent.isComposing) {
+ // When the down key is pressed, output the next element
+ if (currentIndex < historyRef.current.length - 1) {
+ setCurrentIndex(currentIndex + 1)
+ setQuery(historyRef.current[currentIndex + 1])
+ }
+ else if (currentIndex === historyRef.current.length - 1) {
+ // If it is the last element, clear the input box
+ setCurrentIndex(historyRef.current.length)
+ setQuery('')
+ }
+ }
}
const handleShowVoiceInput = useCallback(() => {
diff --git a/web/app/components/base/file-uploader/file-input.tsx b/web/app/components/base/file-uploader/file-input.tsx
index ff71cf1030..f7d659c66f 100644
--- a/web/app/components/base/file-uploader/file-input.tsx
+++ b/web/app/components/base/file-uploader/file-input.tsx
@@ -13,15 +13,24 @@ const FileInput = ({
const files = useStore(s => s.files)
const { handleLocalFileUpload } = useFile(fileConfig)
const handleChange = (e: React.ChangeEvent) => {
- const file = e.target.files?.[0]
+ const targetFiles = e.target.files
- if (file)
- handleLocalFileUpload(file)
+ if (targetFiles) {
+ if (fileConfig.number_limits) {
+ for (let i = 0; i < targetFiles.length; i++) {
+ if (i + 1 + files.length <= fileConfig.number_limits)
+ handleLocalFileUpload(targetFiles[i])
+ }
+ }
+ else {
+ handleLocalFileUpload(targetFiles[0])
+ }
+ }
}
const allowedFileTypes = fileConfig.allowed_file_types
const isCustom = allowedFileTypes?.includes(SupportUploadFileTypes.custom)
- const exts = isCustom ? (fileConfig.allowed_file_extensions?.map(item => `.${item}`) || []) : (allowedFileTypes?.map(type => FILE_EXTS[type]) || []).flat().map(item => `.${item}`)
+ const exts = isCustom ? (fileConfig.allowed_file_extensions || []) : (allowedFileTypes?.map(type => FILE_EXTS[type]) || []).flat().map(item => `.${item}`)
const accept = exts.join(',')
return (
@@ -32,6 +41,7 @@ const FileInput = ({
onChange={handleChange}
accept={accept}
disabled={!!(fileConfig.number_limits && files.length >= fileConfig?.number_limits)}
+ multiple={!!fileConfig.number_limits && fileConfig.number_limits > 1}
/>
)
}
diff --git a/web/app/components/base/file-uploader/file-uploader-in-chat-input/file-item.tsx b/web/app/components/base/file-uploader/file-uploader-in-chat-input/file-item.tsx
index a051b89ec1..fcf665643c 100644
--- a/web/app/components/base/file-uploader/file-uploader-in-chat-input/file-item.tsx
+++ b/web/app/components/base/file-uploader/file-uploader-in-chat-input/file-item.tsx
@@ -98,6 +98,7 @@ const FileItem = ({
)
}
diff --git a/web/app/components/base/file-uploader/hooks.ts b/web/app/components/base/file-uploader/hooks.ts
index c735754ffe..256202d783 100644
--- a/web/app/components/base/file-uploader/hooks.ts
+++ b/web/app/components/base/file-uploader/hooks.ts
@@ -241,7 +241,7 @@ export const useFile = (fileConfig: FileUpload) => {
notify({ type: 'error', message: t('common.fileUploader.pasteFileLinkInvalid') })
handleRemoveFile(uploadingFile.id)
})
- }, [checkSizeLimit, handleAddFile, handleUpdateFile, notify, t, handleRemoveFile, fileConfig?.allowed_file_types, fileConfig.allowed_file_extensions, startProgressTimer])
+ }, [checkSizeLimit, handleAddFile, handleUpdateFile, notify, t, handleRemoveFile, fileConfig?.allowed_file_types, fileConfig.allowed_file_extensions, startProgressTimer, params.token])
const handleLoadFileFromLinkSuccess = useCallback(() => { }, [])
diff --git a/web/app/components/base/file-uploader/utils.ts b/web/app/components/base/file-uploader/utils.ts
index eb9199d74b..aa8625f221 100644
--- a/web/app/components/base/file-uploader/utils.ts
+++ b/web/app/components/base/file-uploader/utils.ts
@@ -44,21 +44,24 @@ export const fileUpload: FileUpload = ({
}
export const getFileExtension = (fileName: string, fileMimetype: string, isRemote?: boolean) => {
+ let extension = ''
if (fileMimetype)
- return mime.getExtension(fileMimetype) || ''
+ extension = mime.getExtension(fileMimetype) || ''
- if (isRemote)
- return ''
-
- if (fileName) {
+ if (fileName && !extension) {
const fileNamePair = fileName.split('.')
const fileNamePairLength = fileNamePair.length
if (fileNamePairLength > 1)
- return fileNamePair[fileNamePairLength - 1]
+ extension = fileNamePair[fileNamePairLength - 1]
+ else
+ extension = ''
}
- return ''
+ if (isRemote)
+ extension = ''
+
+ return extension
}
export const getFileAppearanceType = (fileName: string, fileMimetype: string) => {
@@ -145,7 +148,7 @@ export const getFileNameFromUrl = (url: string) => {
export const getSupportFileExtensionList = (allowFileTypes: string[], allowFileExtensions: string[]) => {
if (allowFileTypes.includes(SupportUploadFileTypes.custom))
- return allowFileExtensions.map(item => item.toUpperCase())
+ return allowFileExtensions.map(item => item.slice(1).toUpperCase())
return allowFileTypes.map(type => FILE_EXTS[type]).flat()
}
diff --git a/web/app/components/base/markdown.tsx b/web/app/components/base/markdown.tsx
index 48d1d2a0a5..37fbc2dfbd 100644
--- a/web/app/components/base/markdown.tsx
+++ b/web/app/components/base/markdown.tsx
@@ -138,7 +138,7 @@ const CodeBlock: Components['code'] = memo(({ className, children, ...props }) =
try {
return JSON.parse(String(children).replace(/\n$/, ''))
}
- catch {}
+ catch { }
}
return JSON.parse('{"title":{"text":"ECharts error - Wrong JSON format."}}')
}, [language, children])
@@ -196,7 +196,7 @@ const CodeBlock: Components['code'] = memo(({ className, children, ...props }) =
>
{languageShowName}
- {(['mermaid', 'svg']).includes(language!) &&
}
+ {(['mermaid', 'svg']).includes(language!) && }
{
})
// AudioBlock.displayName = 'AudioBlock'
+const ScriptBlock = memo(({ node }: any) => {
+ const scriptContent = node.children[0]?.value || ''
+ return ``
+})
+ScriptBlock.displayName = 'ScriptBlock'
+
const Paragraph: Components['p'] = ({ node, children }) => {
const children_node = node!.children
if (children_node && children_node[0] && 'tagName' in children_node[0] && children_node[0].tagName === 'img')
@@ -278,7 +284,7 @@ export function Markdown(props: { content: string; className?: string }) {
}
},
]}
- disallowedElements={['script', 'iframe', 'head', 'html', 'meta', 'link', 'style', 'body']}
+ disallowedElements={['iframe', 'head', 'html', 'meta', 'link', 'style', 'body']}
components={{
pre: PreBlock,
code: CodeBlock,
@@ -289,6 +295,7 @@ export function Markdown(props: { content: string; className?: string }) {
p: Paragraph,
button: MarkdownButton,
form: MarkdownForm,
+ script: ScriptBlock,
}}
>
{/* Markdown detect has problem. */}
diff --git a/web/app/components/develop/doc.tsx b/web/app/components/develop/doc.tsx
index eddc07d916..ce5471676d 100644
--- a/web/app/components/develop/doc.tsx
+++ b/web/app/components/develop/doc.tsx
@@ -1,5 +1,8 @@
'use client'
+import { useEffect, useState } from 'react'
import { useContext } from 'use-context-selector'
+import { useTranslation } from 'react-i18next'
+import { RiListUnordered } from '@remixicon/react'
import TemplateEn from './template/template.en.mdx'
import TemplateZh from './template/template.zh.mdx'
import TemplateAdvancedChatEn from './template/template_advanced_chat.en.mdx'
@@ -17,6 +20,9 @@ type IDocProps = {
const Doc = ({ appDetail }: IDocProps) => {
const { locale } = useContext(I18n)
+ const { t } = useTranslation()
+ const [toc, setToc] = useState>([])
+ const [isTocExpanded, setIsTocExpanded] = useState(false)
const variables = appDetail?.model_config?.configs?.prompt_variables || []
const inputs = variables.reduce((res: any, variable: any) => {
@@ -24,21 +30,87 @@ const Doc = ({ appDetail }: IDocProps) => {
return res
}, {})
+ useEffect(() => {
+ const mediaQuery = window.matchMedia('(min-width: 1280px)')
+ setIsTocExpanded(mediaQuery.matches)
+ }, [])
+
+ useEffect(() => {
+ const extractTOC = () => {
+ const article = document.querySelector('article')
+ if (article) {
+ const headings = article.querySelectorAll('h2')
+ const tocItems = Array.from(headings).map((heading) => {
+ const anchor = heading.querySelector('a')
+ if (anchor) {
+ return {
+ href: anchor.getAttribute('href') || '',
+ text: anchor.textContent || '',
+ }
+ }
+ return null
+ }).filter((item): item is { href: string; text: string } => item !== null)
+ setToc(tocItems)
+ }
+ }
+
+ // Run after component has rendered
+ setTimeout(extractTOC, 0)
+ }, [appDetail, locale])
+
return (
-
- {(appDetail?.mode === 'chat' || appDetail?.mode === 'agent-chat') && (
- locale !== LanguagesSupported[1] ? :
- )}
- {appDetail?.mode === 'advanced-chat' && (
- locale !== LanguagesSupported[1] ? :
- )}
- {appDetail?.mode === 'workflow' && (
- locale !== LanguagesSupported[1] ? :
- )}
- {appDetail?.mode === 'completion' && (
- locale !== LanguagesSupported[1] ? :
- )}
-
+
+
+ {isTocExpanded
+ ? (
+
+ )
+ : (
+
+ )}
+
+
+ {(appDetail?.mode === 'chat' || appDetail?.mode === 'agent-chat') && (
+ locale !== LanguagesSupported[1] ? :
+ )}
+ {appDetail?.mode === 'advanced-chat' && (
+ locale !== LanguagesSupported[1] ? :
+ )}
+ {appDetail?.mode === 'workflow' && (
+ locale !== LanguagesSupported[1] ? :
+ )}
+ {appDetail?.mode === 'completion' && (
+ locale !== LanguagesSupported[1] ? :
+ )}
+
+
)
}
diff --git a/web/app/components/develop/template/template.en.mdx b/web/app/components/develop/template/template.en.mdx
index 61ecd7ae97..c923ea30db 100755
--- a/web/app/components/develop/template/template.en.mdx
+++ b/web/app/components/develop/template/template.en.mdx
@@ -503,7 +503,7 @@ The text generation application offers non-session support and is ideal for tran
diff --git a/web/app/components/develop/template/template_advanced_chat.en.mdx b/web/app/components/develop/template/template_advanced_chat.en.mdx
index c3c3f7c6f3..7d64caa769 100644
--- a/web/app/components/develop/template/template_advanced_chat.en.mdx
+++ b/web/app/components/develop/template/template_advanced_chat.en.mdx
@@ -480,7 +480,7 @@ Chat applications support session persistence, allowing previous chat history to
@@ -884,7 +884,7 @@ Chat applications support session persistence, allowing previous chat history to
diff --git a/web/app/components/develop/template/template_chat.en.mdx b/web/app/components/develop/template/template_chat.en.mdx
index f44f991b89..ac8ee9d657 100644
--- a/web/app/components/develop/template/template_chat.en.mdx
+++ b/web/app/components/develop/template/template_chat.en.mdx
@@ -444,7 +444,7 @@ Chat applications support session persistence, allowing previous chat history to
@@ -918,7 +918,7 @@ Chat applications support session persistence, allowing previous chat history to
diff --git a/web/app/components/develop/template/template_workflow.en.mdx b/web/app/components/develop/template/template_workflow.en.mdx
index 6cb02bf844..be2ef54743 100644
--- a/web/app/components/develop/template/template_workflow.en.mdx
+++ b/web/app/components/develop/template/template_workflow.en.mdx
@@ -32,7 +32,7 @@ Workflow applications offers non-session support and is ideal for translation, a
@@ -505,7 +505,7 @@ Workflow applications offers non-session support and is ideal for translation, a
diff --git a/web/app/components/explore/sidebar/index.tsx b/web/app/components/explore/sidebar/index.tsx
index a4a40a00a2..13d5a0ec8f 100644
--- a/web/app/components/explore/sidebar/index.tsx
+++ b/web/app/components/explore/sidebar/index.tsx
@@ -11,6 +11,7 @@ import cn from '@/utils/classnames'
import { fetchInstalledAppList as doFetchInstalledAppList, uninstallApp, updatePinStatus } from '@/service/explore'
import ExploreContext from '@/context/explore-context'
import Confirm from '@/app/components/base/confirm'
+import Divider from '@/app/components/base/divider'
import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
const SelectedDiscoveryIcon = () => (
@@ -89,6 +90,7 @@ const SideBar: FC = ({
fetchInstalledAppList()
}, [controlUpdateInstalledApps])
+ const pinnedAppsCount = installedApps.filter(({ is_pinned }) => is_pinned).length
return (
@@ -109,10 +111,9 @@ const SideBar: FC
= ({
height: 'calc(100vh - 250px)',
}}
>
- {installedApps.map(({ id, is_pinned, uninstallable, app: { name, icon_type, icon, icon_url, icon_background } }) => {
- return (
+ {installedApps.map(({ id, is_pinned, uninstallable, app: { name, icon_type, icon, icon_url, icon_background } }, index) => (
+
- = ({
setShowConfirm(true)
}}
/>
- )
- })}
+ {index === pinnedAppsCount - 1 && index !== installedApps.length - 1 && }
+
+ ))}
)}
diff --git a/web/app/components/header/account-setting/members-page/index.tsx b/web/app/components/header/account-setting/members-page/index.tsx
index 2eaee6f901..b599eb09e7 100644
--- a/web/app/components/header/account-setting/members-page/index.tsx
+++ b/web/app/components/header/account-setting/members-page/index.tsx
@@ -34,13 +34,12 @@ const MembersPage = () => {
}
const { locale } = useContext(I18n)
- const { userProfile, currentWorkspace, isCurrentWorkspaceManager } = useAppContext()
+ const { userProfile, currentWorkspace, isCurrentWorkspaceOwner, isCurrentWorkspaceManager } = useAppContext()
const { data, mutate } = useSWR({ url: '/workspaces/current/members' }, fetchMembers)
const [inviteModalVisible, setInviteModalVisible] = useState(false)
const [invitationResults, setInvitationResults] = useState([])
const [invitedModalVisible, setInvitedModalVisible] = useState(false)
const accounts = data?.accounts || []
- const owner = accounts.filter(account => account.role === 'owner')?.[0]?.email === userProfile.email
const { plan, enableBilling } = useProviderContext()
const isNotUnlimitedMemberPlan = enableBilling && plan.type !== Plan.team && plan.type !== Plan.enterprise
const isMemberFull = enableBilling && isNotUnlimitedMemberPlan && accounts.length >= plan.total.teamMembers
@@ -109,8 +108,8 @@ const MembersPage = () => {
{dayjs(Number((account.last_active_at || account.created_at)) * 1000).locale(locale === 'zh-Hans' ? 'zh-cn' : 'en').fromNow()}
{
- (owner && account.role !== 'owner')
- ?
+ ((isCurrentWorkspaceOwner && account.role !== 'owner') || (isCurrentWorkspaceManager && !['owner', 'admin'].includes(account.role)))
+ ?
:
{RoleMap[account.role] || RoleMap.normal}
}
diff --git a/web/app/components/header/account-setting/members-page/operation/index.tsx b/web/app/components/header/account-setting/members-page/operation/index.tsx
index e1fe25cb96..82867ec522 100644
--- a/web/app/components/header/account-setting/members-page/operation/index.tsx
+++ b/web/app/components/header/account-setting/members-page/operation/index.tsx
@@ -26,11 +26,13 @@ const itemDescClassName = `
type IOperationProps = {
member: Member
+ operatorRole: string
onOperate: () => void
}
const Operation = ({
member,
+ operatorRole,
onOperate,
}: IOperationProps) => {
const { t } = useTranslation()
@@ -43,11 +45,20 @@ const Operation = ({
dataset_operator: t('common.members.datasetOperator'),
}
const roleList = useMemo(() => {
- return [
- ...['admin', 'editor', 'normal'],
- ...(datasetOperatorEnabled ? ['dataset_operator'] : []),
- ]
- }, [datasetOperatorEnabled])
+ if (operatorRole === 'owner') {
+ return [
+ ...['admin', 'editor', 'normal'],
+ ...(datasetOperatorEnabled ? ['dataset_operator'] : []),
+ ]
+ }
+ if (operatorRole === 'admin') {
+ return [
+ ...['editor', 'normal'],
+ ...(datasetOperatorEnabled ? ['dataset_operator'] : []),
+ ]
+ }
+ return []
+ }, [operatorRole, datasetOperatorEnabled])
const { notify } = useContext(ToastContext)
const toHump = (name: string) => name.replace(/_(\w)/g, (all, letter) => letter.toUpperCase())
const handleDeleteMemberOrCancelInvitation = async () => {
diff --git a/web/app/components/header/index.tsx b/web/app/components/header/index.tsx
index 8f44bf6eae..a3b344e747 100644
--- a/web/app/components/header/index.tsx
+++ b/web/app/components/header/index.tsx
@@ -14,6 +14,7 @@ import PluginsNav from './plugins-nav'
import ExploreNav from './explore-nav'
import ToolsNav from './tools-nav'
import GithubStar from './github-star'
+import LicenseNav from './license-env'
import { WorkspaceProvider } from '@/context/workspace-context'
import { useAppContext } from '@/context/app-context'
import LogoSite from '@/app/components/base/logo/logo-site'
@@ -61,29 +62,29 @@ const Header = () => {
}
{!isMobile
- &&
-
-
-
-
/
-
-
-
-
- {enableBilling && (
-
-
-
-
-
- {t('billing.upgradeBtn.encourageShort')}
-
-
-
-
- )}
+ &&
+
+
+
+
/
+
+
+
+
+ {enableBilling && (
+
+
+
+
+
+ {t('billing.upgradeBtn.encourageShort')}
+
+
+
+
+ )}
+
-
}
{isMobile && (
@@ -116,6 +117,7 @@ const Header = () => {
)}
+
diff --git a/web/app/components/header/license-env/index.tsx b/web/app/components/header/license-env/index.tsx
new file mode 100644
index 0000000000..800d86d2b8
--- /dev/null
+++ b/web/app/components/header/license-env/index.tsx
@@ -0,0 +1,29 @@
+'use client'
+
+import AppContext from '@/context/app-context'
+import { LicenseStatus } from '@/types/feature'
+import { useTranslation } from 'react-i18next'
+import { useContextSelector } from 'use-context-selector'
+import dayjs from 'dayjs'
+
+const LicenseNav = () => {
+ const { t } = useTranslation()
+ const systemFeatures = useContextSelector(AppContext, s => s.systemFeatures)
+
+ if (systemFeatures.license?.status === LicenseStatus.EXPIRING) {
+ const expiredAt = systemFeatures.license?.expired_at
+ const count = dayjs(expiredAt).diff(dayjs(), 'days')
+ return
+ {count <= 1 && {t('common.license.expiring', { count })}}
+ {count > 1 && {t('common.license.expiring_plural', { count })}}
+
+ }
+ if (systemFeatures.license.status === LicenseStatus.ACTIVE) {
+ return
+ Enterprise
+
+ }
+ return null
+}
+
+export default LicenseNav
diff --git a/web/app/components/share/text-generation/index.tsx b/web/app/components/share/text-generation/index.tsx
index 5bfb6c0516..b6fce57225 100644
--- a/web/app/components/share/text-generation/index.tsx
+++ b/web/app/components/share/text-generation/index.tsx
@@ -392,7 +392,10 @@ const TextGeneration: FC
= ({
const { user_input_form, more_like_this, file_upload, text_to_speech }: any = appParams
setVisionConfig({
- ...file_upload.image,
+ // legacy of image upload compatible
+ ...file_upload,
+ transfer_methods: file_upload.allowed_file_upload_methods || file_upload.allowed_upload_methods,
+ // legacy of image upload compatible
image_file_size_limit: appParams?.system_parameters?.image_file_size_limit,
fileUploadConfig: appParams?.system_parameters,
})
diff --git a/web/app/components/workflow/nodes/_base/components/file-upload-setting.tsx b/web/app/components/workflow/nodes/_base/components/file-upload-setting.tsx
index 42a7213f80..1974084424 100644
--- a/web/app/components/workflow/nodes/_base/components/file-upload-setting.tsx
+++ b/web/app/components/workflow/nodes/_base/components/file-upload-setting.tsx
@@ -82,8 +82,6 @@ const FileUploadSetting: FC = ({
const handleCustomFileTypesChange = useCallback((customFileTypes: string[]) => {
const newPayload = produce(payload, (draft) => {
draft.allowed_file_extensions = customFileTypes.map((v) => {
- if (v.startsWith('.')) // Not start with dot
- return v.slice(1)
return v
})
})
@@ -118,7 +116,7 @@ const FileUploadSetting: FC = ({
type={SupportUploadFileTypes.custom}
selected={allowed_file_types.includes(SupportUploadFileTypes.custom)}
onToggle={handleSupportFileTypeChange}
- customFileTypes={allowed_file_extensions?.map(item => `.${item}`)}
+ customFileTypes={allowed_file_extensions}
onCustomFileTypesChange={handleCustomFileTypesChange}
/>
diff --git a/web/app/components/workflow/nodes/tool/components/input-var-list.tsx b/web/app/components/workflow/nodes/tool/components/input-var-list.tsx
index e47082f4b7..10c534509c 100644
--- a/web/app/components/workflow/nodes/tool/components/input-var-list.tsx
+++ b/web/app/components/workflow/nodes/tool/components/input-var-list.tsx
@@ -46,6 +46,8 @@ const InputVarList: FC
= ({
const paramType = (type: string) => {
if (type === FormTypeEnum.textNumber)
return 'Number'
+ else if (type === FormTypeEnum.file)
+ return 'File'
else if (type === FormTypeEnum.files)
return 'Files'
else if (type === FormTypeEnum.select)
diff --git a/web/app/components/workflow/note-node/index.tsx b/web/app/components/workflow/note-node/index.tsx
index ec2bb84f68..6d62b452e4 100644
--- a/web/app/components/workflow/note-node/index.tsx
+++ b/web/app/components/workflow/note-node/index.tsx
@@ -81,7 +81,6 @@ const NoteNode = ({
nodeData={data}
icon={}
minWidth={240}
- maxWidth={640}
minHeight={88}
/>
diff --git a/web/app/components/workflow/style.css b/web/app/components/workflow/style.css
index 9ec8586ccc..ca1d24a52e 100644
--- a/web/app/components/workflow/style.css
+++ b/web/app/components/workflow/style.css
@@ -15,4 +15,8 @@
#workflow-container .react-flow__selection {
border: 1px solid #528BFF;
background: rgba(21, 94, 239, 0.05);
+}
+
+#workflow-container .react-flow__node-custom-note {
+ z-index: -1000 !important;
}
\ No newline at end of file
diff --git a/web/app/signin/normalForm.tsx b/web/app/signin/normalForm.tsx
index f4f46c68ba..783d8ac507 100644
--- a/web/app/signin/normalForm.tsx
+++ b/web/app/signin/normalForm.tsx
@@ -2,7 +2,7 @@ import React, { useCallback, useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Link from 'next/link'
import { useRouter, useSearchParams } from 'next/navigation'
-import { RiDoorLockLine } from '@remixicon/react'
+import { RiContractLine, RiDoorLockLine, RiErrorWarningFill } from '@remixicon/react'
import Loading from '../components/base/loading'
import MailAndCodeAuth from './components/mail-and-code-auth'
import MailAndPasswordAuth from './components/mail-and-password-auth'
@@ -10,7 +10,7 @@ import SocialAuth from './components/social-auth'
import SSOAuth from './components/sso-auth'
import cn from '@/utils/classnames'
import { getSystemFeatures, invitationCheck } from '@/service/common'
-import { defaultSystemFeatures } from '@/types/feature'
+import { LicenseStatus, defaultSystemFeatures } from '@/types/feature'
import Toast from '@/app/components/base/toast'
import { IS_CE_EDITION } from '@/config'
@@ -83,6 +83,48 @@ const NormalForm = () => {
}
+ if (systemFeatures.license?.status === LicenseStatus.LOST) {
+ return
+
+
+
+
+
+
+
{t('login.licenseLost')}
+
{t('login.licenseLostTip')}
+
+
+
+ }
+ if (systemFeatures.license?.status === LicenseStatus.EXPIRED) {
+ return
+
+
+
+
+
+
+
{t('login.licenseExpired')}
+
{t('login.licenseExpiredTip')}
+
+
+
+ }
+ if (systemFeatures.license?.status === LicenseStatus.INACTIVE) {
+ return
+
+
+
+
+
+
+
{t('login.licenseInactive')}
+
{t('login.licenseInactiveTip')}
+
+
+
+ }
return (
<>
diff --git a/web/context/app-context.tsx b/web/context/app-context.tsx
index 78ac1c9848..369fe5af19 100644
--- a/web/context/app-context.tsx
+++ b/web/context/app-context.tsx
@@ -144,7 +144,7 @@ export const AppContextProvider: FC
= ({ children }) =>
theme,
setTheme: handleSetTheme,
apps: appList.data,
- systemFeatures,
+ systemFeatures: { ...defaultSystemFeatures, ...systemFeatures },
mutateApps,
userProfile,
mutateUserProfile,
diff --git a/web/context/query-client.tsx b/web/context/query-client.tsx
index 1adb8af653..f85930515c 100644
--- a/web/context/query-client.tsx
+++ b/web/context/query-client.tsx
@@ -4,7 +4,15 @@ import type { FC, PropsWithChildren } from 'react'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import { ReactQueryDevtools } from '@tanstack/react-query-devtools'
-const client = new QueryClient()
+const STALE_TIME = 1000 * 60 * 30 // 30 minutes
+
+const client = new QueryClient({
+ defaultOptions: {
+ queries: {
+ staleTime: STALE_TIME,
+ },
+ },
+})
export const TanstackQueryIniter: FC = (props) => {
const { children } = props
diff --git a/web/i18n/de-DE/common.ts b/web/i18n/de-DE/common.ts
index 0b6d6fa0b2..085371aeb1 100644
--- a/web/i18n/de-DE/common.ts
+++ b/web/i18n/de-DE/common.ts
@@ -591,6 +591,10 @@ const translation = {
uploadFromComputerReadError: 'Lesen der Datei fehlgeschlagen, bitte versuchen Sie es erneut.',
fileExtensionNotSupport: 'Dateiendung nicht bedient',
},
+ license: {
+ expiring: 'Läuft in einem Tag ab',
+ expiring_plural: 'Läuft in {{count}} Tagen ab',
+ },
}
export default translation
diff --git a/web/i18n/de-DE/login.ts b/web/i18n/de-DE/login.ts
index 6736e34914..2e0d51cf85 100644
--- a/web/i18n/de-DE/login.ts
+++ b/web/i18n/de-DE/login.ts
@@ -99,6 +99,12 @@ const translation = {
noLoginMethodTip: 'Wenden Sie sich an den Systemadministrator, um eine Authentifizierungsmethode hinzuzufügen.',
usePassword: 'Passwort verwenden',
noLoginMethod: 'Authentifizierungsmethode nicht konfiguriert',
+ licenseExpired: 'Lizenz abgelaufen',
+ licenseLostTip: 'Fehler beim Verbinden des Dify-Lizenzservers. Wenden Sie sich an Ihren Administrator, um Dify weiterhin zu verwenden.',
+ licenseInactive: 'Lizenz inaktiv',
+ licenseInactiveTip: 'Die Dify Enterprise-Lizenz für Ihren Arbeitsbereich ist inaktiv. Wenden Sie sich an Ihren Administrator, um Dify weiterhin zu verwenden.',
+ licenseExpiredTip: 'Die Dify Enterprise-Lizenz für Ihren Arbeitsbereich ist abgelaufen. Wenden Sie sich an Ihren Administrator, um Dify weiterhin zu verwenden.',
+ licenseLost: 'Lizenz verloren',
}
export default translation
diff --git a/web/i18n/de-DE/workflow.ts b/web/i18n/de-DE/workflow.ts
index d05070c308..9ee5289429 100644
--- a/web/i18n/de-DE/workflow.ts
+++ b/web/i18n/de-DE/workflow.ts
@@ -407,6 +407,7 @@ const translation = {
},
type: 'Art',
binaryFileVariable: 'Variable der Binärdatei',
+ extractListPlaceholder: 'Geben Sie den Index des Listeneintrags ein, geben Sie \'/\' ein, fügen Sie die Variable ein',
},
code: {
inputVars: 'Eingabevariablen',
@@ -618,6 +619,7 @@ const translation = {
filterConditionKey: 'Bedingungsschlüssel filtern',
filterCondition: 'Filter-Bedingung',
selectVariableKeyPlaceholder: 'Untervariablenschlüssel auswählen',
+ extractsCondition: 'Extrahieren des N-Elements',
},
},
tracing: {
diff --git a/web/i18n/en-US/app-api.ts b/web/i18n/en-US/app-api.ts
index 355ff30602..1fba63c977 100644
--- a/web/i18n/en-US/app-api.ts
+++ b/web/i18n/en-US/app-api.ts
@@ -78,6 +78,7 @@ const translation = {
requestBody: 'Request Body',
pathParams: 'Path Params',
query: 'Query',
+ toc: 'Contents',
},
}
diff --git a/web/i18n/en-US/common.ts b/web/i18n/en-US/common.ts
index 97b158904b..757f05f2c1 100644
--- a/web/i18n/en-US/common.ts
+++ b/web/i18n/en-US/common.ts
@@ -602,6 +602,10 @@ const translation = {
created: 'Tag created successfully',
failed: 'Tag creation failed',
},
+ license: {
+ expiring: 'Expiring in one day',
+ expiring_plural: 'Expiring in {{count}} days',
+ },
}
export default translation
diff --git a/web/i18n/en-US/login.ts b/web/i18n/en-US/login.ts
index b47d7bd69a..5ff7b80b4e 100644
--- a/web/i18n/en-US/login.ts
+++ b/web/i18n/en-US/login.ts
@@ -98,6 +98,12 @@ const translation = {
back: 'Back',
noLoginMethod: 'Authentication method not configured',
noLoginMethodTip: 'Please contact the system admin to add an authentication method.',
+ licenseExpired: 'License Expired',
+ licenseExpiredTip: 'The Dify Enterprise license for your workspace has expired. Please contact your administrator to continue using Dify.',
+ licenseLost: 'License Lost',
+ licenseLostTip: 'Failed to connect Dify license server. Please contact your administrator to continue using Dify.',
+ licenseInactive: 'License Inactive',
+ licenseInactiveTip: 'The Dify Enterprise license for your workspace is inactive. Please contact your administrator to continue using Dify.',
}
export default translation
diff --git a/web/i18n/es-ES/common.ts b/web/i18n/es-ES/common.ts
index c2cef9afcd..8eba3258c8 100644
--- a/web/i18n/es-ES/common.ts
+++ b/web/i18n/es-ES/common.ts
@@ -591,6 +591,10 @@ const translation = {
pasteFileLinkInputPlaceholder: 'Introduzca la URL...',
uploadFromComputerLimit: 'El archivo de carga no puede exceder {{size}}',
},
+ license: {
+ expiring: 'Caduca en un día',
+ expiring_plural: 'Caducando en {{count}} días',
+ },
}
export default translation
diff --git a/web/i18n/es-ES/login.ts b/web/i18n/es-ES/login.ts
index 3a6debbe48..bb465ac1be 100644
--- a/web/i18n/es-ES/login.ts
+++ b/web/i18n/es-ES/login.ts
@@ -99,6 +99,12 @@ const translation = {
noLoginMethod: 'Método de autenticación no configurado',
setYourAccount: 'Configura tu cuenta',
noLoginMethodTip: 'Póngase en contacto con el administrador del sistema para agregar un método de autenticación.',
+ licenseInactive: 'Licencia inactiva',
+ licenseInactiveTip: 'La licencia de Dify Enterprise para su espacio de trabajo está inactiva. Póngase en contacto con su administrador para seguir utilizando Dify.',
+ licenseExpired: 'Licencia caducada',
+ licenseLost: 'Licencia perdida',
+ licenseExpiredTip: 'La licencia de Dify Enterprise para su espacio de trabajo ha caducado. Póngase en contacto con su administrador para seguir utilizando Dify.',
+ licenseLostTip: 'No se pudo conectar con el servidor de licencias de Dify. Póngase en contacto con su administrador para seguir utilizando Dify.',
}
export default translation
diff --git a/web/i18n/es-ES/workflow.ts b/web/i18n/es-ES/workflow.ts
index 6c9af49c4d..4b3da57d7c 100644
--- a/web/i18n/es-ES/workflow.ts
+++ b/web/i18n/es-ES/workflow.ts
@@ -407,6 +407,7 @@ const translation = {
},
type: 'Tipo',
binaryFileVariable: 'Variable de archivo binario',
+ extractListPlaceholder: 'Introduzca el índice de elementos de la lista, escriba \'/\' insertar variable',
},
code: {
inputVars: 'Variables de entrada',
@@ -621,6 +622,7 @@ const translation = {
filterConditionComparisonOperator: 'Operador de comparación de condiciones de filtro',
asc: 'ASC',
selectVariableKeyPlaceholder: 'Seleccione la clave de subvariable',
+ extractsCondition: 'Extraiga el elemento N',
},
},
tracing: {
diff --git a/web/i18n/fa-IR/common.ts b/web/i18n/fa-IR/common.ts
index 9ec9332ce8..969c134be7 100644
--- a/web/i18n/fa-IR/common.ts
+++ b/web/i18n/fa-IR/common.ts
@@ -591,6 +591,10 @@ const translation = {
pasteFileLink: 'پیوند فایل را جایگذاری کنید',
uploadFromComputerLimit: 'آپلود فایل نمی تواند از {{size}} تجاوز کند',
},
+ license: {
+ expiring_plural: 'انقضا در {{count}} روز',
+ expiring: 'انقضا در یک روز',
+ },
}
export default translation
diff --git a/web/i18n/fa-IR/login.ts b/web/i18n/fa-IR/login.ts
index 0f2fe9464a..7394ab325f 100644
--- a/web/i18n/fa-IR/login.ts
+++ b/web/i18n/fa-IR/login.ts
@@ -99,6 +99,12 @@ const translation = {
noLoginMethod: 'روش احراز هویت پیکربندی نشده است',
noLoginMethodTip: 'لطفا برای افزودن روش احراز هویت با مدیر سیستم تماس بگیرید.',
resetPasswordDesc: 'ایمیلی را که برای ثبت نام در Dify استفاده کرده اید تایپ کنید و ما یک ایمیل بازنشانی رمز عبور برای شما ارسال خواهیم کرد.',
+ licenseInactive: 'مجوز غیر فعال',
+ licenseLost: 'مجوز گم شده است',
+ licenseExpired: 'مجوز منقضی شده است',
+ licenseExpiredTip: 'مجوز Dify Enterprise برای فضای کاری شما منقضی شده است. لطفا برای ادامه استفاده از Dify با سرپرست خود تماس بگیرید.',
+ licenseInactiveTip: 'مجوز Dify Enterprise برای فضای کاری شما غیرفعال است. لطفا برای ادامه استفاده از Dify با سرپرست خود تماس بگیرید.',
+ licenseLostTip: 'اتصال سرور مجوز Dify انجام نشد. لطفا برای ادامه استفاده از Dify با سرپرست خود تماس بگیرید.',
}
export default translation
diff --git a/web/i18n/fa-IR/workflow.ts b/web/i18n/fa-IR/workflow.ts
index 4b00390663..c23cf8f62e 100644
--- a/web/i18n/fa-IR/workflow.ts
+++ b/web/i18n/fa-IR/workflow.ts
@@ -407,6 +407,7 @@ const translation = {
},
binaryFileVariable: 'متغیر فایل باینری',
type: 'نوع',
+ extractListPlaceholder: 'فهرست آیتم لیست را وارد کنید، متغیر درج \'/\' را تایپ کنید',
},
code: {
inputVars: 'متغیرهای ورودی',
@@ -618,6 +619,7 @@ const translation = {
filterConditionComparisonValue: 'مقدار شرایط فیلتر',
selectVariableKeyPlaceholder: 'کلید متغیر فرعی را انتخاب کنید',
asc: 'صعودی',
+ extractsCondition: 'مورد N را استخراج کنید',
},
},
tracing: {
diff --git a/web/i18n/fr-FR/common.ts b/web/i18n/fr-FR/common.ts
index f6b5b62b30..62b5a1ca10 100644
--- a/web/i18n/fr-FR/common.ts
+++ b/web/i18n/fr-FR/common.ts
@@ -591,6 +591,10 @@ const translation = {
pasteFileLinkInvalid: 'Lien de fichier non valide',
uploadFromComputerLimit: 'Le fichier de téléchargement ne peut pas dépasser {{size}}',
},
+ license: {
+ expiring: 'Expirant dans un jour',
+ expiring_plural: 'Expirant dans {{count}} jours',
+ },
}
export default translation
diff --git a/web/i18n/fr-FR/login.ts b/web/i18n/fr-FR/login.ts
index 2f59b8afef..a7a633f330 100644
--- a/web/i18n/fr-FR/login.ts
+++ b/web/i18n/fr-FR/login.ts
@@ -99,6 +99,12 @@ const translation = {
noLoginMethodTip: 'Veuillez contacter l’administrateur système pour ajouter une méthode d’authentification.',
resetPasswordDesc: 'Tapez l’adresse e-mail que vous avez utilisée pour vous inscrire sur Dify et nous vous enverrons un e-mail de réinitialisation de mot de passe.',
usePassword: 'Utiliser le mot de passe',
+ licenseInactiveTip: 'La licence Dify Enterprise de votre espace de travail est inactive. Veuillez contacter votre administrateur pour continuer à utiliser Dify.',
+ licenseLostTip: 'Échec de la connexion au serveur de licences Dify. Veuillez contacter votre administrateur pour continuer à utiliser Dify.',
+ licenseExpired: 'Licence expirée',
+ licenseLost: 'Licence perdue',
+ licenseExpiredTip: 'La licence Dify Enterprise de votre espace de travail a expiré. Veuillez contacter votre administrateur pour continuer à utiliser Dify.',
+ licenseInactive: 'Licence inactive',
}
export default translation
diff --git a/web/i18n/fr-FR/workflow.ts b/web/i18n/fr-FR/workflow.ts
index e736e2cb07..dd41c8f9d1 100644
--- a/web/i18n/fr-FR/workflow.ts
+++ b/web/i18n/fr-FR/workflow.ts
@@ -407,6 +407,7 @@ const translation = {
},
binaryFileVariable: 'Variable de fichier binaire',
type: 'Type',
+ extractListPlaceholder: 'Entrez l’index de l’élément de liste, tapez \'/\' insérer la variable',
},
code: {
inputVars: 'Variables de saisie',
@@ -618,6 +619,7 @@ const translation = {
limit: 'Haut N',
orderBy: 'Trier par',
filterConditionKey: 'Clé de condition de filtre',
+ extractsCondition: 'Extraire l’élément N',
},
},
tracing: {
diff --git a/web/i18n/hi-IN/common.ts b/web/i18n/hi-IN/common.ts
index 80deee1f94..569ee0a1af 100644
--- a/web/i18n/hi-IN/common.ts
+++ b/web/i18n/hi-IN/common.ts
@@ -613,6 +613,10 @@ const translation = {
fileExtensionNotSupport: 'फ़ाइल एक्सटेंशन समर्थित नहीं है',
uploadFromComputer: 'स्थानीय अपलोड',
},
+ license: {
+ expiring: 'एक दिन में समाप्त हो रहा है',
+ expiring_plural: '{{count}} दिनों में समाप्त हो रहा है',
+ },
}
export default translation
diff --git a/web/i18n/hi-IN/login.ts b/web/i18n/hi-IN/login.ts
index e3ad60d572..0be8cbc3ab 100644
--- a/web/i18n/hi-IN/login.ts
+++ b/web/i18n/hi-IN/login.ts
@@ -104,6 +104,12 @@ const translation = {
resetPasswordDesc: 'वह ईमेल टाइप करें जिसका उपयोग आपने Dify पर साइन अप करने के लिए किया था और हम आपको एक पासवर्ड रीसेट ईमेल भेजेंगे।',
withSSO: 'एसएसओ के साथ जारी रखें',
back: 'पीछे',
+ licenseInactive: 'लाइसेंस निष्क्रिय',
+ licenseExpired: 'लाइसेंस की समय सीमा समाप्त हो गई',
+ licenseLost: 'लाइसेंस खो गया',
+ licenseLostTip: 'Dify लायसेंस सर्वर से कनेक्ट करने में विफल. Dify का उपयोग जारी रखने के लिए कृपया अपने व्यवस्थापक से संपर्क करें.',
+ licenseInactiveTip: 'आपके कार्यस्थल के लिए डिफाई एंटरप्राइज लाइसेंस निष्क्रिय है। कृपया डिफाई का उपयोग जारी रखने के लिए अपने प्रशासक से संपर्क करें।',
+ licenseExpiredTip: 'आपके कार्यस्थल के लिए डिफाई एंटरप्राइज लाइसेंस समाप्त हो गया है। कृपया डिफाई का उपयोग जारी रखने के लिए अपने प्रशासक से संपर्क करें।',
}
export default translation
diff --git a/web/i18n/hi-IN/workflow.ts b/web/i18n/hi-IN/workflow.ts
index 4112643488..70ddf1145f 100644
--- a/web/i18n/hi-IN/workflow.ts
+++ b/web/i18n/hi-IN/workflow.ts
@@ -420,6 +420,7 @@ const translation = {
},
type: 'प्रकार',
binaryFileVariable: 'बाइनरी फ़ाइल चर',
+ extractListPlaceholder: 'सूची आइटम इंडेक्स दर्ज करें, \'/\' इन्सर्ट वेरिएबल टाइप करें',
},
code: {
inputVars: 'इनपुट वेरिएबल्स',
@@ -638,6 +639,7 @@ const translation = {
filterConditionComparisonOperator: 'फ़िल्टर शर्त तुलन ऑपरेटर',
selectVariableKeyPlaceholder: 'उप चर कुंजी का चयन करें',
inputVar: 'इनपुट वेरिएबल',
+ extractsCondition: 'N आइटम निकालें',
},
},
tracing: {
diff --git a/web/i18n/it-IT/common.ts b/web/i18n/it-IT/common.ts
index 46586179ec..971c292852 100644
--- a/web/i18n/it-IT/common.ts
+++ b/web/i18n/it-IT/common.ts
@@ -622,6 +622,10 @@ const translation = {
pasteFileLink: 'Incolla il collegamento del file',
uploadFromComputerReadError: 'Lettura del file non riuscita, riprovare.',
},
+ license: {
+ expiring_plural: 'Scadenza tra {{count}} giorni',
+ expiring: 'Scadenza in un giorno',
+ },
}
export default translation
diff --git a/web/i18n/it-IT/login.ts b/web/i18n/it-IT/login.ts
index eb547ec661..350424259e 100644
--- a/web/i18n/it-IT/login.ts
+++ b/web/i18n/it-IT/login.ts
@@ -109,6 +109,12 @@ const translation = {
resetPasswordDesc: 'Digita l\'e-mail che hai utilizzato per registrarti su Dify e ti invieremo un\'e-mail per reimpostare la password.',
noLoginMethodTip: 'Contatta l\'amministratore di sistema per aggiungere un metodo di autenticazione.',
enterYourName: 'Inserisci il tuo nome utente',
+ licenseLostTip: 'Impossibile connettersi al server licenze Dify. Contatta il tuo amministratore per continuare a utilizzare Dify.',
+ licenseExpired: 'Licenza scaduta',
+ licenseLost: 'Licenza persa',
+ licenseExpiredTip: 'La licenza Dify Enterprise per la tua area di lavoro è scaduta. Contatta il tuo amministratore per continuare a utilizzare Dify.',
+ licenseInactiveTip: 'La licenza Dify Enterprise per la tua area di lavoro è inattiva. Contatta il tuo amministratore per continuare a utilizzare Dify.',
+ licenseInactive: 'Licenza inattiva',
}
export default translation
diff --git a/web/i18n/it-IT/workflow.ts b/web/i18n/it-IT/workflow.ts
index 756fb665af..db617425e1 100644
--- a/web/i18n/it-IT/workflow.ts
+++ b/web/i18n/it-IT/workflow.ts
@@ -424,6 +424,7 @@ const translation = {
},
binaryFileVariable: 'Variabile file binario',
type: 'Digitare',
+ extractListPlaceholder: 'Inserisci l\'indice delle voci dell\'elenco, digita \'/\' inserisci la variabile',
},
code: {
inputVars: 'Variabili di Input',
@@ -645,6 +646,7 @@ const translation = {
desc: 'DESC',
filterConditionComparisonValue: 'Valore della condizione di filtro',
orderBy: 'Ordina per',
+ extractsCondition: 'Estrai l\'elemento N',
},
},
tracing: {
diff --git a/web/i18n/ja-JP/common.ts b/web/i18n/ja-JP/common.ts
index 19f502c928..20182d8980 100644
--- a/web/i18n/ja-JP/common.ts
+++ b/web/i18n/ja-JP/common.ts
@@ -591,6 +591,10 @@ const translation = {
fileExtensionNotSupport: 'ファイル拡張子はサポートされていません',
pasteFileLinkInvalid: '無効なファイルリンク',
},
+ license: {
+ expiring_plural: '有効期限 {{count}} 日',
+ expiring: '1日で有効期限が切れます',
+ },
}
export default translation
diff --git a/web/i18n/ja-JP/login.ts b/web/i18n/ja-JP/login.ts
index 178c2617ae..fe4510686b 100644
--- a/web/i18n/ja-JP/login.ts
+++ b/web/i18n/ja-JP/login.ts
@@ -99,6 +99,12 @@ const translation = {
sendVerificationCode: '確認コードの送信',
enterYourName: 'ユーザー名を入力してください',
resetPasswordDesc: 'Difyへのサインアップに使用したメールアドレスを入力すると、パスワードリセットメールが送信されます。',
+ licenseLost: 'ライセンスを失った',
+ licenseExpiredTip: 'ワークスペースの Dify Enterprise ライセンスの有効期限が切れています。Difyを引き続き使用するには、管理者に連絡してください。',
+ licenseInactive: 'ライセンスが非アクティブです',
+ licenseInactiveTip: 'ワークスペースの Dify Enterprise ライセンスが非アクティブです。Difyを引き続き使用するには、管理者に連絡してください。',
+ licenseExpired: 'ライセンスの有効期限が切れています',
+ licenseLostTip: 'Difyライセンスサーバーへの接続に失敗しました。続けてDifyを使用するために管理者に連絡してください。',
}
export default translation
diff --git a/web/i18n/ja-JP/workflow.ts b/web/i18n/ja-JP/workflow.ts
index a82ba71e48..95e667de4a 100644
--- a/web/i18n/ja-JP/workflow.ts
+++ b/web/i18n/ja-JP/workflow.ts
@@ -407,6 +407,7 @@ const translation = {
},
type: 'タイプ',
binaryFileVariable: 'バイナリファイル変数',
+ extractListPlaceholder: 'リスト項目のインデックスを入力し、変数を挿入 \'/\' と入力します',
},
code: {
inputVars: '入力変数',
@@ -619,6 +620,7 @@ const translation = {
filterConditionComparisonOperator: 'フィルター条件を比較オペレーター',
inputVar: '入力変数',
desc: 'DESC',
+ extractsCondition: 'N個のアイテムを抽出します',
},
},
tracing: {
diff --git a/web/i18n/ko-KR/common.ts b/web/i18n/ko-KR/common.ts
index 43e7402bd4..a5a697b2e5 100644
--- a/web/i18n/ko-KR/common.ts
+++ b/web/i18n/ko-KR/common.ts
@@ -587,6 +587,10 @@ const translation = {
uploadFromComputerLimit: '업로드 파일은 {{size}}를 초과할 수 없습니다.',
uploadFromComputerUploadError: '파일 업로드에 실패했습니다. 다시 업로드하십시오.',
},
+ license: {
+ expiring_plural: '{{count}}일 후에 만료',
+ expiring: '하루 후에 만료',
+ },
}
export default translation
diff --git a/web/i18n/ko-KR/login.ts b/web/i18n/ko-KR/login.ts
index a338ce5ed7..05a60c7b68 100644
--- a/web/i18n/ko-KR/login.ts
+++ b/web/i18n/ko-KR/login.ts
@@ -99,6 +99,12 @@ const translation = {
enterYourName: '사용자 이름을 입력해 주세요',
noLoginMethodTip: '인증 방법을 추가하려면 시스템 관리자에게 문의하십시오.',
resetPasswordDesc: 'Dify에 가입할 때 사용한 이메일을 입력하면 비밀번호 재설정 이메일을 보내드립니다.',
+ licenseInactiveTip: '작업 영역에 대한 Dify Enterprise 라이선스가 비활성 상태입니다. Dify를 계속 사용하려면 관리자에게 문의하십시오.',
+ licenseLost: '라이센스 분실',
+ licenseLostTip: 'Dify 라이선스 서버에 연결하지 못했습니다. Dify를 계속 사용하려면 관리자에게 문의하십시오.',
+ licenseInactive: '라이선스 비활성',
+ licenseExpired: '라이센스가 만료되었습니다.',
+ licenseExpiredTip: '작업 영역에 대한 Dify Enterprise 라이선스가 만료되었습니다. Dify를 계속 사용하려면 관리자에게 문의하십시오.',
}
export default translation
diff --git a/web/i18n/ko-KR/workflow.ts b/web/i18n/ko-KR/workflow.ts
index 589831401c..158d407e2a 100644
--- a/web/i18n/ko-KR/workflow.ts
+++ b/web/i18n/ko-KR/workflow.ts
@@ -407,6 +407,7 @@ const translation = {
},
type: '형',
binaryFileVariable: '바이너리 파일 변수',
+ extractListPlaceholder: '목록 항목 인덱스 입력, \'/\' 변수 삽입',
},
code: {
inputVars: '입력 변수',
@@ -618,6 +619,7 @@ const translation = {
orderBy: '정렬 기준',
selectVariableKeyPlaceholder: '하위 변수 키 선택',
filterConditionComparisonOperator: '필터 조건 비교 연산자',
+ extractsCondition: 'N 항목을 추출합니다.',
},
},
tracing: {
diff --git a/web/i18n/pl-PL/common.ts b/web/i18n/pl-PL/common.ts
index 0a0f7adb99..3dff3f5c27 100644
--- a/web/i18n/pl-PL/common.ts
+++ b/web/i18n/pl-PL/common.ts
@@ -609,6 +609,10 @@ const translation = {
fileExtensionNotSupport: 'Rozszerzenie pliku nie jest obsługiwane',
uploadFromComputer: 'Przesyłanie lokalne',
},
+ license: {
+ expiring_plural: 'Wygasa za {{count}} dni',
+ expiring: 'Wygasa w ciągu jednego dnia',
+ },
}
export default translation
diff --git a/web/i18n/pl-PL/login.ts b/web/i18n/pl-PL/login.ts
index 78f11f3a26..99719fe71a 100644
--- a/web/i18n/pl-PL/login.ts
+++ b/web/i18n/pl-PL/login.ts
@@ -104,6 +104,12 @@ const translation = {
or: 'LUB',
noLoginMethodTip: 'Skontaktuj się z administratorem systemu, aby dodać metodę uwierzytelniania.',
noLoginMethod: 'Nie skonfigurowano metody uwierzytelniania',
+ licenseLost: 'Utrata licencji',
+ licenseExpired: 'Licencja wygasła',
+ licenseInactive: 'Licencja nieaktywna',
+ licenseExpiredTip: 'Licencja Dify Enterprise dla Twojego obszaru roboczego wygasła. Skontaktuj się z administratorem, aby kontynuować korzystanie z Dify.',
+ licenseLostTip: 'Nie udało się nawiązać połączenia z serwerem licencji Dify. Skontaktuj się z administratorem, aby kontynuować korzystanie z Dify.',
+ licenseInactiveTip: 'Licencja Dify Enterprise dla Twojego obszaru roboczego jest nieaktywna. Skontaktuj się z administratorem, aby kontynuować korzystanie z Dify.',
}
export default translation
diff --git a/web/i18n/pl-PL/workflow.ts b/web/i18n/pl-PL/workflow.ts
index f118f7945c..021cfd2534 100644
--- a/web/i18n/pl-PL/workflow.ts
+++ b/web/i18n/pl-PL/workflow.ts
@@ -407,6 +407,7 @@ const translation = {
},
type: 'Typ',
binaryFileVariable: 'Binarna zmienna pliku',
+ extractListPlaceholder: 'Wprowadź indeks elementu listy, wpisz "/" wstaw zmienną',
},
code: {
inputVars: 'Zmienne wejściowe',
@@ -618,6 +619,7 @@ const translation = {
filterCondition: 'Stan filtra',
filterConditionComparisonValue: 'Wartość warunku filtru',
selectVariableKeyPlaceholder: 'Wybierz klucz zmiennej podrzędnej',
+ extractsCondition: 'Wyodrębnij element N',
},
},
tracing: {
diff --git a/web/i18n/pt-BR/common.ts b/web/i18n/pt-BR/common.ts
index 431db81d24..d51b24e34d 100644
--- a/web/i18n/pt-BR/common.ts
+++ b/web/i18n/pt-BR/common.ts
@@ -591,6 +591,10 @@ const translation = {
uploadFromComputerLimit: 'Carregar arquivo não pode exceder {{size}}',
uploadFromComputerUploadError: 'Falha no upload do arquivo, faça o upload novamente.',
},
+ license: {
+ expiring: 'Expirando em um dia',
+ expiring_plural: 'Expirando em {{count}} dias',
+ },
}
export default translation
diff --git a/web/i18n/pt-BR/login.ts b/web/i18n/pt-BR/login.ts
index 6c08de45b0..7af5181bb9 100644
--- a/web/i18n/pt-BR/login.ts
+++ b/web/i18n/pt-BR/login.ts
@@ -99,6 +99,12 @@ const translation = {
resetPasswordDesc: 'Digite o e-mail que você usou para se inscrever no Dify e enviaremos um e-mail de redefinição de senha.',
sendVerificationCode: 'Enviar código de verificação',
usePassword: 'Usar senha',
+ licenseInactiveTip: 'A licença do Dify Enterprise para seu espaço de trabalho está inativa. Entre em contato com o administrador para continuar usando o Dify.',
+ licenseLostTip: 'Falha ao conectar o servidor de licenças Dify. Entre em contato com o administrador para continuar usando o Dify.',
+ licenseExpired: 'Licença expirada',
+ licenseLost: 'Licença perdida',
+ licenseInactive: 'Licença inativa',
+ licenseExpiredTip: 'A licença do Dify Enterprise para seu espaço de trabalho expirou. Entre em contato com o administrador para continuar usando o Dify.',
}
export default translation
diff --git a/web/i18n/pt-BR/workflow.ts b/web/i18n/pt-BR/workflow.ts
index 44afda5cd4..de3f10ff6e 100644
--- a/web/i18n/pt-BR/workflow.ts
+++ b/web/i18n/pt-BR/workflow.ts
@@ -407,6 +407,7 @@ const translation = {
},
type: 'Tipo',
binaryFileVariable: 'Variável de arquivo binário',
+ extractListPlaceholder: 'Insira o índice do item da lista, digite \'/\' inserir variável',
},
code: {
inputVars: 'Variáveis de entrada',
@@ -618,6 +619,7 @@ const translation = {
filterConditionKey: 'Chave de condição do filtro',
filterConditionComparisonOperator: 'Operador de comparação de condição de filtro',
filterConditionComparisonValue: 'Valor da condição do filtro',
+ extractsCondition: 'Extraia o item N',
},
},
tracing: {
diff --git a/web/i18n/ro-RO/common.ts b/web/i18n/ro-RO/common.ts
index 89b965db63..0b7baf37ab 100644
--- a/web/i18n/ro-RO/common.ts
+++ b/web/i18n/ro-RO/common.ts
@@ -591,6 +591,10 @@ const translation = {
uploadFromComputerLimit: 'Încărcarea fișierului nu poate depăși {{size}}',
pasteFileLink: 'Lipiți linkul fișierului',
},
+ license: {
+ expiring: 'Expiră într-o zi',
+ expiring_plural: 'Expiră în {{count}} zile',
+ },
}
export default translation
diff --git a/web/i18n/ro-RO/login.ts b/web/i18n/ro-RO/login.ts
index a60e367ea7..12878d46c0 100644
--- a/web/i18n/ro-RO/login.ts
+++ b/web/i18n/ro-RO/login.ts
@@ -99,6 +99,12 @@ const translation = {
or: 'SAU',
resetPasswordDesc: 'Tastați e-mailul pe care l-ați folosit pentru a vă înscrie pe Dify și vă vom trimite un e-mail de resetare a parolei.',
changePasswordBtn: 'Setați o parolă',
+ licenseLostTip: 'Nu s-a reușit conectarea serverului de licențe Dify. Contactați administratorul pentru a continua să utilizați Dify.',
+ licenseInactive: 'Licență inactivă',
+ licenseInactiveTip: 'Licența Dify Enterprise pentru spațiul de lucru este inactivă. Contactați administratorul pentru a continua să utilizați Dify.',
+ licenseExpired: 'Licență expirată',
+ licenseLost: 'Licență pierdută',
+ licenseExpiredTip: 'Licența Dify Enterprise pentru spațiul de lucru a expirat. Contactați administratorul pentru a continua să utilizați Dify.',
}
export default translation
diff --git a/web/i18n/ro-RO/workflow.ts b/web/i18n/ro-RO/workflow.ts
index d8cd84f730..276ebefa90 100644
--- a/web/i18n/ro-RO/workflow.ts
+++ b/web/i18n/ro-RO/workflow.ts
@@ -407,6 +407,7 @@ const translation = {
},
type: 'Tip',
binaryFileVariable: 'Variabilă de fișier binar',
+ extractListPlaceholder: 'Introduceți indexul elementelor din listă, tastați "/" inserați variabila',
},
code: {
inputVars: 'Variabile de intrare',
@@ -618,6 +619,7 @@ const translation = {
limit: 'N de sus',
filterConditionComparisonValue: 'Valoare Stare filtrare',
asc: 'ASC',
+ extractsCondition: 'Extrageți elementul N',
},
},
tracing: {
diff --git a/web/i18n/ru-RU/common.ts b/web/i18n/ru-RU/common.ts
index f383b53479..8df7c1ae55 100644
--- a/web/i18n/ru-RU/common.ts
+++ b/web/i18n/ru-RU/common.ts
@@ -591,6 +591,10 @@ const translation = {
uploadFromComputerLimit: 'Файл загрузки не может превышать {{size}}',
uploadFromComputerUploadError: 'Загрузка файла не удалась, пожалуйста, загрузите еще раз.',
},
+ license: {
+ expiring: 'Срок действия истекает за один день',
+ expiring_plural: 'Срок действия истекает через {{count}} дней',
+ },
}
export default translation
diff --git a/web/i18n/ru-RU/login.ts b/web/i18n/ru-RU/login.ts
index 7aba7c4cdd..5c46cb7ff9 100644
--- a/web/i18n/ru-RU/login.ts
+++ b/web/i18n/ru-RU/login.ts
@@ -99,6 +99,12 @@ const translation = {
noLoginMethodTip: 'Обратитесь к системному администратору, чтобы добавить метод аутентификации.',
resetPasswordDesc: 'Введите адрес электронной почты, который вы использовали для регистрации в Dify, и мы отправим вам электронное письмо для сброса пароля.',
or: 'ИЛИ',
+ licenseInactive: 'Лицензия неактивна',
+ licenseLostTip: 'Не удалось подключить сервер лицензий Dify. Обратитесь к своему администратору, чтобы продолжить использование Dify.',
+ licenseExpired: 'Срок действия лицензии истек',
+ licenseLost: 'Утеряна лицензия',
+ licenseInactiveTip: 'Лицензия Dify Enterprise для рабочего пространства неактивна. Обратитесь к своему администратору, чтобы продолжить использование Dify.',
+ licenseExpiredTip: 'Срок действия лицензии Dify Enterprise для рабочего пространства истек. Обратитесь к своему администратору, чтобы продолжить использование Dify.',
}
export default translation
diff --git a/web/i18n/ru-RU/workflow.ts b/web/i18n/ru-RU/workflow.ts
index c822f8c3e5..2fdb6a5f3b 100644
--- a/web/i18n/ru-RU/workflow.ts
+++ b/web/i18n/ru-RU/workflow.ts
@@ -407,6 +407,7 @@ const translation = {
},
type: 'Тип',
binaryFileVariable: 'Переменная двоичного файла',
+ extractListPlaceholder: 'Введите индекс элемента списка, введите \'/\' вставьте переменную',
},
code: {
inputVars: 'Входные переменные',
@@ -618,6 +619,7 @@ const translation = {
filterConditionKey: 'Ключ условия фильтра',
selectVariableKeyPlaceholder: 'Выбор ключа подпеременной',
filterConditionComparisonValue: 'Значение условия фильтра',
+ extractsCondition: 'Извлечение элемента N',
},
},
tracing: {
diff --git a/web/i18n/sl-SI/common.ts b/web/i18n/sl-SI/common.ts
index c780fbb6c2..2bd148397d 100644
--- a/web/i18n/sl-SI/common.ts
+++ b/web/i18n/sl-SI/common.ts
@@ -38,6 +38,10 @@ const translation = {
duplicate: 'Podvoji',
rename: 'Preimenuj',
audioSourceUnavailable: 'Zvočni vir ni na voljo',
+ copyImage: 'Kopiraj sliko',
+ openInNewTab: 'Odpri v novem zavihku',
+ zoomOut: 'Pomanjšanje',
+ zoomIn: 'Povečava',
},
errorMsg: {
fieldRequired: '{{field}} je obvezno',
@@ -576,5 +580,220 @@ const translation = {
failed: 'Ustvarjanje oznake ni uspelo',
},
},
+ dataSource: {
+ notion: {
+ selector: {
+ pageSelected: 'Izbrane strani',
+ addPages: 'Dodajanje strani',
+ searchPages: 'Iskanje strani ...',
+ noSearchResult: 'Ni rezultatov iskanja',
+ preview: 'PREDOGLED',
+ },
+ connected: 'Povezani',
+ remove: 'Odstrani',
+ addWorkspace: 'Dodajanje delovnega prostora',
+ connectedWorkspace: 'Povezani delovni prostor',
+ description: 'Uporaba pojma kot vira podatkov za znanje.',
+ disconnected: 'Odklopi',
+ pagesAuthorized: 'Dovoljene strani',
+ title: 'Notion',
+ changeAuthorizedPages: 'Spreminjanje pooblaščenih strani',
+ sync: 'Sinhroniziranje',
+ },
+ website: {
+ active: 'Dejaven',
+ configuredCrawlers: 'Konfigurirani pajki',
+ title: 'Spletna stran',
+ inactive: 'Neaktiven',
+ description: 'Uvozite vsebino s spletnih mest s spletnim pajkom.',
+ with: 'S',
+ },
+ add: 'Dodajanje vira podatkov',
+ connect: 'Povezati',
+ configure: 'Konfigurirati',
+ },
+ plugin: {
+ serpapi: {
+ apiKeyPlaceholder: 'Vnesite ključ API',
+ apiKey: 'API ključ',
+ keyFrom: 'Pridobite svoj ključ SerpAPI na strani računa SerpAPI',
+ },
+ },
+ apiBasedExtension: {
+ selector: {
+ placeholder: 'Prosimo, izberite razširitev API-ja',
+ manage: 'Upravljanje razširitve API',
+ title: 'Razširitev API-ja',
+ },
+ modal: {
+ name: {
+ placeholder: 'Prosimo, vnesite ime',
+ title: 'Ime',
+ },
+ apiEndpoint: {
+ title: 'Končna točka API-ja',
+ placeholder: 'Prosimo, vnesite končno točko API-ja',
+ },
+ apiKey: {
+ lengthError: 'Dolžina ključa API ne sme biti manjša od 5 znakov',
+ title: 'Ključ API-ja',
+ placeholder: 'Prosimo, vnesite API-ključ',
+ },
+ editTitle: 'Uredi razširitev API-ja',
+ title: 'Dodajanje razširitve API-ja',
+ },
+ type: 'Vrsta',
+ link: 'Preberite, kako razvijete lastno razširitev API-ja.',
+ title: 'Razširitve API zagotavljajo centralizirano upravljanje API, kar poenostavlja konfiguracijo za enostavno uporabo v aplikacijah Dify.',
+ linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ add: 'Dodajanje razširitve API-ja',
+ },
+ about: {
+ updateNow: 'Posodobi zdaj',
+ nowAvailable: 'Dify {{version}} je zdaj na voljo.',
+ latestAvailable: 'Dify {{version}} je najnovejša različica, ki je na voljo.',
+ changeLog: 'Dnevnik sprememb',
+ },
+ appMenus: {
+ apiAccess: 'Dostop do API-ja',
+ logs: 'Dnevniki',
+ logAndAnn: 'Dnevniki & Ann.',
+ promptEng: 'Orkester',
+ overview: 'Spremljanje',
+ },
+ environment: {
+ development: 'RAZVOJ',
+ testing: 'PREIZKUŠANJE',
+ },
+ appModes: {
+ completionApp: 'Generator besedila',
+ chatApp: 'Aplikacija za klepet',
+ },
+ datasetMenus: {
+ documents: 'Dokumentov',
+ settings: 'Nastavitve',
+ hitTesting: 'Testiranje pridobivanja',
+ emptyTip: 'Znanje ni bilo povezano, prosimo, pojdite na aplikacijo ali vtičnik, da dokončate združenje.',
+ viewDoc: 'Oglejte si dokumentacijo',
+ relatedApp: 'Povezane aplikacije',
+ },
+ voiceInput: {
+ notAllow: 'Mikrofon ni pooblaščen',
+ speaking: 'Spregovorite zdaj ...',
+ converting: 'Pretvorba v besedilo ...',
+ },
+ modelName: {
+ 'claude-2': 'Claude-2',
+ 'gpt-4-32k': 'GPT-4-32K',
+ 'text-embedding-ada-002': 'Vdelava besedila-Ada-002',
+ 'gpt-4': 'GPT-4',
+ 'whisper-1': 'Whisper-1',
+ 'claude-instant-1': 'Claude-Instant',
+ 'text-davinci-003': 'Besedilo-Davinci-003',
+ 'gpt-3.5-turbo-16k': 'GPT-3.5-Turbo-16K',
+ 'gpt-3.5-turbo': 'GPT-3.5-Turbo',
+ },
+ chat: {
+ citation: {
+ vectorHash: 'Vektorska razpršitev:',
+ hitScore: 'Rezultat pridobivanja:',
+ linkToDataset: 'Povezava do znanja',
+ hitCount: 'Število pridobivanja:',
+ characters: 'Znakov:',
+ title: 'CITATI',
+ },
+ conversationNameCanNotEmpty: 'Zahtevano ime pogovora',
+ inputPlaceholder: 'Pogovorite se z botom',
+ renameConversation: 'Preimenovanje pogovora',
+ conversationName: 'Ime pogovora',
+ conversationNamePlaceholder: 'Prosimo, vnesite ime pogovora',
+ },
+ promptEditor: {
+ context: {
+ item: {
+ desc: 'Vstavljanje predloge konteksta',
+ title: 'Kontekst',
+ },
+ modal: {
+ footer: 'Kontekste lahko upravljate v spodnjem razdelku Kontekst.',
+ title: '{{num}} Znanje v kontekstu',
+ add: 'Dodajanje konteksta',
+ },
+ },
+ history: {
+ item: {
+ desc: 'Vstavljanje predloge zgodovinskega sporočila',
+ title: 'Zgodovina pogovorov',
+ },
+ modal: {
+ title: 'PRIMER',
+ edit: 'Urejanje imen vlog v pogovoru',
+ assistant: 'Zdravo! Kako vam lahko pomagam danes?',
+ user: 'Zdravo',
+ },
+ },
+ variable: {
+ item: {
+ desc: 'Vstavljanje spremenljivk in zunanjih orodij',
+ title: 'Spremenljivke in zunanja orodja',
+ },
+ outputToolDisabledItem: {
+ title: 'Spremenljivke',
+ desc: 'Vstavljanje spremenljivk',
+ },
+ modal: {
+ addTool: 'Novo orodje',
+ add: 'Nova spremenljivka',
+ },
+ },
+ query: {
+ item: {
+ title: 'Poizvedba',
+ desc: 'Vstavljanje predloge uporabniške poizvedbe',
+ },
+ },
+ existed: 'Že obstaja v pozivu',
+ placeholder: 'Tukaj vnesite svojo pozivno besedo, vnesite \'{\' za vstavljanje spremenljivke, vnesite \'/\', da vstavite blok vsebine',
+ },
+ imageUploader: {
+ pasteImageLinkInvalid: 'Neveljavna povezava do slike',
+ uploadFromComputerLimit: 'Nalaganje slik ne sme presegati {{size}} MB',
+ uploadFromComputerUploadError: 'Nalaganje slike ni uspelo, naložite ga znova.',
+ pasteImageLink: 'Prilepi povezavo do slike',
+ imageUpload: 'Nalaganje slik',
+ uploadFromComputer: 'Naloži iz računalnika',
+ pasteImageLinkInputPlaceholder: 'Tukaj prilepi povezavo do slike',
+ uploadFromComputerReadError: 'Branje slik ni uspelo, poskusite znova.',
+ },
+ fileUploader: {
+ fileExtensionNotSupport: 'Datotečna pripona ni podprta',
+ pasteFileLinkInvalid: 'Neveljavna povezava do datoteke',
+ pasteFileLink: 'Prilepi povezavo do datoteke',
+ pasteFileLinkInputPlaceholder: 'Vnesite URL ...',
+ uploadFromComputerUploadError: 'Nalaganje datoteke ni uspelo, naložite ga znova.',
+ uploadFromComputer: 'Lokalno nalaganje',
+ uploadFromComputerLimit: 'Nalaganje {{type}} ne sme presegati {{size}}',
+ uploadFromComputerReadError: 'Branje datoteke ni uspelo, poskusite znova.',
+ },
+ tag: {
+ addTag: 'Dodajanje oznak',
+ delete: 'Brisanje oznake',
+ manageTags: 'Upravljanje oznak',
+ addNew: 'Dodajanje nove oznake',
+ placeholder: 'Vse oznake',
+ failed: 'Ustvarjanje oznake ni uspelo',
+ editTag: 'Urejanje oznak',
+ created: 'Oznaka je bila uspešno ustvarjena',
+ noTagYet: 'Še ni oznak',
+ create: 'Ustvariti',
+ deleteTip: 'Oznaka se uporablja, jo izbrišite?',
+ noTag: 'Brez oznak',
+ selectorPlaceholder: 'Vnesite za iskanje ali ustvarjanje',
+ },
+ license: {
+ expiring_plural: 'Poteče v {{count}} dneh',
+ expiring: 'Poteče v enem dnevu',
+ },
}
+
export default translation
diff --git a/web/i18n/sl-SI/dataset-creation.ts b/web/i18n/sl-SI/dataset-creation.ts
index 1b24313045..402066ad40 100644
--- a/web/i18n/sl-SI/dataset-creation.ts
+++ b/web/i18n/sl-SI/dataset-creation.ts
@@ -152,6 +152,7 @@ const translation = {
indexSettingTip: 'Če želite spremeniti način indeksiranja in model vdelave, pojdite na ',
retrievalSettingTip: 'Če želite spremeniti nastavitve iskanja, pojdite na ',
datasetSettingLink: 'nastavitve Znanja.',
+ maxLengthCheck: 'Največja dolžina kosa mora biti manjša od 4000',
},
stepThree: {
creationTitle: '🎉 Znanje ustvarjeno',
diff --git a/web/i18n/sl-SI/login.ts b/web/i18n/sl-SI/login.ts
index 13dfea984d..70350021bc 100644
--- a/web/i18n/sl-SI/login.ts
+++ b/web/i18n/sl-SI/login.ts
@@ -55,6 +55,7 @@ const translation = {
passwordEmpty: 'Geslo je obvezno',
passwordLengthInValid: 'Geslo mora vsebovati vsaj 8 znakov',
passwordInvalid: 'Geslo mora vsebovati črke in številke, dolžina pa mora biti več kot 8 znakov',
+ registrationNotAllowed: 'Računa ni mogoče najti. Za registracijo se obrnite na skrbnika sistema.',
},
license: {
tip: 'Preden začnete z Dify Community Edition, preberite GitHub',
@@ -70,6 +71,40 @@ const translation = {
activated: 'Prijavite se zdaj',
adminInitPassword: 'Geslo za inicializacijo administratorja',
validate: 'Potrdi',
+ checkCode: {
+ emptyCode: 'Koda je obvezna',
+ verificationCodePlaceholder: 'Vnesite 6-mestno kodo',
+ resend: 'Poslati',
+ verificationCode: 'Koda za preverjanje',
+ tips: 'Kodo za preverjanje pošljemo na {{email}}',
+ verify: 'Preveriti',
+ validTime: 'Upoštevajte, da je koda veljavna 5 minut',
+ checkYourEmail: 'Preverjanje e-pošte',
+ didNotReceiveCode: 'Niste prejeli kode?',
+ invalidCode: 'Neveljavna koda',
+ useAnotherMethod: 'Uporabite drug način',
+ },
+ useVerificationCode: 'Uporaba kode za preverjanje',
+ licenseInactive: 'Licenca je neaktivna',
+ changePasswordBtn: 'Nastavitev gesla',
+ licenseExpired: 'Licenca je potekla',
+ resetPassword: 'Ponastavi geslo',
+ back: 'Nazaj',
+ backToLogin: 'Nazaj na prijavo',
+ enterYourName: 'Prosimo, vnesite svoje uporabniško ime',
+ licenseLost: 'Izgubljena licenca',
+ licenseExpiredTip: 'Licenca za Dify Enterprise za vaš delovni prostor je potekla. Če želite še naprej uporabljati Dify, se obrnite na skrbnika.',
+ usePassword: 'Uporaba gesla',
+ sendVerificationCode: 'Pošlji kodo za preverjanje',
+ resetPasswordDesc: 'Vnesite e-poštni naslov, ki ste ga uporabili za prijavo na Dify, in poslali vam bomo e-poštno sporočilo za ponastavitev gesla.',
+ setYourAccount: 'Nastavitev računa',
+ noLoginMethodTip: 'Obrnite se na skrbnika sistema, da dodate način preverjanja pristnosti.',
+ or: 'ALI',
+ noLoginMethod: 'Način preverjanja pristnosti ni konfiguriran',
+ continueWithCode: 'Nadaljujte s kodo',
+ withSSO: 'Nadaljujte z enotno prijavo',
+ licenseLostTip: 'Povezava z licenčnim strežnikom Dify ni uspela. Če želite še naprej uporabljati Dify, se obrnite na skrbnika.',
+ licenseInactiveTip: 'Licenca Dify Enterprise za vaš delovni prostor je neaktivna. Če želite še naprej uporabljati Dify, se obrnite na skrbnika.',
}
export default translation
diff --git a/web/i18n/sl-SI/workflow.ts b/web/i18n/sl-SI/workflow.ts
index 767139b741..e5141f06fb 100644
--- a/web/i18n/sl-SI/workflow.ts
+++ b/web/i18n/sl-SI/workflow.ts
@@ -95,6 +95,10 @@ const translation = {
addParallelNode: 'Dodaj vzporedno vozlišče',
parallel: 'VZPOREDNO',
branch: 'VEJA',
+ fileUploadTip: 'Funkcije nalaganja slik so nadgrajene na nalaganje datotek.',
+ featuresDocLink: 'Izvedi več',
+ featuresDescription: 'Izboljšajte uporabniško izkušnjo spletne aplikacije',
+ ImageUploadLegacyTip: 'Zdaj lahko ustvarite spremenljivke vrste datoteke v začetnem obrazcu. V prihodnje ne bomo več podpirali funkcije nalaganja slik.',
},
env: {
envPanelTitle: 'Spremenljivke okolja',
@@ -554,6 +558,506 @@ const translation = {
tracing: {
stopBy: 'Ustavljeno s strani {{user}}',
},
+ chatVariable: {
+ modal: {
+ type: 'Vrsta',
+ objectValue: 'Privzeta vrednost',
+ description: 'Opis',
+ editTitle: 'Urejanje spremenljivke pogovora',
+ namePlaceholder: 'Ime spremenljivke',
+ valuePlaceholder: 'Privzeta vrednost, pustite prazno, da ni nastavljeno',
+ title: 'Dodajanje spremenljivke pogovora',
+ editInJSON: 'Urejanje v JSON',
+ value: 'Privzeta vrednost',
+ oneByOne: 'Dodajanje enega za drugim',
+ objectKey: 'Ključ',
+ objectType: 'Vrsta',
+ arrayValue: 'Vrednost',
+ name: 'Ime',
+ descriptionPlaceholder: 'Opis spremenljivke',
+ editInForm: 'Uredi v obrazcu',
+ addArrayValue: 'Dodajanje vrednosti',
+ },
+ storedContent: 'Shranjena vsebina',
+ updatedAt: 'Posodobljeno na',
+ panelTitle: 'Spremenljivke pogovora',
+ button: 'Dodajanje spremenljivke',
+ panelDescription: 'Spremenljivke pogovora se uporabljajo za shranjevanje interaktivnih informacij, ki si jih mora LLM zapomniti, vključno z zgodovino pogovorov, naloženimi datotekami, uporabniškimi nastavitvami. So branje in pisanje.',
+ docLink: 'Če želite izvedeti več, obiščite naše dokumente.',
+ },
+ changeHistory: {
+ nodeChange: 'Blokiranje spremenjeno',
+ placeholder: 'Ničesar še niste spremenili',
+ nodeDescriptionChange: 'Opis bloka je bil spremenjen',
+ nodePaste: 'Blokiranje lepljenja',
+ noteDelete: 'Opomba izbrisana',
+ nodeDragStop: 'Blok premaknjen',
+ nodeConnect: 'Blok povezan',
+ sessionStart: 'Začetek seje',
+ nodeDelete: 'Blokiraj izbrisane',
+ stepBackward_other: '{{count}} stopi nazaj',
+ hint: 'Namig',
+ noteAdd: 'Opomba dodana',
+ clearHistory: 'Počisti zgodovino',
+ stepForward_one: '{{count}} korak naprej',
+ stepBackward_one: '{{count}} korak nazaj',
+ nodeAdd: 'Blokiranje dodano',
+ noteChange: 'Opomba spremenjena',
+ hintText: 'Dejanjem urejanja se sledi v zgodovini sprememb, ki je shranjena v napravi za čas trajanja te seje. Ta zgodovina bo izbrisana, ko zapustite urejevalnik.',
+ stepForward_other: '{{count}} koraki naprej',
+ edgeDelete: 'Blok je prekinjen.',
+ nodeTitleChange: 'Naslov bloka spremenjen',
+ nodeResize: 'Spremeni velikost bloka',
+ title: 'Zgodovina sprememb',
+ currentState: 'Trenutno stanje',
+ },
+ errorMsg: {
+ fields: {
+ code: 'Koda',
+ variableValue: 'Vrednost spremenljivke',
+ visionVariable: 'Spremenljivka vida',
+ model: 'Model',
+ rerankModel: 'Ponovno razvrsti model',
+ variable: 'Ime spremenljivke',
+ },
+ invalidJson: '{{field}} je neveljaven JSON',
+ invalidVariable: 'Neveljavna spremenljivka',
+ authRequired: 'Dovoljenje je potrebno',
+ fieldRequired: '{{field}} je obvezno',
+ rerankModelRequired: 'Preden vklopite Rerank Model, preverite, ali je bil model uspešno konfiguriran v nastavitvah.',
+ },
+ singleRun: {
+ startRun: 'Začni zagnati',
+ running: 'Tek',
+ testRunIteration: 'Ponovitev preskusnega zagona',
+ iteration: 'Ponovitev',
+ back: 'Nazaj',
+ testRun: 'Preskusni zagon',
+ },
+ tabs: {
+ 'blocks': 'Bloki',
+ 'workflowTool': 'Potek dela',
+ 'transform': 'Preoblikovanje',
+ 'question-understand': 'Vprašanje razumeti',
+ 'builtInTool': 'Vgrajeno',
+ 'allTool': 'Ves',
+ 'tools': 'Orodja',
+ 'logic': 'Logika',
+ 'searchBlock': 'Iskalni blok',
+ 'noResult': 'Ni najdenega ujemanja',
+ 'customTool': 'Običaj',
+ 'utilities': 'Utilities',
+ 'searchTool': 'Orodje za iskanje',
+ },
+ blocks: {
+ 'variable-aggregator': 'Spremenljivi agregator',
+ 'code': 'Koda',
+ 'parameter-extractor': 'Ekstraktor parametrov',
+ 'llm': 'LLM',
+ 'knowledge-retrieval': 'Pridobivanje znanja',
+ 'answer': 'Odgovoriti',
+ 'end': 'Konec',
+ 'document-extractor': 'Ekstraktor dokumentov',
+ 'assigner': 'Dodeljevalnik spremenljivke',
+ 'iteration-start': 'Začetek ponovitve',
+ 'template-transform': 'Predloga',
+ 'iteration': 'Ponovitev',
+ 'start': 'Začetek',
+ 'if-else': 'IF/ELSE',
+ 'list-operator': 'Operater seznama',
+ 'http-request': 'Zahteva HTTP',
+ 'variable-assigner': 'Spremenljivi agregator',
+ 'question-classifier': 'Klasifikator vprašanj',
+ },
+ blocksAbout: {
+ 'document-extractor': 'Uporablja se za razčlenjevanje naloženih dokumentov v besedilno vsebino, ki je zlahka razumljiva LLM.',
+ 'list-operator': 'Uporablja se za filtriranje ali razvrščanje vsebine matrike.',
+ 'template-transform': 'Pretvorite podatke v niz s sintakso predloge Jinja',
+ 'question-classifier': 'Določite pogoje razvrščanja uporabniških vprašanj, LLM lahko določi, kako poteka pogovor na podlagi opisa klasifikacije',
+ 'start': 'Določanje začetnih parametrov za zagon poteka dela',
+ 'if-else': 'Omogoča razdelitev poteka dela na dve veji glede na pogoje if/else',
+ 'knowledge-retrieval': 'Omogoča poizvedovanje po besedilni vsebini, ki je povezana z uporabniškimi vprašanji iz zbirke znanja',
+ 'variable-assigner': 'Združite spremenljivke z več vejami v eno spremenljivko za poenoteno konfiguracijo nadaljnjih vozlišč.',
+ 'code': 'Izvedite kodo Python ali NodeJS za izvajanje logike po meri',
+ 'answer': 'Določanje vsebine odgovora v pogovoru v klepetu',
+ 'iteration': 'Izvedite več korakov na predmetu seznama, dokler niso prikazani vsi rezultati.',
+ 'http-request': 'Dovoli pošiljanje zahtev strežnika prek protokola HTTP',
+ 'end': 'Določanje končne in končne vrste poteka dela',
+ 'variable-aggregator': 'Združite spremenljivke z več vejami v eno spremenljivko za poenoteno konfiguracijo nadaljnjih vozlišč.',
+ 'parameter-extractor': 'Uporabite LLM za pridobivanje strukturiranih parametrov iz naravnega jezika za klicanje orodij ali zahteve HTTP.',
+ 'assigner': 'Vozlišče za dodeljevanje spremenljivk se uporablja za dodeljevanje vrednosti zapisljivim spremenljivkam (kot so spremenljivke pogovora).',
+ 'llm': 'Sklicevanje na velike jezikovne modele za odgovarjanje na vprašanja ali obdelavo naravnega jezika',
+ },
+ operator: {
+ zoomOut: 'Pomanjšanje',
+ zoomTo100: 'Povečava na 100 %',
+ zoomToFit: 'Povečaj, da se prilega',
+ zoomIn: 'Povečava',
+ zoomTo50: 'Povečava na 50%',
+ },
+ panel: {
+ helpLink: 'Povezava za pomoč',
+ organizeBlocks: 'Organiziranje blokov',
+ optional: '(neobvezno)',
+ nextStep: 'Naslednji korak',
+ checklist: 'Kontrolni seznam',
+ runThisStep: 'Zaženite ta korak',
+ about: 'Približno',
+ selectNextStep: 'Izberite Naslednji blok',
+ changeBlock: 'Spremeni blok',
+ createdBy: 'Ustvaril',
+ checklistTip: 'Pred objavo se prepričajte, da so vse težave odpravljene',
+ userInputField: 'Uporabniško polje za vnos',
+ checklistResolved: 'Vse težave so odpravljene',
+ addNextStep: 'Dodajanje naslednjega bloka v ta potek dela',
+ change: 'Spremeniti',
+ },
+ nodes: {
+ common: {
+ memory: {
+ conversationRoleName: 'Ime vloge pogovora',
+ memoryTip: 'Nastavitve pomnilnika klepeta',
+ assistant: 'Predpona pomočnika',
+ user: 'Uporabniška predpona',
+ memory: 'Spomin',
+ windowSize: 'Velikost okna',
+ },
+ memories: {
+ tip: 'Pomnilnik klepeta',
+ title: 'Spomine',
+ builtIn: 'Vgrajeno',
+ },
+ outputVars: 'Izhodne spremenljivke',
+ insertVarTip: 'Vstavi spremenljivko',
+ },
+ start: {
+ outputVars: {
+ memories: {
+ content: 'Vsebina sporočila',
+ des: 'Zgodovina pogovorov',
+ type: 'Vrsta sporočila',
+ },
+ query: 'Uporabniški vnos',
+ files: 'Seznam datotek',
+ },
+ required: 'Zahteva',
+ inputField: 'Vnosno polje',
+ noVarTip: 'Nastavitev vhodov, ki jih je mogoče uporabiti v poteku dela',
+ builtInVar: 'Vgrajene spremenljivke',
+ },
+ end: {
+ output: {
+ variable: 'izhodna spremenljivka',
+ type: 'Vrsta izhoda',
+ },
+ type: {
+ 'structured': 'Strukturiran',
+ 'plain-text': 'Navadno besedilo',
+ 'none': 'Nobena',
+ },
+ outputs: 'Izhodov',
+ },
+ answer: {
+ answer: 'Odgovoriti',
+ outputVars: 'Izhodne spremenljivke',
+ },
+ llm: {
+ roleDescription: {
+ assistant: 'Odgovori modela na podlagi sporočil uporabnikov',
+ system: 'Podajte navodila na visoki ravni za pogovor',
+ user: 'Navedite navodila, poizvedbe ali kakršen koli besedilni vnos v model',
+ },
+ resolution: {
+ low: 'Nizek',
+ high: 'Visok',
+ name: 'Resolucija',
+ },
+ outputVars: {
+ usage: 'Informacije o uporabi modela',
+ output: 'Ustvarjanje vsebine',
+ },
+ singleRun: {
+ variable: 'Spremenljivka',
+ },
+ notSetContextInPromptTip: 'Če želite omogočiti funkcijo konteksta, izpolnite kontekstno spremenljivko v PROMPT.',
+ sysQueryInUser: 'sys.query v sporočilu uporabnika je obvezen',
+ model: 'model',
+ files: 'Datoteke',
+ addMessage: 'Dodaj sporočilo',
+ context: 'Kontekstu',
+ variables: 'Spremenljivke',
+ prompt: 'Poziv',
+ vision: 'vid',
+ contextTooltip: 'Znanje lahko uvozite kot kontekst',
+ },
+ knowledgeRetrieval: {
+ outputVars: {
+ title: 'Segmentirani naslov',
+ url: 'Segmentirani URL',
+ output: 'Pridobivanje segmentiranih podatkov',
+ icon: 'Segmentirana ikona',
+ metadata: 'Drugi metapodatki',
+ content: 'Segmentirana vsebina',
+ },
+ queryVariable: 'Spremenljivka poizvedbe',
+ knowledge: 'Znanje',
+ },
+ http: {
+ outputVars: {
+ headers: 'JSON seznama glav odgovorov',
+ body: 'Vsebina odgovora',
+ files: 'Seznam datotek',
+ statusCode: 'Koda stanja odgovora',
+ },
+ authorization: {
+ 'authorization': 'Dovoljenje',
+ 'header': 'Glava',
+ 'bearer': 'Nosilec',
+ 'api-key-title': 'API ključ',
+ 'basic': 'Osnoven',
+ 'no-auth': 'Nobena',
+ 'custom': 'Običaj',
+ 'authorizationType': 'Vrsta dovoljenja',
+ 'auth-type': 'Vrsta preverjanja pristnosti',
+ 'api-key': 'Ključ API-ja',
+ },
+ timeout: {
+ readPlaceholder: 'Vnos časovne omejitve branja v sekundah',
+ writePlaceholder: 'Vnesite časovno omejitev pisanja v sekundah',
+ writeLabel: 'Časovna omejitev pisanja',
+ connectLabel: 'Časovna omejitev povezave',
+ title: 'Timeout',
+ readLabel: 'Časovna omejitev branja',
+ connectPlaceholder: 'Vnos časovne omejitve povezave v sekundah',
+ },
+ value: 'Vrednost',
+ key: 'Ključ',
+ notStartWithHttp: 'API se mora začeti z http:// ali https://',
+ body: 'Telo',
+ type: 'Vrsta',
+ inputVars: 'Vhodne spremenljivke',
+ bulkEdit: 'Urejanje v velikem obsegu',
+ insertVarPlaceholder: 'vnesite "/" za vstavljanje spremenljivke',
+ api: 'API',
+ keyValueEdit: 'Urejanje ključ-vrednost',
+ binaryFileVariable: 'Spremenljivka binarne datoteke',
+ headers: 'Glave',
+ apiPlaceholder: 'Vnesite URL, vnesite \'/\' vstavi spremenljivko',
+ extractListPlaceholder: 'Vnesite indeks elementa seznama, vnesite \'/\' vstavi spremenljivko',
+ params: 'Params',
+ },
+ code: {
+ inputVars: 'Vhodne spremenljivke',
+ outputVars: 'Izhodne spremenljivke',
+ searchDependencies: 'Odvisnosti iskanja',
+ advancedDependenciesTip: 'Tukaj dodajte nekaj vnaprej naloženih odvisnosti, ki trajajo dlje časa ali niso privzeto vgrajene',
+ advancedDependencies: 'Napredne odvisnosti',
+ },
+ templateTransform: {
+ outputVars: {
+ output: 'Preoblikovana vsebina',
+ },
+ code: 'Koda',
+ inputVars: 'Vhodne spremenljivke',
+ codeSupportTip: 'Podpira samo Jinja2',
+ },
+ ifElse: {
+ comparisonOperator: {
+ 'all of': 'vse',
+ 'is not': 'ni',
+ 'not empty': 'ni prazen',
+ 'start with': 'Začnite z',
+ 'is': 'Je',
+ 'null': 'je nična',
+ 'not exists': 'ne obstaja',
+ 'contains': 'Vsebuje',
+ 'empty': 'je prazen',
+ 'exists': 'Obstaja',
+ 'in': 'v',
+ 'not contains': 'ne vsebuje',
+ 'end with': 'Končaj z',
+ 'not in': 'ni v',
+ 'not null': 'ni nična',
+ },
+ optionName: {
+ video: 'Video',
+ doc: 'Doc',
+ audio: 'Avdio',
+ image: 'Podoba',
+ url: 'Spletni naslov',
+ localUpload: 'Lokalno nalaganje',
+ },
+ and: 'in',
+ else: 'Drugega',
+ enterValue: 'Vnesite vrednost',
+ elseDescription: 'Uporablja se za določanje logike, ki jo je treba izvesti, ko pogoj if ni izpolnjen.',
+ addCondition: 'Dodajanje pogoja',
+ if: 'Če',
+ select: 'Izbrati',
+ selectVariable: 'Izberite spremenljivko ...',
+ conditionNotSetup: 'Pogoj NI nastavljen',
+ addSubVariable: 'Podspremenljivka',
+ notSetVariable: 'Prosimo, najprej nastavite spremenljivko',
+ operator: 'Operaterja',
+ or: 'ali',
+ },
+ variableAssigner: {
+ type: {
+ string: 'Niz',
+ object: 'Predmet',
+ array: 'Matrika',
+ number: 'Številka',
+ },
+ outputVars: {
+ varDescribe: '{{groupName}} izhod',
+ },
+ addGroup: 'Dodajanje skupine',
+ outputType: 'Vrsta izhoda',
+ title: 'Dodeljevanje spremenljivk',
+ noVarTip: 'Seštevanje spremenljivk, ki jih je treba dodeliti',
+ aggregationGroupTip: 'Če omogočite to funkcijo, lahko združevalnik spremenljivk združi več naborov spremenljivk.',
+ aggregationGroup: 'Združevalna skupina',
+ varNotSet: 'Spremenljivka ni nastavljena',
+ setAssignVariable: 'Nastavitev spremenljivke dodelitve',
+ },
+ assigner: {
+ 'writeMode': 'Način pisanja',
+ 'plus': 'Plus',
+ 'variable': 'Spremenljivka',
+ 'clear': 'Počisti',
+ 'append': 'Dodaj',
+ 'assignedVariable': 'Dodeljena spremenljivka',
+ 'setVariable': 'Nastavi spremenljivko',
+ 'over-write': 'Prepisati',
+ 'writeModeTip': 'Način dodajanja: Na voljo samo za spremenljivke polja.',
+ },
+ tool: {
+ outputVars: {
+ files: {
+ transfer_method: 'Način prenosa. Vrednost je remote_url ali local_file',
+ upload_file_id: 'Naloži ID datoteke',
+ type: 'Vrsta podpore. Zdaj podpiramo samo sliko',
+ url: 'URL slike',
+ title: 'Datoteke, ustvarjene z orodjem',
+ },
+ json: 'JSON, ustvarjen z orodjem',
+ text: 'Vsebina, ustvarjena z orodjem',
+ },
+ inputVars: 'Vhodne spremenljivke',
+ toAuthorize: 'Za odobritev',
+ },
+ questionClassifiers: {
+ outputVars: {
+ className: 'Ime razreda',
+ },
+ instruction: 'Navodilo',
+ classNamePlaceholder: 'Napišite ime svojega razreda',
+ addClass: 'Dodajanje razreda',
+ instructionPlaceholder: 'Napišite navodila',
+ topicName: 'Ime teme',
+ topicPlaceholder: 'Napišite ime teme',
+ class: 'Razred',
+ advancedSetting: 'Napredne nastavitve',
+ model: 'model',
+ inputVars: 'Vhodne spremenljivke',
+ instructionTip: 'Vnesite dodatna navodila, ki bodo klasifikatorju vprašanj pomagala bolje razumeti, kako kategorizirati vprašanja.',
+ },
+ parameterExtractor: {
+ addExtractParameterContent: {
+ description: 'Opis',
+ typePlaceholder: 'Vrsta parametra izvlečka',
+ requiredContent: 'Zahtevano se uporablja samo kot referenca za sklepanje modela in ne za obvezno validacijo izhodnega parametra.',
+ required: 'Zahteva',
+ type: 'Vrsta',
+ namePlaceholder: 'Izvleček imena parametra',
+ descriptionPlaceholder: 'Opis parametra izvlečka',
+ name: 'Ime',
+ },
+ isSuccess: 'Je uspeh.Pri uspehu je vrednost 1, pri neuspehu je vrednost 0.',
+ addExtractParameter: 'Dodajanje parametra izvlečka',
+ importFromTool: 'Uvoz iz orodij',
+ reasoningModeTip: 'Izberete lahko ustrezen način sklepanja glede na sposobnost modela, da se odzove na navodila za klicanje funkcij ali pozive.',
+ inputVar: 'Vhodna spremenljivka',
+ advancedSetting: 'Napredne nastavitve',
+ errorReason: 'Razlog za napako',
+ reasoningMode: 'Način sklepanja',
+ instruction: 'Navodilo',
+ instructionTip: 'Vnesite dodatna navodila, ki bodo ekstraktorju parametrov pomagala razumeti, kako izvleči parametre.',
+ extractParametersNotSet: 'Izvleček parametrov ni nastavljen',
+ extractParameters: 'Izvleček parametrov',
+ },
+ iteration: {
+ ErrorMethod: {
+ continueOnError: 'Nadaljuj ob napaki',
+ removeAbnormalOutput: 'Odstranite nenormalen izhod',
+ operationTerminated: 'Prekinjena',
+ },
+ output: 'Izhodne spremenljivke',
+ parallelMode: 'Vzporedni način',
+ MaxParallelismTitle: 'Največja vzporednost',
+ errorResponseMethod: 'Način odziva na napako',
+ parallelModeEnableDesc: 'V vzporednem načinu opravila v iteracijah podpirajo vzporedno izvajanje. To lahko konfigurirate na plošči z lastnostmi na desni.',
+ error_one: '{{count}} Napaka',
+ comma: ',',
+ parallelModeUpper: 'VZPOREDNI NAČIN',
+ parallelModeEnableTitle: 'Vzporedni način omogočen',
+ currentIteration: 'Trenutna ponovitev',
+ error_other: '{{count}} Napake',
+ input: 'Vhodni',
+ deleteTitle: 'Izbrisati iteracijsko vozlišče?',
+ parallelPanelDesc: 'V vzporednem načinu opravila v iteraciji podpirajo vzporedno izvajanje.',
+ deleteDesc: 'Če izbrišete iteracijsko vozlišče, boste izbrisali vsa podrejena vozlišča',
+ iteration_other: '{{count}} Ponovitev',
+ answerNodeWarningDesc: 'Opozorilo vzporednega načina: Vozlišča za odgovore, dodelitve spremenljivk pogovora in trajne operacije branja / pisanja v iteracijah lahko povzročijo izjeme.',
+ MaxParallelismDesc: 'Največja vzporednost se uporablja za nadzor števila nalog, ki se izvajajo hkrati v eni ponovitvi.',
+ iteration_one: '{{count}} Ponovitev',
+ },
+ note: {
+ editor: {
+ medium: 'Srednja',
+ openLink: 'Odprt',
+ showAuthor: 'Pokaži avtorja',
+ bold: 'Krepko',
+ strikethrough: 'Prečrtano',
+ large: 'Velik',
+ link: 'Povezava',
+ enterUrl: 'Vnesite URL ...',
+ small: 'Majhen',
+ italic: 'Ležeče',
+ invalidUrl: 'Neveljaven URL',
+ unlink: 'Prekini povezavo',
+ placeholder: 'Napišite svojo opombo ...',
+ bulletList: 'Seznam oznak',
+ },
+ addNote: 'Dodaj opombo',
+ },
+ docExtractor: {
+ outputVars: {
+ text: 'Izvlečeno besedilo',
+ },
+ inputVar: 'Vhodna spremenljivka',
+ learnMore: 'Izvedi več',
+ supportFileTypes: 'Podporne vrste datotek: {{types}}.',
+ },
+ listFilter: {
+ outputVars: {
+ result: 'Rezultat filtriranja',
+ first_record: 'Prvi zapis',
+ last_record: 'Zadnji zapis',
+ },
+ extractsCondition: 'Ekstrahiranje elementa N',
+ selectVariableKeyPlaceholder: 'Izberite ključ podspremenljivke',
+ asc: 'ASC',
+ orderBy: 'Naročite po',
+ filterCondition: 'Pogoj filtra',
+ filterConditionKey: 'Ključ pogoja filtra',
+ desc: 'DESC',
+ limit: 'Vrh N',
+ filterConditionComparisonOperator: 'Operator za primerjavo pogojev filtra',
+ inputVar: 'Vhodna spremenljivka',
+ filterConditionComparisonValue: 'Vrednost pogoja filtra',
+ },
+ },
}
export default translation
diff --git a/web/i18n/tr-TR/common.ts b/web/i18n/tr-TR/common.ts
index 0438a637ce..3a80684560 100644
--- a/web/i18n/tr-TR/common.ts
+++ b/web/i18n/tr-TR/common.ts
@@ -591,6 +591,10 @@ const translation = {
pasteFileLinkInvalid: 'Geçersiz dosya bağlantısı',
fileExtensionNotSupport: 'Dosya uzantısı desteklenmiyor',
},
+ license: {
+ expiring_plural: '{{count}} gün içinde sona eriyor',
+ expiring: 'Bir günde sona eriyor',
+ },
}
export default translation
diff --git a/web/i18n/tr-TR/login.ts b/web/i18n/tr-TR/login.ts
index b6727082a6..e742548dc5 100644
--- a/web/i18n/tr-TR/login.ts
+++ b/web/i18n/tr-TR/login.ts
@@ -99,6 +99,12 @@ const translation = {
noLoginMethodTip: 'Bir kimlik doğrulama yöntemi eklemek için lütfen sistem yöneticisine başvurun.',
sendVerificationCode: 'Doğrulama Kodu Gönder',
back: 'Geri',
+ licenseExpiredTip: 'Çalışma alanınız için Dify Enterprise lisansının süresi doldu. Dify\'ı kullanmaya devam etmek için lütfen yöneticinizle iletişime geçin.',
+ licenseLostTip: 'Dify lisans sunucusuna bağlanılamadı. Dify\'ı kullanmaya devam etmek için lütfen yöneticinizle iletişime geçin.',
+ licenseInactiveTip: 'Çalışma alanınız için Dify Enterprise lisansı etkin değil. Dify\'ı kullanmaya devam etmek için lütfen yöneticinizle iletişime geçin.',
+ licenseExpired: 'Lisansın Süresi Doldu',
+ licenseLost: 'Lisans Kaybedildi',
+ licenseInactive: 'Lisans Etkin Değil',
}
export default translation
diff --git a/web/i18n/tr-TR/workflow.ts b/web/i18n/tr-TR/workflow.ts
index e6e25f6d0e..50dc390d54 100644
--- a/web/i18n/tr-TR/workflow.ts
+++ b/web/i18n/tr-TR/workflow.ts
@@ -408,6 +408,7 @@ const translation = {
},
type: 'Tür',
binaryFileVariable: 'İkili Dosya Değişkeni',
+ extractListPlaceholder: 'Liste öğesi dizinini girin, \'/\' yazın değişken ekle',
},
code: {
inputVars: 'Giriş Değişkenleri',
@@ -619,6 +620,7 @@ const translation = {
filterConditionComparisonValue: 'Filtre Koşulu değeri',
selectVariableKeyPlaceholder: 'Alt değişken anahtarını seçin',
desc: 'DESC',
+ extractsCondition: 'N öğesini ayıklayın',
},
},
tracing: {
diff --git a/web/i18n/uk-UA/common.ts b/web/i18n/uk-UA/common.ts
index c3d3ef23b3..e3e4a39722 100644
--- a/web/i18n/uk-UA/common.ts
+++ b/web/i18n/uk-UA/common.ts
@@ -592,6 +592,10 @@ const translation = {
uploadFromComputerReadError: 'Не вдалося прочитати файл, будь ласка, спробуйте ще раз.',
uploadFromComputerUploadError: 'Не вдалося завантажити файл, будь ласка, завантажте ще раз.',
},
+ license: {
+ expiring: 'Термін дії закінчується за один день',
+ expiring_plural: 'Термін дії закінчується за {{count}} днів',
+ },
}
export default translation
diff --git a/web/i18n/uk-UA/login.ts b/web/i18n/uk-UA/login.ts
index cdb0b79d8e..e6d1d15dd5 100644
--- a/web/i18n/uk-UA/login.ts
+++ b/web/i18n/uk-UA/login.ts
@@ -99,6 +99,12 @@ const translation = {
noLoginMethodTip: 'Будь ласка, зверніться до адміністратора системи, щоб додати метод автентифікації.',
resetPasswordDesc: 'Введіть адресу електронної пошти, яку ви використовували для реєстрації на Dify, і ми надішлемо вам електронний лист для скидання пароля.',
resetPassword: 'Скинути пароль',
+ licenseLostTip: 'Не вдалося підключити сервер ліцензій Dify. Будь ласка, зверніться до свого адміністратора, щоб продовжити користуватися Dify.',
+ licenseExpired: 'Термін дії ліцензії минув',
+ licenseInactive: 'Ліцензія неактивна',
+ licenseLost: 'Ліцензію втрачено',
+ licenseInactiveTip: 'Ліцензія Dify Enterprise для вашої робочої області неактивна. Будь ласка, зверніться до свого адміністратора, щоб продовжити користуватися Dify.',
+ licenseExpiredTip: 'Термін дії ліцензії Dify Enterprise для вашого робочого простору закінчився. Будь ласка, зверніться до свого адміністратора, щоб продовжити користуватися Dify.',
}
export default translation
diff --git a/web/i18n/uk-UA/workflow.ts b/web/i18n/uk-UA/workflow.ts
index 663b5e4c13..6958e72ce2 100644
--- a/web/i18n/uk-UA/workflow.ts
+++ b/web/i18n/uk-UA/workflow.ts
@@ -407,6 +407,7 @@ const translation = {
},
type: 'Тип',
binaryFileVariable: 'Змінна двійкового файлу',
+ extractListPlaceholder: 'Введіть індекс елемента списку, введіть \'/\' вставити змінну',
},
code: {
inputVars: 'Вхідні змінні',
@@ -618,6 +619,7 @@ const translation = {
orderBy: 'Замовити по',
filterConditionComparisonOperator: 'Оператор порівняння умов фільтра',
filterConditionComparisonValue: 'Значення умови фільтра',
+ extractsCondition: 'Витягніть елемент N',
},
},
tracing: {
diff --git a/web/i18n/vi-VN/common.ts b/web/i18n/vi-VN/common.ts
index 45282ad5d2..8a25cd52f2 100644
--- a/web/i18n/vi-VN/common.ts
+++ b/web/i18n/vi-VN/common.ts
@@ -591,6 +591,10 @@ const translation = {
uploadFromComputerUploadError: 'Tải lên tệp không thành công, vui lòng tải lên lại.',
uploadFromComputerReadError: 'Đọc tệp không thành công, vui lòng thử lại.',
},
+ license: {
+ expiring_plural: 'Hết hạn sau {{count}} ngày',
+ expiring: 'Hết hạn trong một ngày',
+ },
}
export default translation
diff --git a/web/i18n/vi-VN/login.ts b/web/i18n/vi-VN/login.ts
index a07c1bf807..ab4ab68f48 100644
--- a/web/i18n/vi-VN/login.ts
+++ b/web/i18n/vi-VN/login.ts
@@ -99,6 +99,12 @@ const translation = {
usePassword: 'Sử dụng mật khẩu',
resetPassword: 'Đặt lại mật khẩu',
sendVerificationCode: 'Gửi mã xác minh',
+ licenseInactive: 'Giấy phép không hoạt động',
+ licenseLost: 'Mất giấy phép',
+ licenseInactiveTip: 'Giấy phép Dify Enterprise cho không gian làm việc của bạn không hoạt động. Vui lòng liên hệ với quản trị viên của bạn để tiếp tục sử dụng Dify.',
+ licenseExpired: 'Giấy phép đã hết hạn',
+ licenseExpiredTip: 'Giấy phép Dify Enterprise cho không gian làm việc của bạn đã hết hạn. Vui lòng liên hệ với quản trị viên của bạn để tiếp tục sử dụng Dify.',
+ licenseLostTip: 'Không thể kết nối máy chủ cấp phép Dify. Vui lòng liên hệ với quản trị viên của bạn để tiếp tục sử dụng Dify.',
}
export default translation
diff --git a/web/i18n/vi-VN/workflow.ts b/web/i18n/vi-VN/workflow.ts
index 1176fdd2b5..b90c89cb20 100644
--- a/web/i18n/vi-VN/workflow.ts
+++ b/web/i18n/vi-VN/workflow.ts
@@ -407,6 +407,7 @@ const translation = {
},
binaryFileVariable: 'Biến tệp nhị phân',
type: 'Kiểu',
+ extractListPlaceholder: 'Nhập chỉ mục mục danh sách, nhập \'/\' chèn biến',
},
code: {
inputVars: 'Biến đầu vào',
@@ -618,6 +619,7 @@ const translation = {
filterCondition: 'Điều kiện lọc',
asc: 'ASC',
filterConditionComparisonOperator: 'Toán tử so sánh điều kiện bộ lọc',
+ extractsCondition: 'Giải nén mục N',
},
},
tracing: {
diff --git a/web/i18n/zh-Hans/app-api.ts b/web/i18n/zh-Hans/app-api.ts
index a0defdab62..f59d9065a6 100644
--- a/web/i18n/zh-Hans/app-api.ts
+++ b/web/i18n/zh-Hans/app-api.ts
@@ -78,6 +78,7 @@ const translation = {
requestBody: 'Request Body',
pathParams: 'Path Params',
query: 'Query',
+ toc: '目录',
},
}
diff --git a/web/i18n/zh-Hans/common.ts b/web/i18n/zh-Hans/common.ts
index ab87085652..d9add510e9 100644
--- a/web/i18n/zh-Hans/common.ts
+++ b/web/i18n/zh-Hans/common.ts
@@ -602,6 +602,10 @@ const translation = {
created: '标签创建成功',
failed: '标签创建失败',
},
+ license: {
+ expiring: '许可证还有 1 天到期',
+ expiring_plural: '许可证还有 {{count}} 天到期',
+ },
}
export default translation
diff --git a/web/i18n/zh-Hans/login.ts b/web/i18n/zh-Hans/login.ts
index 40697701da..7f64c954b1 100644
--- a/web/i18n/zh-Hans/login.ts
+++ b/web/i18n/zh-Hans/login.ts
@@ -99,6 +99,12 @@ const translation = {
back: '返回',
noLoginMethod: '未配置身份认证方式',
noLoginMethodTip: '请联系系统管理员添加身份认证方式',
+ licenseExpired: '许可证已过期',
+ licenseExpiredTip: '您所在空间的 Dify Enterprise 许可证已过期,请联系管理员以继续使用 Dify。',
+ licenseLost: '许可证丢失',
+ licenseLostTip: '无法连接 Dify 许可证服务器,请联系管理员以继续使用 Dify。',
+ licenseInactive: '许可证未激活',
+ licenseInactiveTip: '您所在空间的 Dify Enterprise 许可证尚未激活,请联系管理员以继续使用 Dify。',
}
export default translation
diff --git a/web/i18n/zh-Hant/common.ts b/web/i18n/zh-Hant/common.ts
index 184331d6d5..f801a64d10 100644
--- a/web/i18n/zh-Hant/common.ts
+++ b/web/i18n/zh-Hant/common.ts
@@ -591,6 +591,10 @@ const translation = {
fileExtensionNotSupport: '不支援檔擴展名',
uploadFromComputerLimit: '上傳文件不能超過 {{size}}',
},
+ license: {
+ expiring: '將在1天內過期',
+ expiring_plural: '將在 {{count}} 天后過期',
+ },
}
export default translation
diff --git a/web/i18n/zh-Hant/login.ts b/web/i18n/zh-Hant/login.ts
index a35346e71a..6f2b834118 100644
--- a/web/i18n/zh-Hant/login.ts
+++ b/web/i18n/zh-Hant/login.ts
@@ -99,6 +99,12 @@ const translation = {
back: '返回',
resetPasswordDesc: '輸入您用於註冊 Dify 的電子郵件,我們將向您發送一封密碼重置電子郵件。',
usePassword: '使用密碼',
+ licenseExpiredTip: '您的工作區的 Dify Enterprise 許可證已過期。請聯繫您的管理員以繼續使用 Dify。',
+ licenseExpired: '許可證已過期',
+ licenseLost: '許可證丟失',
+ licenseInactive: '許可證處於非活動狀態',
+ licenseInactiveTip: '您的工作區的 Dify Enterprise 許可證處於非活動狀態。請聯繫您的管理員以繼續使用 Dify。',
+ licenseLostTip: '無法連接 Dify 許可證伺服器。請聯繫您的管理員以繼續使用 Dify。',
}
export default translation
diff --git a/web/i18n/zh-Hant/workflow.ts b/web/i18n/zh-Hant/workflow.ts
index f3fbfdedc2..3cd7ad2499 100644
--- a/web/i18n/zh-Hant/workflow.ts
+++ b/web/i18n/zh-Hant/workflow.ts
@@ -407,6 +407,7 @@ const translation = {
},
type: '類型',
binaryFileVariable: '二進位檔變數',
+ extractListPlaceholder: '輸入清單項索引,鍵入 『/』 插入變數',
},
code: {
inputVars: '輸入變量',
@@ -618,6 +619,7 @@ const translation = {
selectVariableKeyPlaceholder: 'Select sub variable key (選擇子變數鍵)',
filterConditionComparisonOperator: 'Filter Condition Comparison 運算符',
filterConditionKey: '篩選條件鍵',
+ extractsCondition: '提取第 N 項',
},
},
tracing: {
diff --git a/web/package.json b/web/package.json
index 0c0317b803..684754f72b 100644
--- a/web/package.json
+++ b/web/package.json
@@ -1,6 +1,6 @@
{
"name": "dify-web",
- "version": "0.11.1",
+ "version": "0.11.2",
"private": true,
"engines": {
"node": ">=18.17.0"
@@ -44,9 +44,8 @@
"@sentry/utils": "^7.54.0",
"@svgdotjs/svg.js": "^3.2.4",
"@tailwindcss/typography": "^0.5.15",
- "@tanstack/react-query": "^5.59.20",
- "@tanstack/react-query-devtools": "^5.59.20",
- "@types/hast": "^3.0.4",
+ "@tanstack/react-query": "^5.60.5",
+ "@tanstack/react-query-devtools": "^5.60.5",
"ahooks": "^3.8.1",
"class-variance-authority": "^0.7.0",
"classnames": "^2.5.1",
diff --git a/web/service/base.ts b/web/service/base.ts
index 4519c5f006..38ae5094e0 100644
--- a/web/service/base.ts
+++ b/web/service/base.ts
@@ -18,7 +18,8 @@ import type {
} from '@/types/workflow'
import { removeAccessToken } from '@/app/components/share/utils'
import type { FetchOptionType, ResponseError } from './fetch'
-import { ContentType, base, baseOptions, getPublicToken } from './fetch'
+import { ContentType, base, baseOptions, getAccessToken } from './fetch'
+import { asyncRunSafe } from '@/utils'
const TIME_OUT = 100000
export type IOnDataMoreInfo = {
@@ -253,14 +254,7 @@ const baseFetch = base
export const upload = (options: any, isPublicAPI?: boolean, url?: string, searchParams?: string): Promise => {
const urlPrefix = isPublicAPI ? PUBLIC_API_PREFIX : API_PREFIX
- let token = ''
- if (isPublicAPI) {
- token = getPublicToken()
- }
- else {
- const accessToken = localStorage.getItem('console_token') || ''
- token = accessToken
- }
+ const token = getAccessToken(isPublicAPI)
const defaultOptions = {
method: 'POST',
url: (url ? `${urlPrefix}${url}` : `${urlPrefix}/files/upload`) + (searchParams || ''),
@@ -351,6 +345,9 @@ export const ssePost = (
if (body)
options.body = JSON.stringify(body)
+ const accessToken = getAccessToken(isPublicAPI)
+ options.headers!.set('Authorization', `Bearer ${accessToken}`)
+
globalThis.fetch(urlWithPrefix, options as RequestInit)
.then((res) => {
if (!/^(2|3)\d{2}$/.test(String(res.status))) {
@@ -398,54 +395,78 @@ export const ssePost = (
}
// base request
-export const request = (url: string, options = {}, otherOptions: IOtherOptions = {}) => {
- return new Promise((resolve, reject) => {
- baseFetch(url, options, otherOptions).then(resolve).catch((errResp) => {
- if (errResp?.status === 401) {
- return refreshAccessTokenOrRelogin(TIME_OUT).then(() => {
- baseFetch(url, options, otherOptions).then(resolve).catch(reject)
- }).catch(() => {
- const {
- isPublicAPI = false,
- silent,
- } = otherOptions
- const bodyJson = errResp.json()
- if (isPublicAPI) {
- return bodyJson.then((data: ResponseError) => {
- if (data.code === 'web_sso_auth_required')
- requiredWebSSOLogin()
-
- if (data.code === 'unauthorized') {
- removeAccessToken()
- globalThis.location.reload()
- }
-
- return Promise.reject(data)
- })
- }
- const loginUrl = `${globalThis.location.origin}/signin`
- bodyJson.then((data: ResponseError) => {
- if (data.code === 'init_validate_failed' && IS_CE_EDITION && !silent)
- Toast.notify({ type: 'error', message: data.message, duration: 4000 })
- else if (data.code === 'not_init_validated' && IS_CE_EDITION)
- globalThis.location.href = `${globalThis.location.origin}/init`
- else if (data.code === 'not_setup' && IS_CE_EDITION)
- globalThis.location.href = `${globalThis.location.origin}/install`
- else if (location.pathname !== '/signin' || !IS_CE_EDITION)
- globalThis.location.href = loginUrl
- else if (!silent)
- Toast.notify({ type: 'error', message: data.message })
- }).catch(() => {
- // Handle any other errors
- globalThis.location.href = loginUrl
- })
- })
+export const request = async(url: string, options = {}, otherOptions?: IOtherOptions) => {
+ try {
+ const otherOptionsForBaseFetch = otherOptions || {}
+ const [err, resp] = await asyncRunSafe(baseFetch(url, options, otherOptionsForBaseFetch))
+ if (err === null)
+ return resp
+ const errResp: Response = err as any
+ if (errResp.status === 401) {
+ const [parseErr, errRespData] = await asyncRunSafe(errResp.json())
+ const loginUrl = `${globalThis.location.origin}/signin`
+ if (parseErr) {
+ globalThis.location.href = loginUrl
+ return Promise.reject(err)
}
- else {
- reject(errResp)
+ // special code
+ const { code, message } = errRespData
+ // webapp sso
+ if (code === 'web_sso_auth_required') {
+ requiredWebSSOLogin()
+ return Promise.reject(err)
}
- })
- })
+ if (code === 'unauthorized_and_force_logout') {
+ localStorage.removeItem('console_token')
+ localStorage.removeItem('refresh_token')
+ globalThis.location.reload()
+ return Promise.reject(err)
+ }
+ const {
+ isPublicAPI = false,
+ silent,
+ } = otherOptionsForBaseFetch
+ if (isPublicAPI && code === 'unauthorized') {
+ removeAccessToken()
+ globalThis.location.reload()
+ return Promise.reject(err)
+ }
+ if (code === 'init_validate_failed' && IS_CE_EDITION && !silent) {
+ Toast.notify({ type: 'error', message, duration: 4000 })
+ return Promise.reject(err)
+ }
+ if (code === 'not_init_validated' && IS_CE_EDITION) {
+ globalThis.location.href = `${globalThis.location.origin}/init`
+ return Promise.reject(err)
+ }
+ if (code === 'not_setup' && IS_CE_EDITION) {
+ globalThis.location.href = `${globalThis.location.origin}/install`
+ return Promise.reject(err)
+ }
+
+ // refresh token
+ const [refreshErr] = await asyncRunSafe(refreshAccessTokenOrRelogin(TIME_OUT))
+ if (refreshErr === null)
+ return baseFetch(url, options, otherOptionsForBaseFetch)
+ if (location.pathname !== '/signin' || !IS_CE_EDITION) {
+ globalThis.location.href = loginUrl
+ return Promise.reject(err)
+ }
+ if (!silent) {
+ Toast.notify({ type: 'error', message })
+ return Promise.reject(err)
+ }
+ globalThis.location.href = loginUrl
+ return Promise.reject(err)
+ }
+ else {
+ return Promise.reject(err)
+ }
+ }
+ catch (error) {
+ console.error(error)
+ return Promise.reject(error)
+ }
}
// request methods
diff --git a/web/service/fetch.ts b/web/service/fetch.ts
index 0ca804c01e..666a3e2336 100644
--- a/web/service/fetch.ts
+++ b/web/service/fetch.ts
@@ -72,18 +72,36 @@ export const getPublicToken = () => {
try {
accessTokenJson = JSON.parse(accessToken)
}
- catch {}
+ catch { }
token = accessTokenJson[sharedToken]
return token || ''
}
+export function getAccessToken(isPublicAPI?: boolean) {
+ if (isPublicAPI) {
+ const sharedToken = globalThis.location.pathname.split('/').slice(-1)[0]
+ const accessToken = localStorage.getItem('token') || JSON.stringify({ [sharedToken]: '' })
+ let accessTokenJson = { [sharedToken]: '' }
+ try {
+ accessTokenJson = JSON.parse(accessToken)
+ }
+ catch (e) {
+
+ }
+ return accessTokenJson[sharedToken]
+ }
+ else {
+ return localStorage.getItem('console_token') || ''
+ }
+}
+
const beforeRequestPublicAuthorization: BeforeRequestHook = (request) => {
- const token = getPublicToken()
+ const token = getAccessToken(true)
request.headers.set('Authorization', `Bearer ${token}`)
}
const beforeRequestAuthorization: BeforeRequestHook = (request) => {
- const accessToken = localStorage.getItem('console_token') || ''
+ const accessToken = getAccessToken()
request.headers.set('Authorization', `Bearer ${accessToken}`)
}
@@ -175,7 +193,7 @@ async function base<T>(url: string, options: FetchOptionType = {}, otherOptions: IOtherOptions): Promise<T> {
const contentType = res.headers.get('content-type')
if (
contentType
- && [ContentType.download, ContentType.audio].includes(contentType)
+ && [ContentType.download, ContentType.audio].includes(contentType)
)
return await res.blob() as T
diff --git a/web/types/feature.ts b/web/types/feature.ts
index 4a383f0a02..662405c1dd 100644
--- a/web/types/feature.ts
+++ b/web/types/feature.ts
@@ -4,7 +4,21 @@ export enum SSOProtocol {
OAuth2 = 'oauth2',
}
-export interface SystemFeatures {
+export enum LicenseStatus {
+ NONE = 'none',
+ INACTIVE = 'inactive',
+ ACTIVE = 'active',
+ EXPIRING = 'expiring',
+ EXPIRED = 'expired',
+ LOST = 'lost',
+}
+
+type License = {
+ status: LicenseStatus
+ expired_at: string | null
+}
+
+export type SystemFeatures = {
sso_enforced_for_signin: boolean
sso_enforced_for_signin_protocol: SSOProtocol | ''
sso_enforced_for_web: boolean
@@ -16,6 +30,7 @@ export interface SystemFeatures {
enable_social_oauth_login: boolean
is_allow_create_workspace: boolean
is_allow_register: boolean
+ license: License
}
export const defaultSystemFeatures: SystemFeatures = {
@@ -30,4 +45,8 @@ export const defaultSystemFeatures: SystemFeatures = {
enable_social_oauth_login: false,
is_allow_create_workspace: false,
is_allow_register: false,
+ license: {
+ status: LicenseStatus.NONE,
+ expired_at: '',
+ },
}
diff --git a/web/utils/context.ts b/web/utils/context.ts
new file mode 100644
index 0000000000..8829a679ce
--- /dev/null
+++ b/web/utils/context.ts
@@ -0,0 +1,45 @@
+import { type Context, type Provider, createContext, useContext } from 'react'
+import * as selector from 'use-context-selector'
+
+const createCreateCtxFunction = (
+ useContextImpl: typeof useContext,
+ createContextImpl: typeof createContext) => {
+ return function<T>({ name, defaultValue }: CreateCtxOptions<T> = {}): CreateCtxReturn<T> {
+ const emptySymbol = Symbol(`empty ${name}`)
+ // @ts-expect-error it's ok here
+ const context = createContextImpl(defaultValue ?? emptySymbol)
+ const useContextValue = () => {
+ const ctx = useContextImpl(context)
+ if (ctx === emptySymbol)
+ throw new Error(`No ${name ?? 'related'} context found.`)
+
+ return ctx
+ }
+ const result = [context.Provider, useContextValue, context] as CreateCtxReturn<T>
+ result.context = context
+ result.provider = context.Provider
+ result.useContextValue = useContextValue
+ return result
+ }
+}
+
+type CreateCtxOptions<T> = {
+ defaultValue?: T
+ name?: string
+}
+
+type CreateCtxReturn<T> = [Provider<T>, () => T, Context<T>] & {
+ context: Context<T>
+ provider: Provider<T>
+ useContextValue: () => T
+}
+
+// example
+// const [AppProvider, useApp, AppContext] = createCtx()
+
+export const createCtx = createCreateCtxFunction(useContext, createContext)
+
+export const createSelectorCtx = createCreateCtxFunction(
+ selector.useContext,
+ selector.createContext as typeof createContext,
+)
diff --git a/web/utils/index.ts b/web/utils/index.ts
index b8b499ae32..4342a08519 100644
--- a/web/utils/index.ts
+++ b/web/utils/index.ts
@@ -8,10 +8,8 @@ export async function asyncRunSafe<T = any>(fn: Promise<T>): Promise<[Error] | [null, T]> {
try {
return [null, await fn]
}
- catch (e) {
- if (e instanceof Error)
- return [e]
- return [new Error('unknown error')]
+ catch (e: any) {
+ return [e || new Error('unknown error')]
}
}
diff --git a/web/yarn.lock b/web/yarn.lock
index 88a57dba70..7a2ba6cc90 100644
--- a/web/yarn.lock
+++ b/web/yarn.lock
@@ -3108,6 +3108,30 @@
lodash.merge "^4.6.2"
postcss-selector-parser "6.0.10"
+"@tanstack/query-core@5.60.5":
+ version "5.60.5"
+ resolved "https://registry.yarnpkg.com/@tanstack/query-core/-/query-core-5.60.5.tgz#37b7c5ab7e6894cea9ef341299a7a3febc2ea361"
+ integrity sha512-jiS1aC3XI3BJp83ZiTuDLerTmn9P3U95r6p+6/SNauLJaYxfIC4dMuWygwnBHIZxjn2zJqEpj3nysmPieoxfPQ==
+
+"@tanstack/query-devtools@5.59.20":
+ version "5.59.20"
+ resolved "https://registry.yarnpkg.com/@tanstack/query-devtools/-/query-devtools-5.59.20.tgz#a827ac682ec1268fc9c99e7b6eb739f35b5606aa"
+ integrity sha512-vxhuQ+8VV4YWQSFxQLsuM+dnEKRY7VeRzpNabFXdhEwsBYLrjXlF1pM38A8WyKNLqZy8JjyRO8oP4Wd/oKHwuQ==
+
+"@tanstack/react-query-devtools@^5.60.5":
+ version "5.60.5"
+ resolved "https://registry.yarnpkg.com/@tanstack/react-query-devtools/-/react-query-devtools-5.60.5.tgz#fe398b4896a292fbe835d3fd4799e929de94c25a"
+ integrity sha512-lzANl0ih3CNKBGUoXhhkAAHI1Y4Yqs9Jf3iuTUsGiPpmF0RWXTeYFaQxc+h1PhJz3VwYrIYCwmPoNts0mSjSuA==
+ dependencies:
+ "@tanstack/query-devtools" "5.59.20"
+
+"@tanstack/react-query@^5.60.5":
+ version "5.60.5"
+ resolved "https://registry.yarnpkg.com/@tanstack/react-query/-/react-query-5.60.5.tgz#3194c390f7eff20542b321c3042880dc3f1a81e2"
+ integrity sha512-M77bOsPwj1wYE56gk7iJvxGAr4IC12NWdIDhT+Eo8ldkWRHMvIR8I/rufIvT1OXoV/bl7EECwuRuMlxxWtvW2Q==
+ dependencies:
+ "@tanstack/query-core" "5.60.5"
+
"@testing-library/dom@10.4.0":
version "10.4.0"
resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-10.4.0.tgz#82a9d9462f11d240ecadbf406607c6ceeeff43a8"