diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml index 116fc59ee8..37d351627b 100644 --- a/.github/workflows/api-tests.yml +++ b/.github/workflows/api-tests.yml @@ -39,25 +39,11 @@ jobs: - name: Install dependencies run: uv sync --project api --dev - - name: Run Unit tests - run: | - uv run --project api bash dev/pytest/pytest_unit_tests.sh - - name: Run pyrefly check run: | cd api uv add --dev pyrefly uv run pyrefly check || true - - name: Coverage Summary - run: | - set -x - # Extract coverage percentage and create a summary - TOTAL_COVERAGE=$(python -c 'import json; print(json.load(open("coverage.json"))["totals"]["percent_covered_display"])') - - # Create a detailed coverage summary - echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY - echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY - uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY - name: Run dify config tests run: uv run --project api dev/pytest/pytest_config_tests.py @@ -93,3 +79,19 @@ jobs: - name: Run TestContainers run: uv run --project api bash dev/pytest/pytest_testcontainers.sh + + - name: Run Unit tests + run: | + uv run --project api bash dev/pytest/pytest_unit_tests.sh + + - name: Coverage Summary + run: | + set -x + # Extract coverage percentage and create a summary + TOTAL_COVERAGE=$(python -c 'import json; print(json.load(open("coverage.json"))["totals"]["percent_covered_display"])') + + # Create a detailed coverage summary + echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY + echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY + uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY + diff --git a/.github/workflows/expose_service_ports.sh b/.github/workflows/expose_service_ports.sh index 01772ccf9f..e7d5f60288 100755 --- a/.github/workflows/expose_service_ports.sh +++ b/.github/workflows/expose_service_ports.sh @@ -1,6 +1,7 @@ #!/bin/bash yq eval '.services.weaviate.ports += ["8080:8080"]' -i docker/docker-compose.yaml +yq eval '.services.weaviate.ports += ["50051:50051"]' -i docker/docker-compose.yaml yq eval '.services.qdrant.ports += ["6333:6333"]' -i docker/docker-compose.yaml yq eval '.services.chroma.ports += ["8000:8000"]' -i docker/docker-compose.yaml yq eval '.services["milvus-standalone"].ports += ["19530:19530"]' -i docker/docker-compose.yaml @@ -13,4 +14,4 @@ yq eval '.services.tidb.ports += ["4000:4000"]' -i docker/tidb/docker-compose.ya yq eval '.services.oceanbase.ports += ["2881:2881"]' -i docker/docker-compose.yaml yq eval '.services.opengauss.ports += ["6600:6600"]' -i docker/docker-compose.yaml -echo "Ports exposed for sandbox, weaviate, tidb, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase, opengauss" +echo "Ports exposed for sandbox, weaviate (HTTP 8080, gRPC 50051), tidb, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase, opengauss" diff --git a/AGENTS.md b/AGENTS.md index 5859cd1bd9..2ef7931efc 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -14,7 +14,7 @@ The codebase is split into: - Run backend CLI commands through `uv run --project api `. -- Backend QA gate requires passing `make lint`, `make type-check`, and `uv run --project api --dev dev/pytest/pytest_unit_tests.sh` before review. +- Before submission, all backend modifications must pass local checks: `make lint`, `make type-check`, and `uv run --project api --dev dev/pytest/pytest_unit_tests.sh`. 
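For reference, the `python -c` one-liner in the relocated Coverage Summary step unpacks to roughly the following; a minimal sketch assuming `coverage.json` was produced by coverage.py's JSON reporter in the working directory:

```python
import json

# coverage.py's JSON report nests overall stats under "totals";
# "percent_covered_display" is a pre-rounded string such as "87".
with open("coverage.json") as f:
    report = json.load(f)

total_coverage = report["totals"]["percent_covered_display"]
print(f"Total Coverage: {total_coverage}%")
```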
- Use Makefile targets for linting and formatting; `make lint` and `make type-check` cover the required checks. diff --git a/README.md b/README.md index aadced582d..7c194e065a 100644 --- a/README.md +++ b/README.md @@ -129,8 +129,18 @@ Star Dify on GitHub and be instantly notified of new releases. ## Advanced Setup +### Custom Configurations + If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments). +### Metrics Monitoring with Grafana + +Import the dashboard into Grafana, using Dify's PostgreSQL database as the data source, to monitor metrics at the granularity of apps, tenants, messages, and more. + +- [Grafana Dashboard by @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Deployment with Kubernetes + If you'd like to configure a highly-available setup, there are community-contributed [Helm Charts](https://helm.sh/) and YAML files which allow Dify to be deployed on Kubernetes. - [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify) diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index 5b871f69f9..a02f8a4d49 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -189,6 +189,11 @@ class PluginConfig(BaseSettings): default="plugin-api-key", ) + PLUGIN_DAEMON_TIMEOUT: PositiveFloat | None = Field( + description="Timeout in seconds for requests to the plugin daemon (set to None to disable)", + default=300.0, + ) + INNER_API_KEY_FOR_PLUGIN: str = Field(description="Inner api key for plugin", default="inner-api-key") PLUGIN_REMOTE_INSTALL_HOST: str = Field( @@ -543,7 +548,7 @@ class UpdateConfig(BaseSettings): class WorkflowVariableTruncationConfig(BaseSettings): WORKFLOW_VARIABLE_TRUNCATION_MAX_SIZE: PositiveInt = Field( - # 100KB + # 1000 KiB 1024_000, description="Maximum size for variable to trigger final truncation.", ) diff --git a/api/constants/__init__.py b/api/constants/__init__.py index 9141fbea95..248cdfc09f 100644 --- a/api/constants/__init__.py +++ b/api/constants/__init__.py @@ -55,3 +55,12 @@ else: "properties", } DOCUMENT_EXTENSIONS: set[str] = convert_to_lower_and_upper_set(_doc_extensions) + +COOKIE_NAME_ACCESS_TOKEN = "access_token" +COOKIE_NAME_REFRESH_TOKEN = "refresh_token" +COOKIE_NAME_PASSPORT = "passport" +COOKIE_NAME_CSRF_TOKEN = "csrf_token" + +HEADER_NAME_CSRF_TOKEN = "X-CSRF-Token" +HEADER_NAME_APP_CODE = "X-App-Code" +HEADER_NAME_PASSPORT = "X-App-Passport" diff --git a/api/controllers/console/admin.py b/api/controllers/console/admin.py index 93f242ad28..2c4d8709eb 100644 --- a/api/controllers/console/admin.py +++ b/api/controllers/console/admin.py @@ -15,6 +15,7 @@ from constants.languages import supported_language from controllers.console import api, console_ns from controllers.console.wraps import only_edition_cloud from extensions.ext_database import db +from libs.token import extract_access_token from models.model import App, InstalledApp, RecommendedApp @@ -24,19 +25,9 @@ def admin_required(view: Callable[P, R]):
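# `extract_access_token` (imported above from libs.token) is not shown in this diff.
# A plausible sketch, assuming it reads a Bearer token from the Authorization header and
# falls back to the access-token cookie added to api/constants/__init__.py; this is a
# hypothetical reconstruction, not the confirmed implementation:
#
#     from flask import Request
#     from constants import COOKIE_NAME_ACCESS_TOKEN
#
#     def extract_access_token(request: Request) -> str | None:
#         auth_header = request.headers.get("Authorization")
#         if auth_header:
#             scheme, _, token = auth_header.partition(" ")
#             if scheme.lower() == "bearer" and token.strip():
#                 return token.strip()
#         return request.cookies.get(COOKIE_NAME_ACCESS_TOKEN)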
if not dify_config.ADMIN_API_KEY: raise Unauthorized("API key is invalid.") - auth_header = request.headers.get("Authorization") - if auth_header is None: + auth_token = extract_access_token(request) + if not auth_token: raise Unauthorized("Authorization header is missing.") - - if " " not in auth_header: - raise Unauthorized("Invalid Authorization header format. Expected 'Bearer ' format.") - - auth_scheme, auth_token = auth_header.split(None, 1) - auth_scheme = auth_scheme.lower() - - if auth_scheme != "bearer": - raise Unauthorized("Invalid Authorization header format. Expected 'Bearer ' format.") - if auth_token != dify_config.ADMIN_API_KEY: raise Unauthorized("API key is invalid.") @@ -70,15 +61,17 @@ class InsertExploreAppListApi(Resource): @only_edition_cloud @admin_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("app_id", type=str, required=True, nullable=False, location="json") - parser.add_argument("desc", type=str, location="json") - parser.add_argument("copyright", type=str, location="json") - parser.add_argument("privacy_policy", type=str, location="json") - parser.add_argument("custom_disclaimer", type=str, location="json") - parser.add_argument("language", type=supported_language, required=True, nullable=False, location="json") - parser.add_argument("category", type=str, required=True, nullable=False, location="json") - parser.add_argument("position", type=int, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("app_id", type=str, required=True, nullable=False, location="json") + .add_argument("desc", type=str, location="json") + .add_argument("copyright", type=str, location="json") + .add_argument("privacy_policy", type=str, location="json") + .add_argument("custom_disclaimer", type=str, location="json") + .add_argument("language", type=supported_language, required=True, nullable=False, location="json") + .add_argument("category", type=str, required=True, nullable=False, location="json") + .add_argument("position", type=int, required=True, nullable=False, location="json") + ) args = parser.parse_args() app = db.session.execute(select(App).where(App.id == args["app_id"])).scalar_one_or_none() diff --git a/api/controllers/console/apikey.py b/api/controllers/console/apikey.py index b1e3813f33..4f04af7932 100644 --- a/api/controllers/console/apikey.py +++ b/api/controllers/console/apikey.py @@ -7,13 +7,12 @@ from werkzeug.exceptions import Forbidden from extensions.ext_database import db from libs.helper import TimestampField -from libs.login import current_user, login_required -from models.account import Account +from libs.login import current_account_with_tenant, login_required from models.dataset import Dataset from models.model import ApiToken, App from . 
import api, console_ns -from .wraps import account_initialization_required, setup_required +from .wraps import account_initialization_required, edit_permission_required, setup_required api_key_fields = { "id": fields.String, @@ -57,9 +56,9 @@ class BaseApiKeyListResource(Resource): def get(self, resource_id): assert self.resource_id_field is not None, "resource_id_field must be set" resource_id = str(resource_id) - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None - _get_resource(resource_id, current_user.current_tenant_id, self.resource_model) + _, current_tenant_id = current_account_with_tenant() + + _get_resource(resource_id, current_tenant_id, self.resource_model) keys = db.session.scalars( select(ApiToken).where( ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id @@ -68,15 +67,12 @@ class BaseApiKeyListResource(Resource): return {"items": keys} @marshal_with(api_key_fields) + @edit_permission_required def post(self, resource_id): assert self.resource_id_field is not None, "resource_id_field must be set" resource_id = str(resource_id) - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None - _get_resource(resource_id, current_user.current_tenant_id, self.resource_model) - if not current_user.has_edit_permission: - raise Forbidden() - + _, current_tenant_id = current_account_with_tenant() + _get_resource(resource_id, current_tenant_id, self.resource_model) current_key_count = ( db.session.query(ApiToken) .where(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id) @@ -93,7 +89,7 @@ class BaseApiKeyListResource(Resource): key = ApiToken.generate_api_key(self.token_prefix or "", 24) api_token = ApiToken() setattr(api_token, self.resource_id_field, resource_id) - api_token.tenant_id = current_user.current_tenant_id + api_token.tenant_id = current_tenant_id api_token.token = key api_token.type = self.resource_type db.session.add(api_token) @@ -112,9 +108,8 @@ class BaseApiKeyResource(Resource): assert self.resource_id_field is not None, "resource_id_field must be set" resource_id = str(resource_id) api_key_id = str(api_key_id) - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None - _get_resource(resource_id, current_user.current_tenant_id, self.resource_model) + current_user, current_tenant_id = current_account_with_tenant() + _get_resource(resource_id, current_tenant_id, self.resource_model) # The role of the current user in the ta table must be admin or owner if not current_user.is_admin_or_owner: @@ -158,11 +153,6 @@ class AppApiKeyListResource(BaseApiKeyListResource): """Create a new API key for an app""" return super().post(resource_id) - def after_request(self, resp): - resp.headers["Access-Control-Allow-Origin"] = "*" - resp.headers["Access-Control-Allow-Credentials"] = "true" - return resp - resource_type = "app" resource_model = App resource_id_field = "app_id" @@ -179,11 +169,6 @@ class AppApiKeyResource(BaseApiKeyResource): """Delete an API key for an app""" return super().delete(resource_id, api_key_id) - def after_request(self, resp): - resp.headers["Access-Control-Allow-Origin"] = "*" - resp.headers["Access-Control-Allow-Credentials"] = "true" - return resp - resource_type = "app" resource_model = App resource_id_field = "app_id" @@ -208,11 +193,6 @@ class DatasetApiKeyListResource(BaseApiKeyListResource): """Create a new API key for a dataset""" return 
super().post(resource_id) - def after_request(self, resp): - resp.headers["Access-Control-Allow-Origin"] = "*" - resp.headers["Access-Control-Allow-Credentials"] = "true" - return resp - resource_type = "dataset" resource_model = Dataset resource_id_field = "dataset_id" @@ -229,11 +209,6 @@ class DatasetApiKeyResource(BaseApiKeyResource): """Delete an API key for a dataset""" return super().delete(resource_id, api_key_id) - def after_request(self, resp): - resp.headers["Access-Control-Allow-Origin"] = "*" - resp.headers["Access-Control-Allow-Credentials"] = "true" - return resp - resource_type = "dataset" resource_model = Dataset resource_id_field = "dataset_id" diff --git a/api/controllers/console/app/advanced_prompt_template.py b/api/controllers/console/app/advanced_prompt_template.py index 315825db79..5885d7b447 100644 --- a/api/controllers/console/app/advanced_prompt_template.py +++ b/api/controllers/console/app/advanced_prompt_template.py @@ -25,11 +25,13 @@ class AdvancedPromptTemplateList(Resource): @login_required @account_initialization_required def get(self): - parser = reqparse.RequestParser() - parser.add_argument("app_mode", type=str, required=True, location="args") - parser.add_argument("model_mode", type=str, required=True, location="args") - parser.add_argument("has_context", type=str, required=False, default="true", location="args") - parser.add_argument("model_name", type=str, required=True, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("app_mode", type=str, required=True, location="args") + .add_argument("model_mode", type=str, required=True, location="args") + .add_argument("has_context", type=str, required=False, default="true", location="args") + .add_argument("model_name", type=str, required=True, location="args") + ) args = parser.parse_args() return AdvancedPromptTemplateService.get_prompt(args) diff --git a/api/controllers/console/app/agent.py b/api/controllers/console/app/agent.py index c063f336c7..717263a74d 100644 --- a/api/controllers/console/app/agent.py +++ b/api/controllers/console/app/agent.py @@ -27,9 +27,11 @@ class AgentLogApi(Resource): @get_app_model(mode=[AppMode.AGENT_CHAT]) def get(self, app_model): """Get agent logs""" - parser = reqparse.RequestParser() - parser.add_argument("message_id", type=uuid_value, required=True, location="args") - parser.add_argument("conversation_id", type=uuid_value, required=True, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("message_id", type=uuid_value, required=True, location="args") + .add_argument("conversation_id", type=uuid_value, required=True, location="args") + ) args = parser.parse_args() diff --git a/api/controllers/console/app/annotation.py b/api/controllers/console/app/annotation.py index d0ee11fe75..932214058a 100644 --- a/api/controllers/console/app/annotation.py +++ b/api/controllers/console/app/annotation.py @@ -1,15 +1,14 @@ from typing import Literal from flask import request -from flask_login import current_user from flask_restx import Resource, fields, marshal, marshal_with, reqparse -from werkzeug.exceptions import Forbidden from controllers.common.errors import NoFileUploadedError, TooManyFilesError from controllers.console import api, console_ns from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_resource_check, + edit_permission_required, setup_required, ) from extensions.ext_redis import redis_client @@ -42,15 +41,15 @@ class AnnotationReplyActionApi(Resource): @login_required 
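# The `edit_permission_required` decorator imported from controllers.console.wraps
# replaces the repeated inline `if not current_user.is_editor: raise Forbidden()`
# checks throughout this PR. Its implementation is not part of the diff; a plausible
# sketch, assuming it denies the request when the current account lacks edit rights:
#
#     from functools import wraps
#     from werkzeug.exceptions import Forbidden
#     from libs.login import current_account_with_tenant
#
#     def edit_permission_required(view):
#         @wraps(view)
#         def decorated(*args, **kwargs):
#             current_user, _ = current_account_with_tenant()
#             if not current_user.has_edit_permission:
#                 raise Forbidden()
#             return view(*args, **kwargs)
#         return decorated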
@account_initialization_required @cloud_edition_billing_resource_check("annotation") + @edit_permission_required def post(self, app_id, action: Literal["enable", "disable"]): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) - parser = reqparse.RequestParser() - parser.add_argument("score_threshold", required=True, type=float, location="json") - parser.add_argument("embedding_provider_name", required=True, type=str, location="json") - parser.add_argument("embedding_model_name", required=True, type=str, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("score_threshold", required=True, type=float, location="json") + .add_argument("embedding_provider_name", required=True, type=str, location="json") + .add_argument("embedding_model_name", required=True, type=str, location="json") + ) args = parser.parse_args() if action == "enable": result = AppAnnotationService.enable_app_annotation(args, app_id) @@ -69,10 +68,8 @@ class AppAnnotationSettingDetailApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, app_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) result = AppAnnotationService.get_app_annotation_setting_by_app_id(app_id) return result, 200 @@ -98,15 +95,12 @@ class AppAnnotationSettingUpdateApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, app_id, annotation_setting_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) annotation_setting_id = str(annotation_setting_id) - parser = reqparse.RequestParser() - parser.add_argument("score_threshold", required=True, type=float, location="json") + parser = reqparse.RequestParser().add_argument("score_threshold", required=True, type=float, location="json") args = parser.parse_args() result = AppAnnotationService.update_app_annotation_setting(app_id, annotation_setting_id, args) @@ -124,10 +118,8 @@ class AnnotationReplyActionStatusApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("annotation") + @edit_permission_required def get(self, app_id, job_id, action): - if not current_user.is_editor: - raise Forbidden() - job_id = str(job_id) app_annotation_job_key = f"{action}_app_annotation_job_{str(job_id)}" cache_result = redis_client.get(app_annotation_job_key) @@ -159,10 +151,8 @@ class AnnotationApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, app_id): - if not current_user.is_editor: - raise Forbidden() - page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) keyword = request.args.get("keyword", default="", type=str) @@ -198,14 +188,14 @@ class AnnotationApi(Resource): @account_initialization_required @cloud_edition_billing_resource_check("annotation") @marshal_with(annotation_fields) + @edit_permission_required def post(self, app_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) - parser = reqparse.RequestParser() - parser.add_argument("question", required=True, type=str, location="json") - parser.add_argument("answer", required=True, type=str, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("question", required=True, type=str, location="json") + .add_argument("answer", required=True, type=str, location="json") + ) args = parser.parse_args() annotation = 
AppAnnotationService.insert_app_annotation_directly(args, app_id) return annotation @@ -213,10 +203,8 @@ class AnnotationApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def delete(self, app_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) # Use request.args.getlist to get annotation_ids array directly @@ -249,10 +237,8 @@ class AnnotationExportApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, app_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) annotation_list = AppAnnotationService.export_annotation_list_by_app_id(app_id) response = {"data": marshal(annotation_list, annotation_fields)} @@ -271,16 +257,16 @@ class AnnotationUpdateDeleteApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("annotation") + @edit_permission_required @marshal_with(annotation_fields) def post(self, app_id, annotation_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) annotation_id = str(annotation_id) - parser = reqparse.RequestParser() - parser.add_argument("question", required=True, type=str, location="json") - parser.add_argument("answer", required=True, type=str, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("question", required=True, type=str, location="json") + .add_argument("answer", required=True, type=str, location="json") + ) args = parser.parse_args() annotation = AppAnnotationService.update_app_annotation_directly(args, app_id, annotation_id) return annotation @@ -288,10 +274,8 @@ class AnnotationUpdateDeleteApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def delete(self, app_id, annotation_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) annotation_id = str(annotation_id) AppAnnotationService.delete_app_annotation(app_id, annotation_id) @@ -310,10 +294,8 @@ class AnnotationBatchImportApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("annotation") + @edit_permission_required def post(self, app_id): - if not current_user.is_editor: - raise Forbidden() - app_id = str(app_id) # check file if "file" not in request.files: @@ -341,10 +323,8 @@ class AnnotationBatchImportStatusApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("annotation") + @edit_permission_required def get(self, app_id, job_id): - if not current_user.is_editor: - raise Forbidden() - job_id = str(job_id) indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}" cache_result = redis_client.get(indexing_cache_key) @@ -376,10 +356,8 @@ class AnnotationHitHistoryListApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, app_id, annotation_id): - if not current_user.is_editor: - raise Forbidden() - page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) app_id = str(app_id) diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py index 3927685af3..17505d69b2 100644 --- a/api/controllers/console/app/app.py +++ b/api/controllers/console/app/app.py @@ -1,7 +1,5 @@ import uuid -from typing import cast -from flask_login import current_user from flask_restx import Resource, fields, inputs, marshal, 
marshal_with, reqparse from sqlalchemy import select from sqlalchemy.orm import Session @@ -12,15 +10,16 @@ from controllers.console.app.wraps import get_app_model from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_resource_check, + edit_permission_required, enterprise_license_required, setup_required, ) from core.ops.ops_trace_manager import OpsTraceManager from extensions.ext_database import db from fields.app_fields import app_detail_fields, app_detail_fields_with_site, app_pagination_fields -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from libs.validators import validate_description_length -from models import Account, App +from models import App from services.app_dsl_service import AppDslService, ImportMode from services.app_service import AppService from services.enterprise.enterprise_service import EnterpriseService @@ -56,6 +55,7 @@ class AppListApi(Resource): @enterprise_license_required def get(self): """Get app list""" + current_user, current_tenant_id = current_account_with_tenant() def uuid_list(value): try: @@ -63,34 +63,36 @@ class AppListApi(Resource): except ValueError: abort(400, message="Invalid UUID format in tag_ids.") - parser = reqparse.RequestParser() - parser.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args") - parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") - parser.add_argument( - "mode", - type=str, - choices=[ - "completion", - "chat", - "advanced-chat", - "workflow", - "agent-chat", - "channel", - "all", - ], - default="all", - location="args", - required=False, + parser = ( + reqparse.RequestParser() + .add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args") + .add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") + .add_argument( + "mode", + type=str, + choices=[ + "completion", + "chat", + "advanced-chat", + "workflow", + "agent-chat", + "channel", + "all", + ], + default="all", + location="args", + required=False, + ) + .add_argument("name", type=str, location="args", required=False) + .add_argument("tag_ids", type=uuid_list, location="args", required=False) + .add_argument("is_created_by_me", type=inputs.boolean, location="args", required=False) ) - parser.add_argument("name", type=str, location="args", required=False) - parser.add_argument("tag_ids", type=uuid_list, location="args", required=False) - parser.add_argument("is_created_by_me", type=inputs.boolean, location="args", required=False) args = parser.parse_args() # get app list app_service = AppService() - app_pagination = app_service.get_paginate_apps(current_user.id, current_user.current_tenant_id, args) + app_pagination = app_service.get_paginate_apps(current_user.id, current_tenant_id, args) if not app_pagination: return {"data": [], "total": 0, "page": 1, "limit": 20, "has_more": False} @@ -129,30 +131,26 @@ class AppListApi(Resource): @account_initialization_required @marshal_with(app_detail_fields) @cloud_edition_billing_resource_check("apps") + @edit_permission_required def post(self): """Create app""" - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=True, location="json") - parser.add_argument("description", type=validate_description_length, location="json") - parser.add_argument("mode", type=str, choices=ALLOW_CREATE_APP_MODES, location="json") - 
parser.add_argument("icon_type", type=str, location="json") - parser.add_argument("icon", type=str, location="json") - parser.add_argument("icon_background", type=str, location="json") + current_user, current_tenant_id = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, required=True, location="json") + .add_argument("description", type=validate_description_length, location="json") + .add_argument("mode", type=str, choices=ALLOW_CREATE_APP_MODES, location="json") + .add_argument("icon_type", type=str, location="json") + .add_argument("icon", type=str, location="json") + .add_argument("icon_background", type=str, location="json") + ) args = parser.parse_args() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - if "mode" not in args or args["mode"] is None: raise BadRequest("mode is required") app_service = AppService() - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") - if current_user.current_tenant_id is None: - raise ValueError("current_user.current_tenant_id cannot be None") - app = app_service.create_app(current_user.current_tenant_id, args, current_user) + app = app_service.create_app(current_tenant_id, args, current_user) return app, 201 @@ -205,21 +203,20 @@ class AppApi(Resource): @login_required @account_initialization_required @get_app_model + @edit_permission_required @marshal_with(app_detail_fields_with_site) def put(self, app_model): """Update app""" - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=True, nullable=False, location="json") - parser.add_argument("description", type=validate_description_length, location="json") - parser.add_argument("icon_type", type=str, location="json") - parser.add_argument("icon", type=str, location="json") - parser.add_argument("icon_background", type=str, location="json") - parser.add_argument("use_icon_as_answer_icon", type=bool, location="json") - parser.add_argument("max_active_requests", type=int, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, required=True, nullable=False, location="json") + .add_argument("description", type=validate_description_length, location="json") + .add_argument("icon_type", type=str, location="json") + .add_argument("icon", type=str, location="json") + .add_argument("icon_background", type=str, location="json") + .add_argument("use_icon_as_answer_icon", type=bool, location="json") + .add_argument("max_active_requests", type=int, location="json") + ) args = parser.parse_args() app_service = AppService() @@ -248,12 +245,9 @@ class AppApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def delete(self, app_model): """Delete app""" - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - app_service = AppService() app_service.delete_app(app_model) @@ -283,27 +277,28 @@ class AppCopyApi(Resource): @login_required @account_initialization_required @get_app_model + @edit_permission_required @marshal_with(app_detail_fields_with_site) def post(self, app_model): """Copy app""" # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: 
- raise Forbidden() + current_user, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, location="json") - parser.add_argument("description", type=validate_description_length, location="json") - parser.add_argument("icon_type", type=str, location="json") - parser.add_argument("icon", type=str, location="json") - parser.add_argument("icon_background", type=str, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, location="json") + .add_argument("description", type=validate_description_length, location="json") + .add_argument("icon_type", type=str, location="json") + .add_argument("icon", type=str, location="json") + .add_argument("icon_background", type=str, location="json") + ) args = parser.parse_args() with Session(db.engine) as session: import_service = AppDslService(session) yaml_content = import_service.export_dsl(app_model=app_model, include_secret=True) - account = cast(Account, current_user) result = import_service.import_app( - account=account, + account=current_user, import_mode=ImportMode.YAML_CONTENT, yaml_content=yaml_content, name=args.get("name"), @@ -340,16 +335,15 @@ class AppExportApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, app_model): """Export app""" - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - # Add include_secret params - parser = reqparse.RequestParser() - parser.add_argument("include_secret", type=inputs.boolean, default=False, location="args") - parser.add_argument("workflow_id", type=str, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("include_secret", type=inputs.boolean, default=False, location="args") + .add_argument("workflow_id", type=str, location="args") + ) args = parser.parse_args() return { @@ -371,13 +365,9 @@ class AppNameApi(Resource): @account_initialization_required @get_app_model @marshal_with(app_detail_fields) + @edit_permission_required def post(self, app_model): - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=True, location="json") + parser = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json") args = parser.parse_args() app_service = AppService() @@ -408,14 +398,13 @@ class AppIconApi(Resource): @account_initialization_required @get_app_model @marshal_with(app_detail_fields) + @edit_permission_required def post(self, app_model): - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("icon", type=str, location="json") - parser.add_argument("icon_background", type=str, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("icon", type=str, location="json") + .add_argument("icon_background", type=str, location="json") + ) args = parser.parse_args() app_service = AppService() @@ -441,13 +430,9 @@ class AppSiteStatus(Resource): @account_initialization_required @get_app_model @marshal_with(app_detail_fields) + @edit_permission_required def post(self, app_model): - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - - parser 
= reqparse.RequestParser() - parser.add_argument("enable_site", type=bool, required=True, location="json") + parser = reqparse.RequestParser().add_argument("enable_site", type=bool, required=True, location="json") args = parser.parse_args() app_service = AppService() @@ -475,11 +460,11 @@ class AppApiStatus(Resource): @marshal_with(app_detail_fields) def post(self, app_model): # The role of the current user in the ta table must be admin or owner + current_user, _ = current_account_with_tenant() if not current_user.is_admin_or_owner: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("enable_api", type=bool, required=True, location="json") + parser = reqparse.RequestParser().add_argument("enable_api", type=bool, required=True, location="json") args = parser.parse_args() app_service = AppService() @@ -520,13 +505,14 @@ class AppTraceApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, app_id): # add app trace - if not current_user.is_editor: - raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("enabled", type=bool, required=True, location="json") - parser.add_argument("tracing_provider", type=str, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("enabled", type=bool, required=True, location="json") + .add_argument("tracing_provider", type=str, required=True, location="json") + ) args = parser.parse_args() OpsTraceManager.update_app_tracing_config( diff --git a/api/controllers/console/app/app_import.py b/api/controllers/console/app/app_import.py index 037561cfed..d902c129ad 100644 --- a/api/controllers/console/app/app_import.py +++ b/api/controllers/console/app/app_import.py @@ -1,20 +1,16 @@ -from typing import cast - -from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from sqlalchemy.orm import Session -from werkzeug.exceptions import Forbidden from controllers.console.app.wraps import get_app_model from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_resource_check, + edit_permission_required, setup_required, ) from extensions.ext_database import db from fields.app_fields import app_import_check_dependencies_fields, app_import_fields -from libs.login import login_required -from models import Account +from libs.login import current_account_with_tenant, login_required from models.model import App from services.app_dsl_service import AppDslService, ImportStatus from services.enterprise.enterprise_service import EnterpriseService @@ -30,28 +26,29 @@ class AppImportApi(Resource): @account_initialization_required @marshal_with(app_import_fields) @cloud_edition_billing_resource_check("apps") + @edit_permission_required def post(self): # Check user role first - if not current_user.is_editor: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("mode", type=str, required=True, location="json") - parser.add_argument("yaml_content", type=str, location="json") - parser.add_argument("yaml_url", type=str, location="json") - parser.add_argument("name", type=str, location="json") - parser.add_argument("description", type=str, location="json") - parser.add_argument("icon_type", type=str, location="json") - parser.add_argument("icon", type=str, location="json") - parser.add_argument("icon_background", type=str, location="json") - parser.add_argument("app_id", type=str, location="json") + current_user, _ = current_account_with_tenant() + 
parser = ( + reqparse.RequestParser() + .add_argument("mode", type=str, required=True, location="json") + .add_argument("yaml_content", type=str, location="json") + .add_argument("yaml_url", type=str, location="json") + .add_argument("name", type=str, location="json") + .add_argument("description", type=str, location="json") + .add_argument("icon_type", type=str, location="json") + .add_argument("icon", type=str, location="json") + .add_argument("icon_background", type=str, location="json") + .add_argument("app_id", type=str, location="json") + ) args = parser.parse_args() # Create service with session with Session(db.engine) as session: import_service = AppDslService(session) # Import app - account = cast(Account, current_user) + account = current_user result = import_service.import_app( account=account, import_mode=args["mode"], @@ -83,16 +80,16 @@ class AppImportConfirmApi(Resource): @login_required @account_initialization_required @marshal_with(app_import_fields) + @edit_permission_required def post(self, import_id): # Check user role first - if not current_user.is_editor: - raise Forbidden() + current_user, _ = current_account_with_tenant() # Create service with session with Session(db.engine) as session: import_service = AppDslService(session) # Confirm import - account = cast(Account, current_user) + account = current_user result = import_service.confirm_import(import_id=import_id, account=account) session.commit() @@ -109,10 +106,8 @@ class AppImportCheckDependenciesApi(Resource): @get_app_model @account_initialization_required @marshal_with(app_import_check_dependencies_fields) + @edit_permission_required def get(self, app_model: App): - if not current_user.is_editor: - raise Forbidden() - with Session(db.engine) as session: import_service = AppDslService(session) result = import_service.check_dependencies(app_model=app_model) diff --git a/api/controllers/console/app/audio.py b/api/controllers/console/app/audio.py index 7d659dae0d..8170ba271a 100644 --- a/api/controllers/console/app/audio.py +++ b/api/controllers/console/app/audio.py @@ -111,11 +111,13 @@ class ChatMessageTextApi(Resource): @account_initialization_required def post(self, app_model: App): try: - parser = reqparse.RequestParser() - parser.add_argument("message_id", type=str, location="json") - parser.add_argument("text", type=str, location="json") - parser.add_argument("voice", type=str, location="json") - parser.add_argument("streaming", type=bool, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("message_id", type=str, location="json") + .add_argument("text", type=str, location="json") + .add_argument("voice", type=str, location="json") + .add_argument("streaming", type=bool, location="json") + ) args = parser.parse_args() message_id = args.get("message_id", None) @@ -166,8 +168,7 @@ class TextModesApi(Resource): @account_initialization_required def get(self, app_model): try: - parser = reqparse.RequestParser() - parser.add_argument("language", type=str, required=True, location="args") + parser = reqparse.RequestParser().add_argument("language", type=str, required=True, location="args") args = parser.parse_args() response = AudioService.transcript_tts_voices( diff --git a/api/controllers/console/app/completion.py b/api/controllers/console/app/completion.py index 2f7b90e7fb..d7bc3cc20d 100644 --- a/api/controllers/console/app/completion.py +++ b/api/controllers/console/app/completion.py @@ -2,7 +2,7 @@ import logging from flask import request from flask_restx import Resource, fields, 
reqparse -from werkzeug.exceptions import Forbidden, InternalServerError, NotFound +from werkzeug.exceptions import InternalServerError, NotFound import services from controllers.console import api, console_ns @@ -15,7 +15,7 @@ from controllers.console.app.error import ( ProviderQuotaExceededError, ) from controllers.console.app.wraps import get_app_model -from controllers.console.wraps import account_initialization_required, setup_required +from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError from core.app.apps.base_app_queue_manager import AppQueueManager from core.app.entities.app_invoke_entities import InvokeFrom @@ -64,13 +64,15 @@ class CompletionMessageApi(Resource): @account_initialization_required @get_app_model(mode=AppMode.COMPLETION) def post(self, app_model): - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, location="json") - parser.add_argument("query", type=str, location="json", default="") - parser.add_argument("files", type=list, required=False, location="json") - parser.add_argument("model_config", type=dict, required=True, location="json") - parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") - parser.add_argument("retriever_from", type=str, required=False, default="dev", location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, location="json") + .add_argument("query", type=str, location="json", default="") + .add_argument("files", type=list, required=False, location="json") + .add_argument("model_config", type=dict, required=True, location="json") + .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") + .add_argument("retriever_from", type=str, required=False, default="dev", location="json") + ) args = parser.parse_args() streaming = args["response_mode"] != "blocking" @@ -151,22 +153,19 @@ class ChatMessageApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT]) + @edit_permission_required def post(self, app_model): - if not isinstance(current_user, Account): - raise Forbidden() - - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, location="json") - parser.add_argument("query", type=str, required=True, location="json") - parser.add_argument("files", type=list, required=False, location="json") - parser.add_argument("model_config", type=dict, required=True, location="json") - parser.add_argument("conversation_id", type=uuid_value, location="json") - parser.add_argument("parent_message_id", type=uuid_value, required=False, location="json") - parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") - parser.add_argument("retriever_from", type=str, required=False, default="dev", location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, location="json") + .add_argument("query", type=str, required=True, location="json") + .add_argument("files", type=list, required=False, location="json") + .add_argument("model_config", type=dict, required=True, location="json") + .add_argument("conversation_id", type=uuid_value, location="json") + .add_argument("parent_message_id", type=uuid_value, required=False, 
location="json") + .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") + .add_argument("retriever_from", type=str, required=False, default="dev", location="json") + ) args = parser.parse_args() streaming = args["response_mode"] != "blocking" diff --git a/api/controllers/console/app/conversation.py b/api/controllers/console/app/conversation.py index 3b8dff613b..d5fa70d678 100644 --- a/api/controllers/console/app/conversation.py +++ b/api/controllers/console/app/conversation.py @@ -1,17 +1,16 @@ from datetime import datetime -import pytz # pip install pytz +import pytz import sqlalchemy as sa -from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from flask_restx.inputs import int_range from sqlalchemy import func, or_ from sqlalchemy.orm import joinedload -from werkzeug.exceptions import Forbidden, NotFound +from werkzeug.exceptions import NotFound from controllers.console import api, console_ns from controllers.console.app.wraps import get_app_model -from controllers.console.wraps import account_initialization_required, setup_required +from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from core.app.entities.app_invoke_entities import InvokeFrom from extensions.ext_database import db from fields.conversation_fields import ( @@ -22,8 +21,8 @@ from fields.conversation_fields import ( ) from libs.datetime_utils import naive_utc_now from libs.helper import DatetimeString -from libs.login import login_required -from models import Account, Conversation, EndUser, Message, MessageAnnotation +from libs.login import current_account_with_tenant, login_required +from models import Conversation, EndUser, Message, MessageAnnotation from models.model import AppMode from services.conversation_service import ConversationService from services.errors.conversation import ConversationNotExistsError @@ -57,18 +56,24 @@ class CompletionConversationApi(Resource): @account_initialization_required @get_app_model(mode=AppMode.COMPLETION) @marshal_with(conversation_pagination_fields) + @edit_permission_required def get(self, app_model): - if not current_user.is_editor: - raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("keyword", type=str, location="args") - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument( - "annotation_status", type=str, choices=["annotated", "not_annotated", "all"], default="all", location="args" + current_user, _ = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("keyword", type=str, location="args") + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument( + "annotation_status", + type=str, + choices=["annotated", "not_annotated", "all"], + default="all", + location="args", + ) + .add_argument("page", type=int_range(1, 99999), default=1, location="args") + .add_argument("limit", type=int_range(1, 100), default=20, location="args") ) - parser.add_argument("page", type=int_range(1, 99999), default=1, location="args") - parser.add_argument("limit", type=int_range(1, 100), default=20, location="args") args = parser.parse_args() query = sa.select(Conversation).where( @@ -84,6 +89,7 @@ class CompletionConversationApi(Resource): ) account = 
current_user + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -137,9 +143,8 @@ class CompletionConversationDetailApi(Resource): @account_initialization_required @get_app_model(mode=AppMode.COMPLETION) @marshal_with(conversation_message_detail_fields) + @edit_permission_required def get(self, app_model, conversation_id): - if not current_user.is_editor: - raise Forbidden() conversation_id = str(conversation_id) return _get_conversation(app_model, conversation_id) @@ -154,14 +159,12 @@ class CompletionConversationDetailApi(Resource): @login_required @account_initialization_required @get_app_model(mode=AppMode.COMPLETION) + @edit_permission_required def delete(self, app_model, conversation_id): - if not current_user.is_editor: - raise Forbidden() + current_user, _ = current_account_with_tenant() conversation_id = str(conversation_id) try: - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") ConversationService.delete(app_model, conversation_id, current_user) except ConversationNotExistsError: raise NotFound("Conversation Not Exists.") @@ -206,26 +209,32 @@ class ChatConversationApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) @marshal_with(conversation_with_summary_pagination_fields) + @edit_permission_required def get(self, app_model): - if not current_user.is_editor: - raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("keyword", type=str, location="args") - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument( - "annotation_status", type=str, choices=["annotated", "not_annotated", "all"], default="all", location="args" - ) - parser.add_argument("message_count_gte", type=int_range(1, 99999), required=False, location="args") - parser.add_argument("page", type=int_range(1, 99999), required=False, default=1, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") - parser.add_argument( - "sort_by", - type=str, - choices=["created_at", "-created_at", "updated_at", "-updated_at"], - required=False, - default="-updated_at", - location="args", + current_user, _ = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("keyword", type=str, location="args") + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument( + "annotation_status", + type=str, + choices=["annotated", "not_annotated", "all"], + default="all", + location="args", + ) + .add_argument("message_count_gte", type=int_range(1, 99999), required=False, location="args") + .add_argument("page", type=int_range(1, 99999), required=False, default=1, location="args") + .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + .add_argument( + "sort_by", + type=str, + choices=["created_at", "-created_at", "updated_at", "-updated_at"], + required=False, + default="-updated_at", + location="args", + ) ) args = parser.parse_args() @@ -260,6 +269,7 @@ class ChatConversationApi(Resource): ) account = current_user + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -341,9 +351,8 @@ class 
ChatConversationDetailApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) @marshal_with(conversation_detail_fields) + @edit_permission_required def get(self, app_model, conversation_id): - if not current_user.is_editor: - raise Forbidden() conversation_id = str(conversation_id) return _get_conversation(app_model, conversation_id) @@ -358,14 +367,12 @@ class ChatConversationDetailApi(Resource): @login_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) @account_initialization_required + @edit_permission_required def delete(self, app_model, conversation_id): - if not current_user.is_editor: - raise Forbidden() + current_user, _ = current_account_with_tenant() conversation_id = str(conversation_id) try: - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") ConversationService.delete(app_model, conversation_id, current_user) except ConversationNotExistsError: raise NotFound("Conversation Not Exists.") @@ -374,6 +381,7 @@ class ChatConversationDetailApi(Resource): def _get_conversation(app_model, conversation_id): + current_user, _ = current_account_with_tenant() conversation = ( db.session.query(Conversation) .where(Conversation.id == conversation_id, Conversation.app_id == app_model.id) diff --git a/api/controllers/console/app/conversation_variables.py b/api/controllers/console/app/conversation_variables.py index 8a65a89963..d4c0b5697f 100644 --- a/api/controllers/console/app/conversation_variables.py +++ b/api/controllers/console/app/conversation_variables.py @@ -29,8 +29,7 @@ class ConversationVariablesApi(Resource): @get_app_model(mode=AppMode.ADVANCED_CHAT) @marshal_with(paginated_conversation_variable_fields) def get(self, app_model): - parser = reqparse.RequestParser() - parser.add_argument("conversation_id", type=str, location="args") + parser = reqparse.RequestParser().add_argument("conversation_id", type=str, location="args") args = parser.parse_args() stmt = ( diff --git a/api/controllers/console/app/generator.py b/api/controllers/console/app/generator.py index 230ccdca15..b6ca97ab4f 100644 --- a/api/controllers/console/app/generator.py +++ b/api/controllers/console/app/generator.py @@ -1,6 +1,5 @@ from collections.abc import Sequence -from flask_login import current_user from flask_restx import Resource, fields, reqparse from controllers.console import api, console_ns @@ -17,7 +16,7 @@ from core.helper.code_executor.python3.python3_code_provider import Python3CodeP from core.llm_generator.llm_generator import LLMGenerator from core.model_runtime.errors.invoke import InvokeError from extensions.ext_database import db -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models import App from services.workflow_service import WorkflowService @@ -43,16 +42,18 @@ class RuleGenerateApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("instruction", type=str, required=True, nullable=False, location="json") - parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json") - parser.add_argument("no_variable", type=bool, required=True, default=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("instruction", type=str, required=True, nullable=False, location="json") + .add_argument("model_config", type=dict, required=True, 
nullable=False, location="json") + .add_argument("no_variable", type=bool, required=True, default=False, location="json") + ) args = parser.parse_args() + _, current_tenant_id = current_account_with_tenant() - account = current_user try: rules = LLMGenerator.generate_rule_config( - tenant_id=account.current_tenant_id, + tenant_id=current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], no_variable=args["no_variable"], @@ -93,17 +94,19 @@ class RuleCodeGenerateApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("instruction", type=str, required=True, nullable=False, location="json") - parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json") - parser.add_argument("no_variable", type=bool, required=True, default=False, location="json") - parser.add_argument("code_language", type=str, required=False, default="javascript", location="json") + parser = ( + reqparse.RequestParser() + .add_argument("instruction", type=str, required=True, nullable=False, location="json") + .add_argument("model_config", type=dict, required=True, nullable=False, location="json") + .add_argument("no_variable", type=bool, required=True, default=False, location="json") + .add_argument("code_language", type=str, required=False, default="javascript", location="json") + ) args = parser.parse_args() + _, current_tenant_id = current_account_with_tenant() - account = current_user try: code_result = LLMGenerator.generate_code( - tenant_id=account.current_tenant_id, + tenant_id=current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], code_language=args["code_language"], @@ -140,15 +143,17 @@ class RuleStructuredOutputGenerateApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("instruction", type=str, required=True, nullable=False, location="json") - parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("instruction", type=str, required=True, nullable=False, location="json") + .add_argument("model_config", type=dict, required=True, nullable=False, location="json") + ) args = parser.parse_args() + _, current_tenant_id = current_account_with_tenant() - account = current_user try: structured_output = LLMGenerator.generate_structured_output( - tenant_id=account.current_tenant_id, + tenant_id=current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], ) @@ -189,15 +194,18 @@ class InstructionGenerateApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("flow_id", type=str, required=True, default="", location="json") - parser.add_argument("node_id", type=str, required=False, default="", location="json") - parser.add_argument("current", type=str, required=False, default="", location="json") - parser.add_argument("language", type=str, required=False, default="javascript", location="json") - parser.add_argument("instruction", type=str, required=True, nullable=False, location="json") - parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json") - parser.add_argument("ideal_output", type=str, required=False, default="", location="json") + parser = ( + reqparse.RequestParser() + .add_argument("flow_id", type=str, required=True, 
default="", location="json") + .add_argument("node_id", type=str, required=False, default="", location="json") + .add_argument("current", type=str, required=False, default="", location="json") + .add_argument("language", type=str, required=False, default="javascript", location="json") + .add_argument("instruction", type=str, required=True, nullable=False, location="json") + .add_argument("model_config", type=dict, required=True, nullable=False, location="json") + .add_argument("ideal_output", type=str, required=False, default="", location="json") + ) args = parser.parse_args() + _, current_tenant_id = current_account_with_tenant() code_template = ( Python3CodeProvider.get_default_code() if args["language"] == "python" @@ -222,21 +230,21 @@ class InstructionGenerateApi(Resource): match node_type: case "llm": return LLMGenerator.generate_rule_config( - current_user.current_tenant_id, + current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], no_variable=True, ) case "agent": return LLMGenerator.generate_rule_config( - current_user.current_tenant_id, + current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], no_variable=True, ) case "code": return LLMGenerator.generate_code( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], code_language=args["language"], @@ -245,7 +253,7 @@ class InstructionGenerateApi(Resource): return {"error": f"invalid node type: {node_type}"} if args["node_id"] == "" and args["current"] != "": # For legacy app without a workflow return LLMGenerator.instruction_modify_legacy( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, flow_id=args["flow_id"], current=args["current"], instruction=args["instruction"], @@ -254,7 +262,7 @@ class InstructionGenerateApi(Resource): ) if args["node_id"] != "" and args["current"] != "": # For workflow node return LLMGenerator.instruction_modify_workflow( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, flow_id=args["flow_id"], node_id=args["node_id"], current=args["current"], @@ -293,8 +301,7 @@ class InstructionGenerationTemplateApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("type", type=str, required=True, default=False, location="json") + parser = reqparse.RequestParser().add_argument("type", type=str, required=True, default=False, location="json") args = parser.parse_args() match args["type"]: case "prompt": diff --git a/api/controllers/console/app/mcp_server.py b/api/controllers/console/app/mcp_server.py index b9a383ee61..3700c6b1d0 100644 --- a/api/controllers/console/app/mcp_server.py +++ b/api/controllers/console/app/mcp_server.py @@ -1,16 +1,15 @@ import json from enum import StrEnum -from flask_login import current_user from flask_restx import Resource, fields, marshal_with, reqparse from werkzeug.exceptions import NotFound from controllers.console import api, console_ns from controllers.console.app.wraps import get_app_model -from controllers.console.wraps import account_initialization_required, setup_required +from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from extensions.ext_database import db from fields.app_fields import app_server_fields -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.model import 
AppMCPServer @@ -25,9 +24,9 @@ class AppMCPServerController(Resource): @api.doc(description="Get MCP server configuration for an application") @api.doc(params={"app_id": "Application ID"}) @api.response(200, "MCP server configuration retrieved successfully", app_server_fields) - @setup_required @login_required @account_initialization_required + @setup_required @get_app_model @marshal_with(app_server_fields) def get(self, app_model): @@ -48,17 +47,19 @@ class AppMCPServerController(Resource): ) @api.response(201, "MCP server configuration created successfully", app_server_fields) @api.response(403, "Insufficient permissions") - @setup_required - @login_required @account_initialization_required @get_app_model + @login_required + @setup_required @marshal_with(app_server_fields) + @edit_permission_required def post(self, app_model): - if not current_user.is_editor: - raise NotFound() - parser = reqparse.RequestParser() - parser.add_argument("description", type=str, required=False, location="json") - parser.add_argument("parameters", type=dict, required=True, location="json") + _, current_tenant_id = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("description", type=str, required=False, location="json") + .add_argument("parameters", type=dict, required=True, location="json") + ) args = parser.parse_args() description = args.get("description") @@ -71,7 +72,7 @@ class AppMCPServerController(Resource): parameters=json.dumps(args["parameters"], ensure_ascii=False), status=AppMCPServerStatus.ACTIVE, app_id=app_model.id, - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, server_code=AppMCPServer.generate_server_code(16), ) db.session.add(server) @@ -95,19 +96,20 @@ class AppMCPServerController(Resource): @api.response(200, "MCP server configuration updated successfully", app_server_fields) @api.response(403, "Insufficient permissions") @api.response(404, "Server not found") - @setup_required - @login_required - @account_initialization_required @get_app_model + @login_required + @setup_required + @account_initialization_required @marshal_with(app_server_fields) + @edit_permission_required def put(self, app_model): - if not current_user.is_editor: - raise NotFound() - parser = reqparse.RequestParser() - parser.add_argument("id", type=str, required=True, location="json") - parser.add_argument("description", type=str, required=False, location="json") - parser.add_argument("parameters", type=dict, required=True, location="json") - parser.add_argument("status", type=str, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("id", type=str, required=True, location="json") + .add_argument("description", type=str, required=False, location="json") + .add_argument("parameters", type=dict, required=True, location="json") + .add_argument("status", type=str, required=False, location="json") + ) args = parser.parse_args() server = db.session.query(AppMCPServer).where(AppMCPServer.id == args["id"]).first() if not server: @@ -142,13 +144,13 @@ class AppMCPServerRefreshController(Resource): @login_required @account_initialization_required @marshal_with(app_server_fields) + @edit_permission_required def get(self, server_id): - if not current_user.is_editor: - raise NotFound() + _, current_tenant_id = current_account_with_tenant() server = ( db.session.query(AppMCPServer) .where(AppMCPServer.id == server_id) - .where(AppMCPServer.tenant_id == current_user.current_tenant_id) + .where(AppMCPServer.tenant_id == 
current_tenant_id) .first() ) if not server: diff --git a/api/controllers/console/app/message.py b/api/controllers/console/app/message.py index 46523feccc..7e0ae370ef 100644 --- a/api/controllers/console/app/message.py +++ b/api/controllers/console/app/message.py @@ -3,7 +3,7 @@ import logging from flask_restx import Resource, fields, marshal_with, reqparse from flask_restx.inputs import int_range from sqlalchemy import exists, select -from werkzeug.exceptions import Forbidden, InternalServerError, NotFound +from werkzeug.exceptions import InternalServerError, NotFound from controllers.console import api, console_ns from controllers.console.app.error import ( @@ -17,6 +17,7 @@ from controllers.console.explore.error import AppSuggestedQuestionsAfterAnswerDi from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_resource_check, + edit_permission_required, setup_required, ) from core.app.entities.app_invoke_entities import InvokeFrom @@ -26,8 +27,7 @@ from extensions.ext_database import db from fields.conversation_fields import annotation_fields, message_detail_fields from libs.helper import uuid_value from libs.infinite_scroll_pagination import InfiniteScrollPagination -from libs.login import current_user, login_required -from models.account import Account +from libs.login import current_account_with_tenant, login_required from models.model import AppMode, Conversation, Message, MessageAnnotation, MessageFeedback from services.annotation_service import AppAnnotationService from services.errors.conversation import ConversationNotExistsError @@ -56,19 +56,19 @@ class ChatMessageListApi(Resource): ) @api.response(200, "Success", message_infinite_scroll_pagination_fields) @api.response(404, "Conversation not found") - @setup_required @login_required - @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) @account_initialization_required + @setup_required + @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) @marshal_with(message_infinite_scroll_pagination_fields) + @edit_permission_required def get(self, app_model): - if not isinstance(current_user, Account) or not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("conversation_id", required=True, type=uuid_value, location="args") - parser.add_argument("first_id", type=uuid_value, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("conversation_id", required=True, type=uuid_value, location="args") + .add_argument("first_id", type=uuid_value, location="args") + .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + ) args = parser.parse_args() conversation = ( @@ -154,12 +154,13 @@ class MessageFeedbackApi(Resource): @login_required @account_initialization_required def post(self, app_model): - if current_user is None: - raise Forbidden() + current_user, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("message_id", required=True, type=uuid_value, location="json") - parser.add_argument("rating", type=str, choices=["like", "dislike", None], location="json") + parser = ( + reqparse.RequestParser() + .add_argument("message_id", required=True, type=uuid_value, location="json") + .add_argument("rating", type=str, choices=["like", "dislike", None], location="json") + ) args = parser.parse_args() 
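Throughout this patch, the flask_login `current_user` proxy is replaced by `libs.login.current_account_with_tenant()`, which call sites unpack as an `(account, tenant_id)` pair, using `_` for whichever half they do not need. The helper's definition is outside this diff; judging from the checks it absorbs (the deleted `isinstance(current_user, Account)` guards and the deleted `"The tenant information should be loaded."` assertion), a minimal sketch of its likely contract:

```python
# Hypothetical sketch -- the real helper lives in libs/login.py and is not shown in this diff.
from flask_login import current_user

from models.account import Account


def current_account_with_tenant() -> tuple[Account, str]:
    """Return the logged-in Account and its current tenant id, or fail loudly.

    Centralizes the isinstance / tenant-loaded checks that this patch
    deletes from individual controllers.
    """
    account = current_user
    if not isinstance(account, Account):
        raise ValueError("current_user must be an Account instance")
    if account.current_tenant_id is None:
        raise ValueError("The tenant information should be loaded.")
    return account, account.current_tenant_id
```

Whatever the real implementation, the controllers in this patch rely only on the tuple shape: `current_user, _ = current_account_with_tenant()` where the account is needed, and `_, current_tenant_id = current_account_with_tenant()` where only the tenant id is.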
message_id = str(args["message_id"]) @@ -211,23 +212,21 @@ class MessageAnnotationApi(Resource): ) @api.response(200, "Annotation created successfully", annotation_fields) @api.response(403, "Insufficient permissions") + @marshal_with(annotation_fields) + @get_app_model @setup_required @login_required - @account_initialization_required @cloud_edition_billing_resource_check("annotation") - @get_app_model - @marshal_with(annotation_fields) + @account_initialization_required + @edit_permission_required def post(self, app_model): - if not isinstance(current_user, Account): - raise Forbidden() - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("message_id", required=False, type=uuid_value, location="json") - parser.add_argument("question", required=True, type=str, location="json") - parser.add_argument("answer", required=True, type=str, location="json") - parser.add_argument("annotation_reply", required=False, type=dict, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("message_id", required=False, type=uuid_value, location="json") + .add_argument("question", required=True, type=str, location="json") + .add_argument("answer", required=True, type=str, location="json") + .add_argument("annotation_reply", required=False, type=dict, location="json") + ) args = parser.parse_args() annotation = AppAnnotationService.up_insert_app_annotation_from_message(args, app_model.id) @@ -270,6 +269,7 @@ class MessageSuggestedQuestionApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) def get(self, app_model, message_id): + current_user, _ = current_account_with_tenant() message_id = str(message_id) try: @@ -304,12 +304,12 @@ class MessageApi(Resource): @api.doc(params={"app_id": "Application ID", "message_id": "Message ID"}) @api.response(200, "Message retrieved successfully", message_detail_fields) @api.response(404, "Message not found") + @get_app_model @setup_required @login_required @account_initialization_required - @get_app_model @marshal_with(message_detail_fields) - def get(self, app_model, message_id): + def get(self, app_model, message_id: str): message_id = str(message_id) message = db.session.query(Message).where(Message.id == message_id, Message.app_id == app_model.id).first() diff --git a/api/controllers/console/app/model_config.py b/api/controllers/console/app/model_config.py index fa6e3f8738..72ce8a7ddf 100644 --- a/api/controllers/console/app/model_config.py +++ b/api/controllers/console/app/model_config.py @@ -2,7 +2,6 @@ import json from typing import cast from flask import request -from flask_login import current_user from flask_restx import Resource, fields from werkzeug.exceptions import Forbidden @@ -15,8 +14,7 @@ from core.tools.utils.configuration import ToolParameterConfigurationManager from events.app_event import app_model_config_was_updated from extensions.ext_database import db from libs.datetime_utils import naive_utc_now -from libs.login import login_required -from models.account import Account +from libs.login import current_account_with_tenant, login_required from models.model import AppMode, AppModelConfig from services.app_model_config_service import AppModelConfigService @@ -54,16 +52,14 @@ class ModelConfigResource(Resource): @get_app_model(mode=[AppMode.AGENT_CHAT, AppMode.CHAT, AppMode.COMPLETION]) def post(self, app_model): """Modify app model config""" - if not isinstance(current_user, Account): - raise 
Forbidden() + current_user, current_tenant_id = current_account_with_tenant() if not current_user.has_edit_permission: raise Forbidden() - assert current_user.current_tenant_id is not None, "The tenant information should be loaded." # validate config model_configuration = AppModelConfigService.validate_configuration( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, config=cast(dict, request.json), app_mode=AppMode.value_of(app_model.mode), ) @@ -95,12 +91,12 @@ class ModelConfigResource(Resource): # get tool try: tool_runtime = ToolManager.get_agent_tool_runtime( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, app_id=app_model.id, agent_tool=agent_tool_entity, ) manager = ToolParameterConfigurationManager( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, tool_runtime=tool_runtime, provider_name=agent_tool_entity.provider_id, provider_type=agent_tool_entity.provider_type, @@ -134,7 +130,7 @@ class ModelConfigResource(Resource): else: try: tool_runtime = ToolManager.get_agent_tool_runtime( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, app_id=app_model.id, agent_tool=agent_tool_entity, ) @@ -142,7 +138,7 @@ class ModelConfigResource(Resource): continue manager = ToolParameterConfigurationManager( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, tool_runtime=tool_runtime, provider_name=agent_tool_entity.provider_id, provider_type=agent_tool_entity.provider_type, diff --git a/api/controllers/console/app/ops_trace.py b/api/controllers/console/app/ops_trace.py index 981974e842..1d80314774 100644 --- a/api/controllers/console/app/ops_trace.py +++ b/api/controllers/console/app/ops_trace.py @@ -30,8 +30,7 @@ class TraceAppConfigApi(Resource): @login_required @account_initialization_required def get(self, app_id): - parser = reqparse.RequestParser() - parser.add_argument("tracing_provider", type=str, required=True, location="args") + parser = reqparse.RequestParser().add_argument("tracing_provider", type=str, required=True, location="args") args = parser.parse_args() try: @@ -63,9 +62,11 @@ class TraceAppConfigApi(Resource): @account_initialization_required def post(self, app_id): """Create a new trace app configuration""" - parser = reqparse.RequestParser() - parser.add_argument("tracing_provider", type=str, required=True, location="json") - parser.add_argument("tracing_config", type=dict, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("tracing_provider", type=str, required=True, location="json") + .add_argument("tracing_config", type=dict, required=True, location="json") + ) args = parser.parse_args() try: @@ -99,9 +100,11 @@ class TraceAppConfigApi(Resource): @account_initialization_required def patch(self, app_id): """Update an existing trace app configuration""" - parser = reqparse.RequestParser() - parser.add_argument("tracing_provider", type=str, required=True, location="json") - parser.add_argument("tracing_config", type=dict, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("tracing_provider", type=str, required=True, location="json") + .add_argument("tracing_config", type=dict, required=True, location="json") + ) args = parser.parse_args() try: @@ -129,8 +132,7 @@ class TraceAppConfigApi(Resource): @account_initialization_required def delete(self, app_id): """Delete an existing trace app configuration""" - parser = reqparse.RequestParser() - parser.add_argument("tracing_provider", 
type=str, required=True, location="args") + parser = reqparse.RequestParser().add_argument("tracing_provider", type=str, required=True, location="args") args = parser.parse_args() try: diff --git a/api/controllers/console/app/site.py b/api/controllers/console/app/site.py index 95befc5df9..c4d640bf0e 100644 --- a/api/controllers/console/app/site.py +++ b/api/controllers/console/app/site.py @@ -1,4 +1,3 @@ -from flask_login import current_user from flask_restx import Resource, fields, marshal_with, reqparse from werkzeug.exceptions import Forbidden, NotFound @@ -9,30 +8,36 @@ from controllers.console.wraps import account_initialization_required, setup_req from extensions.ext_database import db from fields.app_fields import app_site_fields from libs.datetime_utils import naive_utc_now -from libs.login import login_required -from models import Account, Site +from libs.login import current_account_with_tenant, login_required +from models import Site def parse_app_site_args(): - parser = reqparse.RequestParser() - parser.add_argument("title", type=str, required=False, location="json") - parser.add_argument("icon_type", type=str, required=False, location="json") - parser.add_argument("icon", type=str, required=False, location="json") - parser.add_argument("icon_background", type=str, required=False, location="json") - parser.add_argument("description", type=str, required=False, location="json") - parser.add_argument("default_language", type=supported_language, required=False, location="json") - parser.add_argument("chat_color_theme", type=str, required=False, location="json") - parser.add_argument("chat_color_theme_inverted", type=bool, required=False, location="json") - parser.add_argument("customize_domain", type=str, required=False, location="json") - parser.add_argument("copyright", type=str, required=False, location="json") - parser.add_argument("privacy_policy", type=str, required=False, location="json") - parser.add_argument("custom_disclaimer", type=str, required=False, location="json") - parser.add_argument( - "customize_token_strategy", type=str, choices=["must", "allow", "not_allow"], required=False, location="json" + parser = ( + reqparse.RequestParser() + .add_argument("title", type=str, required=False, location="json") + .add_argument("icon_type", type=str, required=False, location="json") + .add_argument("icon", type=str, required=False, location="json") + .add_argument("icon_background", type=str, required=False, location="json") + .add_argument("description", type=str, required=False, location="json") + .add_argument("default_language", type=supported_language, required=False, location="json") + .add_argument("chat_color_theme", type=str, required=False, location="json") + .add_argument("chat_color_theme_inverted", type=bool, required=False, location="json") + .add_argument("customize_domain", type=str, required=False, location="json") + .add_argument("copyright", type=str, required=False, location="json") + .add_argument("privacy_policy", type=str, required=False, location="json") + .add_argument("custom_disclaimer", type=str, required=False, location="json") + .add_argument( + "customize_token_strategy", + type=str, + choices=["must", "allow", "not_allow"], + required=False, + location="json", + ) + .add_argument("prompt_public", type=bool, required=False, location="json") + .add_argument("show_workflow_steps", type=bool, required=False, location="json") + .add_argument("use_icon_as_answer_icon", type=bool, required=False, location="json") ) - 
parser.add_argument("prompt_public", type=bool, required=False, location="json") - parser.add_argument("show_workflow_steps", type=bool, required=False, location="json") - parser.add_argument("use_icon_as_answer_icon", type=bool, required=False, location="json") return parser.parse_args() @@ -76,9 +81,10 @@ class AppSite(Resource): @marshal_with(app_site_fields) def post(self, app_model): args = parse_app_site_args() + current_user, _ = current_account_with_tenant() # The role of the current user in the ta table must be editor, admin, or owner - if not current_user.is_editor: + if not current_user.has_edit_permission: raise Forbidden() site = db.session.query(Site).where(Site.app_id == app_model.id).first() @@ -107,8 +113,6 @@ class AppSite(Resource): if value is not None: setattr(site, attr_name, value) - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") site.updated_by = current_user.id site.updated_at = naive_utc_now() db.session.commit() @@ -131,6 +135,8 @@ class AppSiteAccessTokenReset(Resource): @marshal_with(app_site_fields) def post(self, app_model): # The role of the current user in the ta table must be admin or owner + current_user, _ = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() @@ -140,8 +146,6 @@ class AppSiteAccessTokenReset(Resource): raise NotFound site.code = Site.generate_code(16) - if not isinstance(current_user, Account): - raise ValueError("current_user must be an Account instance") site.updated_by = current_user.id site.updated_at = naive_utc_now() db.session.commit() diff --git a/api/controllers/console/app/statistic.py b/api/controllers/console/app/statistic.py index 5974395c6a..0917a6e53c 100644 --- a/api/controllers/console/app/statistic.py +++ b/api/controllers/console/app/statistic.py @@ -4,7 +4,6 @@ from decimal import Decimal import pytz import sqlalchemy as sa from flask import jsonify -from flask_login import current_user from flask_restx import Resource, fields, reqparse from controllers.console import api, console_ns @@ -13,7 +12,7 @@ from controllers.console.wraps import account_initialization_required, setup_req from core.app.entities.app_invoke_entities import InvokeFrom from extensions.ext_database import db from libs.helper import DatetimeString -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models import AppMode, Message @@ -37,11 +36,13 @@ class DailyMessageStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -53,6 +54,7 @@ WHERE app_id = :app_id AND invoke_from != :invoke_from""" arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -109,13 +111,15 @@ class DailyConversationStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - 
account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -175,11 +179,13 @@ class DailyTerminalsStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -191,7 +197,7 @@ WHERE app_id = :app_id AND invoke_from != :invoke_from""" arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -247,11 +253,13 @@ class DailyTokenCostStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -264,7 +272,7 @@ WHERE app_id = :app_id AND invoke_from != :invoke_from""" arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -322,11 +330,13 @@ class AverageSessionInteractionStatistic(Resource): @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT]) def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -346,7 +356,7 @@ FROM c.app_id = :app_id AND m.invoke_from != :invoke_from""" arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -413,11 
+423,13 @@ class UserSatisfactionRateStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -433,7 +445,7 @@ WHERE m.app_id = :app_id AND m.invoke_from != :invoke_from""" arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -494,11 +506,13 @@ class AverageResponseTimeStatistic(Resource): @account_initialization_required @get_app_model(mode=AppMode.COMPLETION) def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -510,7 +524,7 @@ WHERE app_id = :app_id AND invoke_from != :invoke_from""" arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -566,11 +580,13 @@ class TokensPerSecondStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -585,7 +601,7 @@ WHERE app_id = :app_id AND invoke_from != :invoke_from""" arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER} - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py index 578d864b80..56771ed420 100644 --- a/api/controllers/console/app/workflow.py +++ b/api/controllers/console/app/workflow.py @@ -12,7 +12,7 @@ import services from controllers.console import api, console_ns from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync from controllers.console.app.wraps import get_app_model -from controllers.console.wraps import account_initialization_required, setup_required +from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required 
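The companion change is the shared `edit_permission_required` decorator imported here from `controllers.console.wraps`, replacing the per-endpoint `if not current_user.has_edit_permission: raise Forbidden()` blocks. Its implementation is likewise outside this diff; a plausible minimal sketch, assuming it reuses `current_account_with_tenant()`:

```python
# Hypothetical sketch -- the real decorator lives in controllers/console/wraps.py.
from functools import wraps

from werkzeug.exceptions import Forbidden

from libs.login import current_account_with_tenant


def edit_permission_required(view):
    """Reject the request unless the account has edit rights (editor, admin, or owner)."""

    @wraps(view)
    def decorated(*args, **kwargs):
        account, _ = current_account_with_tenant()
        if not account.has_edit_permission:
            raise Forbidden()
        return view(*args, **kwargs)

    return decorated
```

If the decorator does raise `Forbidden`, that is a deliberate behavior change for the MCP server endpoints, which previously answered non-editors with `NotFound()` despite documenting a 403 response; the decorator brings them in line with their existing `@api.response(403, "Insufficient permissions")` docs.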
from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError from core.app.app_config.features.file_upload.manager import FileUploadConfigManager from core.app.apps.base_app_queue_manager import AppQueueManager @@ -27,9 +27,8 @@ from fields.workflow_run_fields import workflow_run_node_execution_fields from libs import helper from libs.datetime_utils import naive_utc_now from libs.helper import TimestampField, uuid_value -from libs.login import current_user, login_required +from libs.login import current_account_with_tenant, login_required from models import App -from models.account import Account from models.model import AppMode from models.workflow import Workflow from services.app_generate_service import AppGenerateService @@ -70,15 +69,11 @@ class DraftWorkflowApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) @marshal_with(workflow_fields) + @edit_permission_required def get(self, app_model: App): """ Get draft workflow """ - # The role of the current user in the ta table must be admin, owner, or editor - assert isinstance(current_user, Account) - if not current_user.has_edit_permission: - raise Forbidden() - # fetch draft workflow by app_model workflow_service = WorkflowService() workflow = workflow_service.get_draft_workflow(app_model=app_model) @@ -110,24 +105,24 @@ class DraftWorkflowApi(Resource): @api.response(200, "Draft workflow synced successfully", workflow_fields) @api.response(400, "Invalid workflow configuration") @api.response(403, "Permission denied") + @edit_permission_required def post(self, app_model: App): """ Sync draft workflow """ - # The role of the current user in the ta table must be admin, owner, or editor - assert isinstance(current_user, Account) - if not current_user.has_edit_permission: - raise Forbidden() + current_user, _ = current_account_with_tenant() content_type = request.headers.get("Content-Type", "") if "application/json" in content_type: - parser = reqparse.RequestParser() - parser.add_argument("graph", type=dict, required=True, nullable=False, location="json") - parser.add_argument("features", type=dict, required=True, nullable=False, location="json") - parser.add_argument("hash", type=str, required=False, location="json") - parser.add_argument("environment_variables", type=list, required=True, location="json") - parser.add_argument("conversation_variables", type=list, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("graph", type=dict, required=True, nullable=False, location="json") + .add_argument("features", type=dict, required=True, nullable=False, location="json") + .add_argument("hash", type=str, required=False, location="json") + .add_argument("environment_variables", type=list, required=True, location="json") + .add_argument("conversation_variables", type=list, required=False, location="json") + ) args = parser.parse_args() elif "text/plain" in content_type: try: @@ -149,10 +144,6 @@ class DraftWorkflowApi(Resource): return {"message": "Invalid JSON data"}, 400 else: abort(415) - - if not isinstance(current_user, Account): - raise Forbidden() - workflow_service = WorkflowService() try: @@ -206,24 +197,21 @@ class AdvancedChatDraftWorkflowRunApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT]) + @edit_permission_required def post(self, app_model: App): """ Run draft workflow """ - # The role of the current user in the ta table must be admin, owner, or editor - 
assert isinstance(current_user, Account) - if not current_user.has_edit_permission: - raise Forbidden() + current_user, _ = current_account_with_tenant() - if not isinstance(current_user, Account): - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, location="json") - parser.add_argument("query", type=str, required=True, location="json", default="") - parser.add_argument("files", type=list, location="json") - parser.add_argument("conversation_id", type=uuid_value, location="json") - parser.add_argument("parent_message_id", type=uuid_value, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, location="json") + .add_argument("query", type=str, required=True, location="json", default="") + .add_argument("files", type=list, location="json") + .add_argument("conversation_id", type=uuid_value, location="json") + .add_argument("parent_message_id", type=uuid_value, required=False, location="json") + ) args = parser.parse_args() @@ -271,18 +259,13 @@ class AdvancedChatDraftRunIterationNodeApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT]) + @edit_permission_required def post(self, app_model: App, node_id: str): """ Run draft workflow iteration node """ - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, location="json") + current_user, _ = current_account_with_tenant() + parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json") args = parser.parse_args() try: @@ -323,18 +306,13 @@ class WorkflowDraftRunIterationNodeApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.WORKFLOW]) + @edit_permission_required def post(self, app_model: App, node_id: str): """ Run draft workflow iteration node """ - # The role of the current user in the ta table must be admin, owner, or editor - if not isinstance(current_user, Account): - raise Forbidden() - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, location="json") + current_user, _ = current_account_with_tenant() + parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json") args = parser.parse_args() try: @@ -375,19 +353,13 @@ class AdvancedChatDraftRunLoopNodeApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT]) + @edit_permission_required def post(self, app_model: App, node_id: str): """ Run draft workflow loop node """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, location="json") + current_user, _ = current_account_with_tenant() + parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json") args = parser.parse_args() try: @@ -428,19 +400,13 @@ class WorkflowDraftRunLoopNodeApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.WORKFLOW]) + @edit_permission_required def post(self, app_model: App, node_id: str): """ Run draft 
workflow loop node """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, location="json") + current_user, _ = current_account_with_tenant() + parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json") args = parser.parse_args() try: @@ -480,20 +446,17 @@ class DraftWorkflowRunApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.WORKFLOW]) + @edit_permission_required def post(self, app_model: App): """ Run draft workflow """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") - parser.add_argument("files", type=list, required=False, location="json") + current_user, _ = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, nullable=False, location="json") + .add_argument("files", type=list, required=False, location="json") + ) args = parser.parse_args() external_trace_id = get_external_trace_id(request) @@ -526,17 +489,11 @@ class WorkflowTaskStopApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @edit_permission_required def post(self, app_model: App, task_id: str): """ Stop workflow task """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - # Stop using both mechanisms for backward compatibility # Legacy stop flag mechanism (without user check) AppQueueManager.set_stop_flag_no_user_check(task_id) @@ -568,21 +525,18 @@ class DraftWorkflowNodeRunApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) @marshal_with(workflow_run_node_execution_fields) + @edit_permission_required def post(self, app_model: App, node_id: str): """ Run draft workflow node """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") - parser.add_argument("query", type=str, required=False, location="json", default="") - parser.add_argument("files", type=list, location="json", default=[]) + current_user, _ = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, nullable=False, location="json") + .add_argument("query", type=str, required=False, location="json", default="") + .add_argument("files", type=list, location="json", default=[]) + ) args = parser.parse_args() user_inputs = args.get("inputs") @@ -622,17 +576,11 @@ class PublishedWorkflowApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) @marshal_with(workflow_fields) + @edit_permission_required def get(self, app_model: 
App): """ Get published workflow """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - # fetch published workflow by app_model workflow_service = WorkflowService() workflow = workflow_service.get_published_workflow(app_model=app_model) @@ -644,19 +592,17 @@ class PublishedWorkflowApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @edit_permission_required def post(self, app_model: App): """ Publish workflow """ - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("marked_name", type=str, required=False, default="", location="json") - parser.add_argument("marked_comment", type=str, required=False, default="", location="json") + current_user, _ = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("marked_name", type=str, required=False, default="", location="json") + .add_argument("marked_comment", type=str, required=False, default="", location="json") + ) args = parser.parse_args() # Validate name and comment length @@ -702,17 +648,11 @@ class DefaultBlockConfigsApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @edit_permission_required def get(self, app_model: App): """ Get default block config """ - - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - # Get default block configs workflow_service = WorkflowService() return workflow_service.get_default_block_configs() @@ -729,18 +669,12 @@ class DefaultBlockConfigApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @edit_permission_required def get(self, app_model: App, block_type: str): """ Get default block config """ - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("q", type=str, location="args") + parser = reqparse.RequestParser().add_argument("q", type=str, location="args") args = parser.parse_args() q = args.get("q") @@ -769,24 +703,23 @@ class ConvertToWorkflowApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.CHAT, AppMode.COMPLETION]) + @edit_permission_required def post(self, app_model: App): """ Convert basic mode of chatbot app to workflow mode Convert expert mode of chatbot app to workflow mode Convert Completion App to Workflow App """ - if not isinstance(current_user, Account): - raise Forbidden() - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.has_edit_permission: - raise Forbidden() + current_user, _ = current_account_with_tenant() if request.data: - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=False, nullable=True, location="json") - parser.add_argument("icon_type", type=str, 
required=False, nullable=True, location="json") - parser.add_argument("icon", type=str, required=False, nullable=True, location="json") - parser.add_argument("icon_background", type=str, required=False, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, required=False, nullable=True, location="json") + .add_argument("icon_type", type=str, required=False, nullable=True, location="json") + .add_argument("icon", type=str, required=False, nullable=True, location="json") + .add_argument("icon_background", type=str, required=False, nullable=True, location="json") + ) args = parser.parse_args() else: args = {} @@ -812,21 +745,20 @@ class PublishedAllWorkflowApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) @marshal_with(workflow_pagination_fields) + @edit_permission_required def get(self, app_model: App): """ Get published workflows """ + current_user, _ = current_account_with_tenant() - if not isinstance(current_user, Account): - raise Forbidden() - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args") - parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") - parser.add_argument("user_id", type=str, required=False, location="args") - parser.add_argument("named_only", type=inputs.boolean, required=False, default=False, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args") + .add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") + .add_argument("user_id", type=str, required=False, location="args") + .add_argument("named_only", type=inputs.boolean, required=False, default=False, location="args") + ) args = parser.parse_args() page = int(args.get("page", 1)) limit = int(args.get("limit", 10)) @@ -879,19 +811,17 @@ class WorkflowByIdApi(Resource): @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) @marshal_with(workflow_fields) + @edit_permission_required def patch(self, app_model: App, workflow_id: str): """ Update workflow attributes """ - if not isinstance(current_user, Account): - raise Forbidden() - # Check permission - if not current_user.has_edit_permission: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("marked_name", type=str, required=False, location="json") - parser.add_argument("marked_comment", type=str, required=False, location="json") + current_user, _ = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("marked_name", type=str, required=False, location="json") + .add_argument("marked_comment", type=str, required=False, location="json") + ) args = parser.parse_args() # Validate name and comment length @@ -934,16 +864,11 @@ class WorkflowByIdApi(Resource): @login_required @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @edit_permission_required def delete(self, app_model: App, workflow_id: str): """ Delete workflow """ - if not isinstance(current_user, Account): - raise Forbidden() - # Check permission - if not current_user.has_edit_permission: - raise Forbidden() - workflow_service = WorkflowService() # Create a session and manage the transaction diff --git 
a/api/controllers/console/app/workflow_app_log.py b/api/controllers/console/app/workflow_app_log.py index 8e24be4fa7..cbf4e84ff0 100644 --- a/api/controllers/console/app/workflow_app_log.py +++ b/api/controllers/console/app/workflow_app_log.py @@ -42,33 +42,35 @@ class WorkflowAppLogApi(Resource): """ Get workflow app logs """ - parser = reqparse.RequestParser() - parser.add_argument("keyword", type=str, location="args") - parser.add_argument( - "status", type=str, choices=["succeeded", "failed", "stopped", "partial-succeeded"], location="args" + parser = ( + reqparse.RequestParser() + .add_argument("keyword", type=str, location="args") + .add_argument( + "status", type=str, choices=["succeeded", "failed", "stopped", "partial-succeeded"], location="args" + ) + .add_argument( + "created_at__before", type=str, location="args", help="Filter logs created before this timestamp" + ) + .add_argument( + "created_at__after", type=str, location="args", help="Filter logs created after this timestamp" + ) + .add_argument( + "created_by_end_user_session_id", + type=str, + location="args", + required=False, + default=None, + ) + .add_argument( + "created_by_account", + type=str, + location="args", + required=False, + default=None, + ) + .add_argument("page", type=int_range(1, 99999), default=1, location="args") + .add_argument("limit", type=int_range(1, 100), default=20, location="args") ) - parser.add_argument( - "created_at__before", type=str, location="args", help="Filter logs created before this timestamp" - ) - parser.add_argument( - "created_at__after", type=str, location="args", help="Filter logs created after this timestamp" - ) - parser.add_argument( - "created_by_end_user_session_id", - type=str, - location="args", - required=False, - default=None, - ) - parser.add_argument( - "created_by_account", - type=str, - location="args", - required=False, - default=None, - ) - parser.add_argument("page", type=int_range(1, 99999), default=1, location="args") - parser.add_argument("limit", type=int_range(1, 100), default=20, location="args") args = parser.parse_args() args.status = WorkflowExecutionStatus(args.status) if args.status else None diff --git a/api/controllers/console/app/workflow_draft_variable.py b/api/controllers/console/app/workflow_draft_variable.py index da6b56d026..0722eb40d2 100644 --- a/api/controllers/console/app/workflow_draft_variable.py +++ b/api/controllers/console/app/workflow_draft_variable.py @@ -22,8 +22,7 @@ from extensions.ext_database import db from factories.file_factory import build_from_mapping, build_from_mappings from factories.variable_factory import build_segment_with_type from libs.login import current_user, login_required -from models import App, AppMode -from models.account import Account +from models import Account, App, AppMode from models.workflow import WorkflowDraftVariable from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService from services.workflow_service import WorkflowService @@ -58,16 +57,18 @@ def _serialize_var_value(variable: WorkflowDraftVariable): def _create_pagination_parser(): - parser = reqparse.RequestParser() - parser.add_argument( - "page", - type=inputs.int_range(1, 100_000), - required=False, - default=1, - location="args", - help="the page of data requested", + parser = ( + reqparse.RequestParser() + .add_argument( + "page", + type=inputs.int_range(1, 100_000), + required=False, + default=1, + location="args", + help="the page of data requested", + ) + .add_argument("limit", 
type=inputs.int_range(1, 100), required=False, default=20, location="args") ) - parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") return parser @@ -320,10 +321,11 @@ class VariableApi(Resource): # "upload_file_id": "1602650a-4fe4-423c-85a2-af76c083e3c4" # } - parser = reqparse.RequestParser() - parser.add_argument(self._PATCH_NAME_FIELD, type=str, required=False, nullable=True, location="json") - # Parse 'value' field as-is to maintain its original data structure - parser.add_argument(self._PATCH_VALUE_FIELD, type=lambda x: x, required=False, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument(self._PATCH_NAME_FIELD, type=str, required=False, nullable=True, location="json") + .add_argument(self._PATCH_VALUE_FIELD, type=lambda x: x, required=False, nullable=True, location="json") + ) draft_var_srv = WorkflowDraftVariableService( session=db.session(), diff --git a/api/controllers/console/app/workflow_run.py b/api/controllers/console/app/workflow_run.py index 23ba63845c..311aa81279 100644 --- a/api/controllers/console/app/workflow_run.py +++ b/api/controllers/console/app/workflow_run.py @@ -1,6 +1,5 @@ from typing import cast -from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from flask_restx.inputs import int_range @@ -9,15 +8,81 @@ from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, setup_required from fields.workflow_run_fields import ( advanced_chat_workflow_run_pagination_fields, + workflow_run_count_fields, workflow_run_detail_fields, workflow_run_node_execution_list_fields, workflow_run_pagination_fields, ) +from libs.custom_inputs import time_duration from libs.helper import uuid_value -from libs.login import login_required -from models import Account, App, AppMode, EndUser +from libs.login import current_user, login_required +from models import Account, App, AppMode, EndUser, WorkflowRunTriggeredFrom from services.workflow_run_service import WorkflowRunService +# Workflow run status choices for filtering +WORKFLOW_RUN_STATUS_CHOICES = ["running", "succeeded", "failed", "stopped", "partial-succeeded"] + + +def _parse_workflow_run_list_args(): + """ + Parse common arguments for workflow run list endpoints. + + Returns: + Parsed arguments containing last_id, limit, status, and triggered_from filters + """ + parser = reqparse.RequestParser() + parser.add_argument("last_id", type=uuid_value, location="args") + parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + parser.add_argument( + "status", + type=str, + choices=WORKFLOW_RUN_STATUS_CHOICES, + location="args", + required=False, + ) + parser.add_argument( + "triggered_from", + type=str, + choices=["debugging", "app-run"], + location="args", + required=False, + help="Filter by trigger source: debugging or app-run", + ) + return parser.parse_args() + + +def _parse_workflow_run_count_args(): + """ + Parse common arguments for workflow run count endpoints. 
+ + Returns: + Parsed arguments containing status, time_range, and triggered_from filters + """ + parser = reqparse.RequestParser() + parser.add_argument( + "status", + type=str, + choices=WORKFLOW_RUN_STATUS_CHOICES, + location="args", + required=False, + ) + parser.add_argument( + "time_range", + type=time_duration, + location="args", + required=False, + help="Time range filter (e.g., 7d, 4h, 30m, 30s)", + ) + parser.add_argument( + "triggered_from", + type=str, + choices=["debugging", "app-run"], + location="args", + required=False, + help="Filter by trigger source: debugging or app-run", + ) + return parser.parse_args() + @console_ns.route("/apps//advanced-chat/workflow-runs") class AdvancedChatAppWorkflowRunListApi(Resource): @@ -25,6 +90,8 @@ class AdvancedChatAppWorkflowRunListApi(Resource): @api.doc(description="Get advanced chat workflow run list") @api.doc(params={"app_id": "Application ID"}) @api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"}) + @api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}) + @api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"}) @api.response(200, "Workflow runs retrieved successfully", advanced_chat_workflow_run_pagination_fields) @setup_required @login_required @@ -35,13 +102,64 @@ class AdvancedChatAppWorkflowRunListApi(Resource): """ Get advanced chat app workflow run list """ - parser = reqparse.RequestParser() - parser.add_argument("last_id", type=uuid_value, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") - args = parser.parse_args() + args = _parse_workflow_run_list_args() + + # Default to DEBUGGING if not specified + triggered_from = ( + WorkflowRunTriggeredFrom(args.get("triggered_from")) + if args.get("triggered_from") + else WorkflowRunTriggeredFrom.DEBUGGING + ) workflow_run_service = WorkflowRunService() - result = workflow_run_service.get_paginate_advanced_chat_workflow_runs(app_model=app_model, args=args) + result = workflow_run_service.get_paginate_advanced_chat_workflow_runs( + app_model=app_model, args=args, triggered_from=triggered_from + ) + + return result + + +@console_ns.route("/apps//advanced-chat/workflow-runs/count") +class AdvancedChatAppWorkflowRunCountApi(Resource): + @api.doc("get_advanced_chat_workflow_runs_count") + @api.doc(description="Get advanced chat workflow runs count statistics") + @api.doc(params={"app_id": "Application ID"}) + @api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}) + @api.doc( + params={ + "time_range": ( + "Filter by time range (optional): e.g., 7d (7 days), 4h (4 hours), " + "30m (30 minutes), 30s (30 seconds). Filters by created_at field." + ) + } + ) + @api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. 
Default: debugging"}) + @api.response(200, "Workflow runs count retrieved successfully", workflow_run_count_fields) + @setup_required + @login_required + @account_initialization_required + @get_app_model(mode=[AppMode.ADVANCED_CHAT]) + @marshal_with(workflow_run_count_fields) + def get(self, app_model: App): + """ + Get advanced chat workflow runs count statistics + """ + args = _parse_workflow_run_count_args() + + # Default to DEBUGGING if not specified + triggered_from = ( + WorkflowRunTriggeredFrom(args.get("triggered_from")) + if args.get("triggered_from") + else WorkflowRunTriggeredFrom.DEBUGGING + ) + + workflow_run_service = WorkflowRunService() + result = workflow_run_service.get_workflow_runs_count( + app_model=app_model, + status=args.get("status"), + time_range=args.get("time_range"), + triggered_from=triggered_from, + ) return result @@ -52,6 +170,8 @@ class WorkflowRunListApi(Resource): @api.doc(description="Get workflow run list") @api.doc(params={"app_id": "Application ID"}) @api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"}) + @api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}) + @api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"}) @api.response(200, "Workflow runs retrieved successfully", workflow_run_pagination_fields) @setup_required @login_required @@ -62,13 +182,64 @@ class WorkflowRunListApi(Resource): """ Get workflow run list """ - parser = reqparse.RequestParser() - parser.add_argument("last_id", type=uuid_value, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") - args = parser.parse_args() + args = _parse_workflow_run_list_args() + + # Default to DEBUGGING for workflow if not specified (backward compatibility) + triggered_from = ( + WorkflowRunTriggeredFrom(args.get("triggered_from")) + if args.get("triggered_from") + else WorkflowRunTriggeredFrom.DEBUGGING + ) workflow_run_service = WorkflowRunService() - result = workflow_run_service.get_paginate_workflow_runs(app_model=app_model, args=args) + result = workflow_run_service.get_paginate_workflow_runs( + app_model=app_model, args=args, triggered_from=triggered_from + ) + + return result + + +@console_ns.route("/apps//workflow-runs/count") +class WorkflowRunCountApi(Resource): + @api.doc("get_workflow_runs_count") + @api.doc(description="Get workflow runs count statistics") + @api.doc(params={"app_id": "Application ID"}) + @api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}) + @api.doc( + params={ + "time_range": ( + "Filter by time range (optional): e.g., 7d (7 days), 4h (4 hours), " + "30m (30 minutes), 30s (30 seconds). Filters by created_at field." + ) + } + ) + @api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. 
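With both count endpoints in place, the filters compose the same way on the advanced-chat and workflow routes. An illustrative request against the console API (base URL, app id, and credential values are hypothetical; authentication comes from the cookie-based login flow introduced later in this patch):

import httpx

base = "https://dify.example.com/console/api"
app_id = "00000000-0000-0000-0000-000000000000"

resp = httpx.get(
    f"{base}/apps/{app_id}/workflow-runs/count",
    params={"status": "failed", "time_range": "7d", "triggered_from": "app-run"},
    cookies={"access_token": "<jwt>", "csrf_token": "<csrf>"},  # cookie names assumed from the login flow
    headers={"X-CSRF-Token": "<csrf>"},  # double-submit copy of the CSRF cookie value
)
resp.raise_for_status()
print(resp.json())  # marshalled with workflow_run_count_fields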
Default: debugging"}) + @api.response(200, "Workflow runs count retrieved successfully", workflow_run_count_fields) + @setup_required + @login_required + @account_initialization_required + @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @marshal_with(workflow_run_count_fields) + def get(self, app_model: App): + """ + Get workflow runs count statistics + """ + args = _parse_workflow_run_count_args() + + # Default to DEBUGGING for workflow if not specified (backward compatibility) + triggered_from = ( + WorkflowRunTriggeredFrom(args.get("triggered_from")) + if args.get("triggered_from") + else WorkflowRunTriggeredFrom.DEBUGGING + ) + + workflow_run_service = WorkflowRunService() + result = workflow_run_service.get_workflow_runs_count( + app_model=app_model, + status=args.get("status"), + time_range=args.get("time_range"), + triggered_from=triggered_from, + ) return result diff --git a/api/controllers/console/app/workflow_statistic.py b/api/controllers/console/app/workflow_statistic.py index b8904bf3d9..bbea04640a 100644 --- a/api/controllers/console/app/workflow_statistic.py +++ b/api/controllers/console/app/workflow_statistic.py @@ -4,7 +4,6 @@ from decimal import Decimal import pytz import sqlalchemy as sa from flask import jsonify -from flask_login import current_user from flask_restx import Resource, reqparse from controllers.console import api, console_ns @@ -12,7 +11,7 @@ from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, setup_required from extensions.ext_database import db from libs.helper import DatetimeString -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.enums import WorkflowRunTriggeredFrom from models.model import AppMode @@ -29,11 +28,13 @@ class WorkflowDailyRunsStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -49,7 +50,7 @@ WHERE "app_id": app_model.id, "triggered_from": WorkflowRunTriggeredFrom.APP_RUN, } - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -97,11 +98,13 @@ class WorkflowDailyTerminalsStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -117,7 +120,7 @@ WHERE "app_id": app_model.id, "triggered_from": WorkflowRunTriggeredFrom.APP_RUN, } - + assert account.timezone is not None timezone = 
pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -165,11 +168,13 @@ class WorkflowDailyTokenCostStatistic(Resource): @login_required @account_initialization_required def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -185,7 +190,7 @@ WHERE "app_id": app_model.id, "triggered_from": WorkflowRunTriggeredFrom.APP_RUN, } - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc @@ -238,11 +243,13 @@ class WorkflowAverageAppInteractionStatistic(Resource): @account_initialization_required @get_app_model(mode=[AppMode.WORKFLOW]) def get(self, app_model): - account = current_user + account, _ = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") - parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + parser = ( + reqparse.RequestParser() + .add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + .add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args") + ) args = parser.parse_args() sql_query = """SELECT @@ -271,7 +278,7 @@ GROUP BY "app_id": app_model.id, "triggered_from": WorkflowRunTriggeredFrom.APP_RUN, } - + assert account.timezone is not None timezone = pytz.timezone(account.timezone) utc_timezone = pytz.utc diff --git a/api/controllers/console/app/wraps.py b/api/controllers/console/app/wraps.py index 44aba01820..9bb2718f89 100644 --- a/api/controllers/console/app/wraps.py +++ b/api/controllers/console/app/wraps.py @@ -4,28 +4,29 @@ from typing import ParamSpec, TypeVar, Union from controllers.console.app.error import AppNotFoundError from extensions.ext_database import db -from libs.login import current_user +from libs.login import current_account_with_tenant from models import App, AppMode -from models.account import Account P = ParamSpec("P") R = TypeVar("R") +P1 = ParamSpec("P1") +R1 = TypeVar("R1") def _load_app_model(app_id: str) -> App | None: - assert isinstance(current_user, Account) + _, current_tenant_id = current_account_with_tenant() app_model = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) return app_model def get_app_model(view: Callable[P, R] | None = None, *, mode: Union[AppMode, list[AppMode], None] = None): - def decorator(view_func: Callable[P, R]): + def decorator(view_func: Callable[P1, R1]): @wraps(view_func) - def decorated_view(*args: P.args, **kwargs: P.kwargs): + def decorated_view(*args: P1.args, **kwargs: P1.kwargs): if not kwargs.get("app_id"): raise ValueError("missing app_id in path parameters") diff --git a/api/controllers/console/auth/activate.py b/api/controllers/console/auth/activate.py index 76171e3f8a..2eeef079a1 100644 --- a/api/controllers/console/auth/activate.py +++ b/api/controllers/console/auth/activate.py @@ -7,18 +7,14 @@ from 
controllers.console.error import AlreadyActivateError from extensions.ext_database import db from libs.datetime_utils import naive_utc_now from libs.helper import StrLen, email, extract_remote_ip, timezone -from models.account import AccountStatus +from models import AccountStatus from services.account_service import AccountService, RegisterService -active_check_parser = reqparse.RequestParser() -active_check_parser.add_argument( - "workspace_id", type=str, required=False, nullable=True, location="args", help="Workspace ID" -) -active_check_parser.add_argument( - "email", type=email, required=False, nullable=True, location="args", help="Email address" -) -active_check_parser.add_argument( - "token", type=str, required=True, nullable=False, location="args", help="Activation token" +active_check_parser = ( + reqparse.RequestParser() + .add_argument("workspace_id", type=str, required=False, nullable=True, location="args", help="Workspace ID") + .add_argument("email", type=email, required=False, nullable=True, location="args", help="Email address") + .add_argument("token", type=str, required=True, nullable=False, location="args", help="Activation token") ) @@ -60,15 +56,15 @@ class ActivateCheckApi(Resource): return {"is_valid": False} -active_parser = reqparse.RequestParser() -active_parser.add_argument("workspace_id", type=str, required=False, nullable=True, location="json") -active_parser.add_argument("email", type=email, required=False, nullable=True, location="json") -active_parser.add_argument("token", type=str, required=True, nullable=False, location="json") -active_parser.add_argument("name", type=StrLen(30), required=True, nullable=False, location="json") -active_parser.add_argument( - "interface_language", type=supported_language, required=True, nullable=False, location="json" +active_parser = ( + reqparse.RequestParser() + .add_argument("workspace_id", type=str, required=False, nullable=True, location="json") + .add_argument("email", type=email, required=False, nullable=True, location="json") + .add_argument("token", type=str, required=True, nullable=False, location="json") + .add_argument("name", type=StrLen(30), required=True, nullable=False, location="json") + .add_argument("interface_language", type=supported_language, required=True, nullable=False, location="json") + .add_argument("timezone", type=timezone, required=True, nullable=False, location="json") ) -active_parser.add_argument("timezone", type=timezone, required=True, nullable=False, location="json") @console_ns.route("/activate") diff --git a/api/controllers/console/auth/data_source_bearer_auth.py b/api/controllers/console/auth/data_source_bearer_auth.py index 207303b212..a06435267b 100644 --- a/api/controllers/console/auth/data_source_bearer_auth.py +++ b/api/controllers/console/auth/data_source_bearer_auth.py @@ -1,10 +1,9 @@ -from flask_login import current_user from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden from controllers.console import console_ns from controllers.console.auth.error import ApiKeyAuthFailedError -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from services.auth.api_key_auth_service import ApiKeyAuthService from ..wraps import account_initialization_required, setup_required @@ -16,7 +15,8 @@ class ApiKeyAuthDataSource(Resource): @login_required @account_initialization_required def get(self): - data_source_api_key_bindings = ApiKeyAuthService.get_provider_auth_list(current_user.current_tenant_id) + _, 
current_tenant_id = current_account_with_tenant() + data_source_api_key_bindings = ApiKeyAuthService.get_provider_auth_list(current_tenant_id) if data_source_api_key_bindings: return { "sources": [ @@ -41,16 +41,20 @@ class ApiKeyAuthDataSourceBinding(Resource): @account_initialization_required def post(self): # The role of the current user in the table must be admin or owner + current_user, current_tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("category", type=str, required=True, nullable=False, location="json") - parser.add_argument("provider", type=str, required=True, nullable=False, location="json") - parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("category", type=str, required=True, nullable=False, location="json") + .add_argument("provider", type=str, required=True, nullable=False, location="json") + .add_argument("credentials", type=dict, required=True, nullable=False, location="json") + ) args = parser.parse_args() ApiKeyAuthService.validate_api_key_auth_args(args) try: - ApiKeyAuthService.create_provider_auth(current_user.current_tenant_id, args) + ApiKeyAuthService.create_provider_auth(current_tenant_id, args) except Exception as e: raise ApiKeyAuthFailedError(str(e)) return {"result": "success"}, 200 @@ -63,9 +67,11 @@ class ApiKeyAuthDataSourceBindingDelete(Resource): @account_initialization_required def delete(self, binding_id): # The role of the current user in the table must be admin or owner + current_user, current_tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() - ApiKeyAuthService.delete_provider_auth(current_user.current_tenant_id, binding_id) + ApiKeyAuthService.delete_provider_auth(current_tenant_id, binding_id) return {"result": "success"}, 204 diff --git a/api/controllers/console/auth/data_source_oauth.py b/api/controllers/console/auth/data_source_oauth.py index 6f1fd2f11a..0fd433d718 100644 --- a/api/controllers/console/auth/data_source_oauth.py +++ b/api/controllers/console/auth/data_source_oauth.py @@ -2,13 +2,12 @@ import logging import httpx from flask import current_app, redirect, request -from flask_login import current_user from flask_restx import Resource, fields from werkzeug.exceptions import Forbidden from configs import dify_config from controllers.console import api, console_ns -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from libs.oauth_data_source import NotionOAuth from ..wraps import account_initialization_required, setup_required @@ -45,6 +44,7 @@ class OAuthDataSource(Resource): @api.response(403, "Admin privileges required") def get(self, provider: str): # The role of the current user in the table must be admin or owner + current_user, _ = current_account_with_tenant() if not current_user.is_admin_or_owner: raise Forbidden() OAUTH_DATASOURCE_PROVIDERS = get_oauth_providers() diff --git a/api/controllers/console/auth/email_register.py b/api/controllers/console/auth/email_register.py index d3613d9183..fe2bb54e0b 100644 --- a/api/controllers/console/auth/email_register.py +++ b/api/controllers/console/auth/email_register.py @@ -19,7 +19,7 @@ from controllers.console.wraps import email_password_login_enabled, email_regist from extensions.ext_database import db from libs.helper import email, extract_remote_ip from libs.password 
import valid_password -from models.account import Account +from models import Account from services.account_service import AccountService from services.billing_service import BillingService from services.errors.account import AccountNotFoundError, AccountRegisterError @@ -31,9 +31,11 @@ class EmailRegisterSendEmailApi(Resource): @email_password_login_enabled @email_register_enabled def post(self): - parser = reqparse.RequestParser() - parser.add_argument("email", type=email, required=True, location="json") - parser.add_argument("language", type=str, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=email, required=True, location="json") + .add_argument("language", type=str, required=False, location="json") + ) args = parser.parse_args() ip_address = extract_remote_ip(request) @@ -59,10 +61,12 @@ class EmailRegisterCheckApi(Resource): @email_password_login_enabled @email_register_enabled def post(self): - parser = reqparse.RequestParser() - parser.add_argument("email", type=str, required=True, location="json") - parser.add_argument("code", type=str, required=True, location="json") - parser.add_argument("token", type=str, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=str, required=True, location="json") + .add_argument("code", type=str, required=True, location="json") + .add_argument("token", type=str, required=True, nullable=False, location="json") + ) args = parser.parse_args() user_email = args["email"] @@ -100,10 +104,12 @@ class EmailRegisterResetApi(Resource): @email_password_login_enabled @email_register_enabled def post(self): - parser = reqparse.RequestParser() - parser.add_argument("token", type=str, required=True, nullable=False, location="json") - parser.add_argument("new_password", type=valid_password, required=True, nullable=False, location="json") - parser.add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("token", type=str, required=True, nullable=False, location="json") + .add_argument("new_password", type=valid_password, required=True, nullable=False, location="json") + .add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json") + ) args = parser.parse_args() # Validate passwords match diff --git a/api/controllers/console/auth/forgot_password.py b/api/controllers/console/auth/forgot_password.py index 704bcf8fb8..6be6ad51fe 100644 --- a/api/controllers/console/auth/forgot_password.py +++ b/api/controllers/console/auth/forgot_password.py @@ -20,7 +20,7 @@ from events.tenant_event import tenant_was_created from extensions.ext_database import db from libs.helper import email, extract_remote_ip from libs.password import hash_password, valid_password -from models.account import Account +from models import Account from services.account_service import AccountService, TenantService from services.feature_service import FeatureService @@ -54,9 +54,11 @@ class ForgotPasswordSendEmailApi(Resource): @setup_required @email_password_login_enabled def post(self): - parser = reqparse.RequestParser() - parser.add_argument("email", type=email, required=True, location="json") - parser.add_argument("language", type=str, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=email, required=True, location="json") + .add_argument("language", type=str, required=False, 
location="json") + ) args = parser.parse_args() ip_address = extract_remote_ip(request) @@ -111,10 +113,12 @@ class ForgotPasswordCheckApi(Resource): @setup_required @email_password_login_enabled def post(self): - parser = reqparse.RequestParser() - parser.add_argument("email", type=str, required=True, location="json") - parser.add_argument("code", type=str, required=True, location="json") - parser.add_argument("token", type=str, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=str, required=True, location="json") + .add_argument("code", type=str, required=True, location="json") + .add_argument("token", type=str, required=True, nullable=False, location="json") + ) args = parser.parse_args() user_email = args["email"] @@ -169,10 +173,12 @@ class ForgotPasswordResetApi(Resource): @setup_required @email_password_login_enabled def post(self): - parser = reqparse.RequestParser() - parser.add_argument("token", type=str, required=True, nullable=False, location="json") - parser.add_argument("new_password", type=valid_password, required=True, nullable=False, location="json") - parser.add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("token", type=str, required=True, nullable=False, location="json") + .add_argument("new_password", type=valid_password, required=True, nullable=False, location="json") + .add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json") + ) args = parser.parse_args() # Validate passwords match diff --git a/api/controllers/console/auth/login.py b/api/controllers/console/auth/login.py index ba614aa828..277f9a60a8 100644 --- a/api/controllers/console/auth/login.py +++ b/api/controllers/console/auth/login.py @@ -1,7 +1,5 @@ -from typing import cast - import flask_login -from flask import request +from flask import make_response, request from flask_restx import Resource, reqparse import services @@ -26,7 +24,17 @@ from controllers.console.error import ( from controllers.console.wraps import email_password_login_enabled, setup_required from events.tenant_event import tenant_was_created from libs.helper import email, extract_remote_ip -from models.account import Account +from libs.login import current_account_with_tenant +from libs.token import ( + clear_access_token_from_cookie, + clear_csrf_token_from_cookie, + clear_refresh_token_from_cookie, + extract_access_token, + extract_csrf_token, + set_access_token_to_cookie, + set_csrf_token_to_cookie, + set_refresh_token_to_cookie, +) from services.account_service import AccountService, RegisterService, TenantService from services.billing_service import BillingService from services.errors.account import AccountRegisterError @@ -42,11 +50,13 @@ class LoginApi(Resource): @email_password_login_enabled def post(self): """Authenticate user and login.""" - parser = reqparse.RequestParser() - parser.add_argument("email", type=email, required=True, location="json") - parser.add_argument("password", type=str, required=True, location="json") - parser.add_argument("remember_me", type=bool, required=False, default=False, location="json") - parser.add_argument("invite_token", type=str, required=False, default=None, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=email, required=True, location="json") + .add_argument("password", type=str, required=True, location="json") + 
.add_argument("remember_me", type=bool, required=False, default=False, location="json") + .add_argument("invite_token", type=str, required=False, default=None, location="json") + ) args = parser.parse_args() if dify_config.BILLING_ENABLED and BillingService.is_email_in_freeze(args["email"]): @@ -89,19 +99,36 @@ class LoginApi(Resource): token_pair = AccountService.login(account=account, ip_address=extract_remote_ip(request)) AccountService.reset_login_error_rate_limit(args["email"]) - return {"result": "success", "data": token_pair.model_dump()} + + # Create response with cookies instead of returning tokens in body + response = make_response({"result": "success"}) + + set_access_token_to_cookie(request, response, token_pair.access_token) + set_refresh_token_to_cookie(request, response, token_pair.refresh_token) + set_csrf_token_to_cookie(request, response, token_pair.csrf_token) + + return response @console_ns.route("/logout") class LogoutApi(Resource): @setup_required - def get(self): - account = cast(Account, flask_login.current_user) + def post(self): + current_user, _ = current_account_with_tenant() + account = current_user if isinstance(account, flask_login.AnonymousUserMixin): - return {"result": "success"} - AccountService.logout(account=account) - flask_login.logout_user() - return {"result": "success"} + response = make_response({"result": "success"}) + else: + AccountService.logout(account=account) + flask_login.logout_user() + response = make_response({"result": "success"}) + + # Clear cookies on logout + clear_access_token_from_cookie(response) + clear_refresh_token_from_cookie(response) + clear_csrf_token_from_cookie(response) + + return response @console_ns.route("/reset-password") @@ -109,9 +136,11 @@ class ResetPasswordSendEmailApi(Resource): @setup_required @email_password_login_enabled def post(self): - parser = reqparse.RequestParser() - parser.add_argument("email", type=email, required=True, location="json") - parser.add_argument("language", type=str, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=email, required=True, location="json") + .add_argument("language", type=str, required=False, location="json") + ) args = parser.parse_args() if args["language"] is not None and args["language"] == "zh-Hans": @@ -137,9 +166,11 @@ class ResetPasswordSendEmailApi(Resource): class EmailCodeLoginSendEmailApi(Resource): @setup_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("email", type=email, required=True, location="json") - parser.add_argument("language", type=str, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=email, required=True, location="json") + .add_argument("language", type=str, required=False, location="json") + ) args = parser.parse_args() ip_address = extract_remote_ip(request) @@ -170,10 +201,12 @@ class EmailCodeLoginSendEmailApi(Resource): class EmailCodeLoginApi(Resource): @setup_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("email", type=str, required=True, location="json") - parser.add_argument("code", type=str, required=True, location="json") - parser.add_argument("token", type=str, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=str, required=True, location="json") + .add_argument("code", type=str, required=True, location="json") + .add_argument("token", type=str, required=True, location="json") + ) args = parser.parse_args() 
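From here on, login, logout, and token refresh carry credentials only in cookies. The libs.token helpers imported above are not shown in this diff; a minimal sketch of one setter/clearer pair, assuming HTTP-only cookies for the access and refresh tokens and a script-readable cookie for the CSRF double-submit value:

from flask import Request, Response

ACCESS_TOKEN_COOKIE = "access_token"  # assumed name; the real constant lives in api/constants

def set_access_token_to_cookie(request: Request, response: Response, token: str) -> None:
    # HTTP-only keeps the token out of reach of page scripts;
    # Secure is tied to whether the request arrived over HTTPS.
    response.set_cookie(
        ACCESS_TOKEN_COOKIE,
        token,
        httponly=True,
        secure=request.is_secure,
        samesite="Lax",
    )

def clear_access_token_from_cookie(response: Response) -> None:
    # A cookie is deleted by sending it again with an expiry in the past
    response.delete_cookie(ACCESS_TOKEN_COOKIE)

The CSRF cookie would be set the same way but without httponly, so the frontend can echo it back in the X-CSRF-Token header on state-changing requests.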
user_email = args["email"] @@ -220,18 +253,46 @@ class EmailCodeLoginApi(Resource): raise WorkspacesLimitExceeded() token_pair = AccountService.login(account, ip_address=extract_remote_ip(request)) AccountService.reset_login_error_rate_limit(args["email"]) - return {"result": "success", "data": token_pair.model_dump()} + + # Create response with cookies instead of returning tokens in body + response = make_response({"result": "success"}) + + set_csrf_token_to_cookie(request, response, token_pair.csrf_token) + # Set HTTP-only secure cookies for tokens + set_access_token_to_cookie(request, response, token_pair.access_token) + set_refresh_token_to_cookie(request, response, token_pair.refresh_token) + return response @console_ns.route("/refresh-token") class RefreshTokenApi(Resource): def post(self): - parser = reqparse.RequestParser() - parser.add_argument("refresh_token", type=str, required=True, location="json") - args = parser.parse_args() + # Get refresh token from cookie instead of request body + refresh_token = request.cookies.get("refresh_token") + + if not refresh_token: + return {"result": "fail", "message": "No refresh token provided"}, 401 try: - new_token_pair = AccountService.refresh_token(args["refresh_token"]) - return {"result": "success", "data": new_token_pair.model_dump()} + new_token_pair = AccountService.refresh_token(refresh_token) + + # Create response with new cookies + response = make_response({"result": "success"}) + + # Update cookies with new tokens + set_csrf_token_to_cookie(request, response, new_token_pair.csrf_token) + set_access_token_to_cookie(request, response, new_token_pair.access_token) + set_refresh_token_to_cookie(request, response, new_token_pair.refresh_token) + return response except Exception as e: - return {"result": "fail", "data": str(e)}, 401 + return {"result": "fail", "message": str(e)}, 401 + + +# this api helps frontend to check whether user is authenticated +# TODO: remove in the future. 
frontend should redirect to login page by catching 401 status +@console_ns.route("/login/status") +class LoginStatus(Resource): + def get(self): + token = extract_access_token(request) + csrf_token = extract_csrf_token(request) + return {"logged_in": bool(token) and bool(csrf_token)} diff --git a/api/controllers/console/auth/oauth.py b/api/controllers/console/auth/oauth.py index 4efeceb676..29653b32ec 100644 --- a/api/controllers/console/auth/oauth.py +++ b/api/controllers/console/auth/oauth.py @@ -14,8 +14,12 @@ from extensions.ext_database import db from libs.datetime_utils import naive_utc_now from libs.helper import extract_remote_ip from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo -from models import Account -from models.account import AccountStatus +from libs.token import ( + set_access_token_to_cookie, + set_csrf_token_to_cookie, + set_refresh_token_to_cookie, +) +from models import Account, AccountStatus from services.account_service import AccountService, RegisterService, TenantService from services.billing_service import BillingService from services.errors.account import AccountNotFoundError, AccountRegisterError @@ -153,9 +157,12 @@ class OAuthCallback(Resource): ip_address=extract_remote_ip(request), ) - return redirect( - f"{dify_config.CONSOLE_WEB_URL}?access_token={token_pair.access_token}&refresh_token={token_pair.refresh_token}" - ) + response = redirect(f"{dify_config.CONSOLE_WEB_URL}") + + set_access_token_to_cookie(request, response, token_pair.access_token) + set_refresh_token_to_cookie(request, response, token_pair.refresh_token) + set_csrf_token_to_cookie(request, response, token_pair.csrf_token) + return response def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Account | None: diff --git a/api/controllers/console/auth/oauth_server.py b/api/controllers/console/auth/oauth_server.py index 46281860ae..5e12aa7d03 100644 --- a/api/controllers/console/auth/oauth_server.py +++ b/api/controllers/console/auth/oauth_server.py @@ -1,16 +1,15 @@ from collections.abc import Callable from functools import wraps -from typing import Concatenate, ParamSpec, TypeVar, cast +from typing import Concatenate, ParamSpec, TypeVar -import flask_login from flask import jsonify, request from flask_restx import Resource, reqparse from werkzeug.exceptions import BadRequest, NotFound from controllers.console.wraps import account_initialization_required, setup_required from core.model_runtime.utils.encoders import jsonable_encoder -from libs.login import login_required -from models.account import Account +from libs.login import current_account_with_tenant, login_required +from models import Account from models.model import OAuthProviderApp from services.oauth_server import OAUTH_ACCESS_TOKEN_EXPIRES_IN, OAuthGrantType, OAuthServerService @@ -24,8 +23,7 @@ T = TypeVar("T") def oauth_server_client_id_required(view: Callable[Concatenate[T, OAuthProviderApp, P], R]): @wraps(view) def decorated(self: T, *args: P.args, **kwargs: P.kwargs): - parser = reqparse.RequestParser() - parser.add_argument("client_id", type=str, required=True, location="json") + parser = reqparse.RequestParser().add_argument("client_id", type=str, required=True, location="json") parsed_args = parser.parse_args() client_id = parsed_args.get("client_id") if not client_id: @@ -91,8 +89,7 @@ class OAuthServerAppApi(Resource): @setup_required @oauth_server_client_id_required def post(self, oauth_provider_app: OAuthProviderApp): - parser = reqparse.RequestParser() - 
parser.add_argument("redirect_uri", type=str, required=True, location="json") + parser = reqparse.RequestParser().add_argument("redirect_uri", type=str, required=True, location="json") parsed_args = parser.parse_args() redirect_uri = parsed_args.get("redirect_uri") @@ -116,7 +113,8 @@ class OAuthServerUserAuthorizeApi(Resource): @account_initialization_required @oauth_server_client_id_required def post(self, oauth_provider_app: OAuthProviderApp): - account = cast(Account, flask_login.current_user) + current_user, _ = current_account_with_tenant() + account = current_user user_account_id = account.id code = OAuthServerService.sign_oauth_authorization_code(oauth_provider_app.client_id, user_account_id) @@ -132,12 +130,14 @@ class OAuthServerUserTokenApi(Resource): @setup_required @oauth_server_client_id_required def post(self, oauth_provider_app: OAuthProviderApp): - parser = reqparse.RequestParser() - parser.add_argument("grant_type", type=str, required=True, location="json") - parser.add_argument("code", type=str, required=False, location="json") - parser.add_argument("client_secret", type=str, required=False, location="json") - parser.add_argument("redirect_uri", type=str, required=False, location="json") - parser.add_argument("refresh_token", type=str, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("grant_type", type=str, required=True, location="json") + .add_argument("code", type=str, required=False, location="json") + .add_argument("client_secret", type=str, required=False, location="json") + .add_argument("redirect_uri", type=str, required=False, location="json") + .add_argument("refresh_token", type=str, required=False, location="json") + ) parsed_args = parser.parse_args() try: diff --git a/api/controllers/console/billing/billing.py b/api/controllers/console/billing/billing.py index fa89f45122..705f5970dd 100644 --- a/api/controllers/console/billing/billing.py +++ b/api/controllers/console/billing/billing.py @@ -2,8 +2,7 @@ from flask_restx import Resource, reqparse from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, only_edition_cloud, setup_required -from libs.login import current_user, login_required -from models.model import Account +from libs.login import current_account_with_tenant, login_required from services.billing_service import BillingService @@ -14,17 +13,15 @@ class Subscription(Resource): @account_initialization_required @only_edition_cloud def get(self): - parser = reqparse.RequestParser() - parser.add_argument("plan", type=str, required=True, location="args", choices=["professional", "team"]) - parser.add_argument("interval", type=str, required=True, location="args", choices=["month", "year"]) - args = parser.parse_args() - assert isinstance(current_user, Account) - - BillingService.is_tenant_owner_or_admin(current_user) - assert current_user.current_tenant_id is not None - return BillingService.get_subscription( - args["plan"], args["interval"], current_user.email, current_user.current_tenant_id + current_user, current_tenant_id = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("plan", type=str, required=True, location="args", choices=["professional", "team"]) + .add_argument("interval", type=str, required=True, location="args", choices=["month", "year"]) ) + args = parser.parse_args() + BillingService.is_tenant_owner_or_admin(current_user) + return BillingService.get_subscription(args["plan"], args["interval"], 
current_user.email, current_tenant_id) @console_ns.route("/billing/invoices") @@ -34,7 +31,6 @@ class Invoices(Resource): @account_initialization_required @only_edition_cloud def get(self): - assert isinstance(current_user, Account) + current_user, current_tenant_id = current_account_with_tenant() BillingService.is_tenant_owner_or_admin(current_user) - assert current_user.current_tenant_id is not None - return BillingService.get_invoices(current_user.email, current_user.current_tenant_id) + return BillingService.get_invoices(current_user.email, current_tenant_id) diff --git a/api/controllers/console/billing/compliance.py b/api/controllers/console/billing/compliance.py index c0d104e0d4..2a6889968c 100644 --- a/api/controllers/console/billing/compliance.py +++ b/api/controllers/console/billing/compliance.py @@ -2,8 +2,7 @@ from flask import request from flask_restx import Resource, reqparse from libs.helper import extract_remote_ip -from libs.login import current_user, login_required -from models.account import Account +from libs.login import current_account_with_tenant, login_required from services.billing_service import BillingService from .. import console_ns @@ -17,19 +16,16 @@ class ComplianceApi(Resource): @account_initialization_required @only_edition_cloud def get(self): - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None - parser = reqparse.RequestParser() - parser.add_argument("doc_name", type=str, required=True, location="args") + current_user, current_tenant_id = current_account_with_tenant() + parser = reqparse.RequestParser().add_argument("doc_name", type=str, required=True, location="args") args = parser.parse_args() ip_address = extract_remote_ip(request) device_info = request.headers.get("User-Agent", "Unknown device") - return BillingService.get_compliance_download_link( doc_name=args.doc_name, account_id=current_user.id, - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, ip=ip_address, device_info=device_info, ) diff --git a/api/controllers/console/datasets/data_source.py b/api/controllers/console/datasets/data_source.py index 6d9d675e87..ef66053075 100644 --- a/api/controllers/console/datasets/data_source.py +++ b/api/controllers/console/datasets/data_source.py @@ -3,7 +3,6 @@ from collections.abc import Generator from typing import cast from flask import request -from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from sqlalchemy import select from sqlalchemy.orm import Session @@ -20,7 +19,7 @@ from core.rag.extractor.notion_extractor import NotionExtractor from extensions.ext_database import db from fields.data_source_fields import integrate_list_fields, integrate_notion_info_list_fields from libs.datetime_utils import naive_utc_now -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models import DataSourceOauthBinding, Document from services.dataset_service import DatasetService, DocumentService from services.datasource_provider_service import DatasourceProviderService @@ -37,10 +36,12 @@ class DataSourceApi(Resource): @account_initialization_required @marshal_with(integrate_list_fields) def get(self): + _, current_tenant_id = current_account_with_tenant() + # get workspace data source integrates data_source_integrates = db.session.scalars( select(DataSourceOauthBinding).where( - DataSourceOauthBinding.tenant_id == current_user.current_tenant_id, + DataSourceOauthBinding.tenant_id == 
current_tenant_id, DataSourceOauthBinding.disabled == False, ) ).all() @@ -120,13 +121,15 @@ class DataSourceNotionListApi(Resource): @account_initialization_required @marshal_with(integrate_notion_info_list_fields) def get(self): + current_user, current_tenant_id = current_account_with_tenant() + dataset_id = request.args.get("dataset_id", default=None, type=str) credential_id = request.args.get("credential_id", default=None, type=str) if not credential_id: raise ValueError("Credential id is required.") datasource_provider_service = DatasourceProviderService() credential = datasource_provider_service.get_datasource_credentials( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, credential_id=credential_id, provider="notion_datasource", plugin_id="langgenius/notion_datasource", @@ -146,7 +149,7 @@ class DataSourceNotionListApi(Resource): documents = session.scalars( select(Document).filter_by( dataset_id=dataset_id, - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, data_source_type="notion_import", enabled=True, ) @@ -161,7 +164,7 @@ class DataSourceNotionListApi(Resource): datasource_runtime = DatasourceManager.get_datasource_runtime( provider_id="langgenius/notion_datasource/notion_datasource", datasource_name="notion_datasource", - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, datasource_type=DatasourceProviderType.ONLINE_DOCUMENT, ) datasource_provider_service = DatasourceProviderService() @@ -210,12 +213,14 @@ class DataSourceNotionApi(Resource): @login_required @account_initialization_required def get(self, workspace_id, page_id, page_type): + _, current_tenant_id = current_account_with_tenant() + credential_id = request.args.get("credential_id", default=None, type=str) if not credential_id: raise ValueError("Credential id is required.") datasource_provider_service = DatasourceProviderService() credential = datasource_provider_service.get_datasource_credentials( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, credential_id=credential_id, provider="notion_datasource", plugin_id="langgenius/notion_datasource", @@ -229,7 +234,7 @@ class DataSourceNotionApi(Resource): notion_obj_id=page_id, notion_page_type=page_type, notion_access_token=credential.get("integration_secret"), - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, ) text_docs = extractor.extract() @@ -239,12 +244,14 @@ class DataSourceNotionApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("notion_info_list", type=list, required=True, nullable=True, location="json") - parser.add_argument("process_rule", type=dict, required=True, nullable=True, location="json") - parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json") - parser.add_argument( - "doc_language", type=str, default="English", required=False, nullable=False, location="json" + _, current_tenant_id = current_account_with_tenant() + + parser = ( + reqparse.RequestParser() + .add_argument("notion_info_list", type=list, required=True, nullable=True, location="json") + .add_argument("process_rule", type=dict, required=True, nullable=True, location="json") + .add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json") + .add_argument("doc_language", type=str, default="English", required=False, nullable=False, location="json") ) args = parser.parse_args() # 
validate args @@ -263,7 +270,7 @@ class DataSourceNotionApi(Resource): "notion_workspace_id": workspace_id, "notion_obj_id": page["page_id"], "notion_page_type": page["type"], - "tenant_id": current_user.current_tenant_id, + "tenant_id": current_tenant_id, } ), document_model=args["doc_form"], @@ -271,7 +278,7 @@ class DataSourceNotionApi(Resource): extract_settings.append(extract_setting) indexing_runner = IndexingRunner() response = indexing_runner.indexing_estimate( - current_user.current_tenant_id, + current_tenant_id, extract_settings, args["process_rule"], args["doc_form"], diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index f86c5dfc3c..50bf48450c 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -1,7 +1,6 @@ from typing import Any, cast from flask import request -from flask_login import current_user from flask_restx import Resource, fields, marshal, marshal_with, reqparse from sqlalchemy import select from werkzeug.exceptions import Forbidden, NotFound @@ -30,10 +29,9 @@ from extensions.ext_database import db from fields.app_fields import related_app_list from fields.dataset_fields import dataset_detail_fields, dataset_query_detail_fields from fields.document_fields import document_status_fields -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from libs.validators import validate_description_length from models import ApiToken, Dataset, Document, DocumentSegment, UploadFile -from models.account import Account from models.dataset import DatasetPermissionEnum from models.provider_ids import ModelProviderID from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService @@ -138,6 +136,7 @@ class DatasetListApi(Resource): @account_initialization_required @enterprise_license_required def get(self): + current_user, current_tenant_id = current_account_with_tenant() page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) ids = request.args.getlist("ids") @@ -146,15 +145,15 @@ class DatasetListApi(Resource): tag_ids = request.args.getlist("tag_ids") include_all = request.args.get("include_all", default="false").lower() == "true" if ids: - datasets, total = DatasetService.get_datasets_by_ids(ids, current_user.current_tenant_id) + datasets, total = DatasetService.get_datasets_by_ids(ids, current_tenant_id) else: datasets, total = DatasetService.get_datasets( - page, limit, current_user.current_tenant_id, current_user, search, tag_ids, include_all + page, limit, current_tenant_id, current_user, search, tag_ids, include_all ) # check embedding setting provider_manager = ProviderManager() - configurations = provider_manager.get_configurations(tenant_id=current_user.current_tenant_id) + configurations = provider_manager.get_configurations(tenant_id=current_tenant_id) embedding_models = configurations.get_models(model_type=ModelType.TEXT_EMBEDDING, only_active=True) @@ -207,50 +206,53 @@ class DatasetListApi(Resource): @account_initialization_required @cloud_edition_billing_rate_limit_check("knowledge") def post(self): - parser = reqparse.RequestParser() - parser.add_argument( - "name", - nullable=False, - required=True, - help="type is required. 
Name must be between 1 to 40 characters.", - type=_validate_name, - ) - parser.add_argument( - "description", - type=validate_description_length, - nullable=True, - required=False, - default="", - ) - parser.add_argument( - "indexing_technique", - type=str, - location="json", - choices=Dataset.INDEXING_TECHNIQUE_LIST, - nullable=True, - help="Invalid indexing technique.", - ) - parser.add_argument( - "external_knowledge_api_id", - type=str, - nullable=True, - required=False, - ) - parser.add_argument( - "provider", - type=str, - nullable=True, - choices=Dataset.PROVIDER_LIST, - required=False, - default="vendor", - ) - parser.add_argument( - "external_knowledge_id", - type=str, - nullable=True, - required=False, + parser = ( + reqparse.RequestParser() + .add_argument( + "name", + nullable=False, + required=True, + help="type is required. Name must be between 1 to 40 characters.", + type=_validate_name, + ) + .add_argument( + "description", + type=validate_description_length, + nullable=True, + required=False, + default="", + ) + .add_argument( + "indexing_technique", + type=str, + location="json", + choices=Dataset.INDEXING_TECHNIQUE_LIST, + nullable=True, + help="Invalid indexing technique.", + ) + .add_argument( + "external_knowledge_api_id", + type=str, + nullable=True, + required=False, + ) + .add_argument( + "provider", + type=str, + nullable=True, + choices=Dataset.PROVIDER_LIST, + required=False, + default="vendor", + ) + .add_argument( + "external_knowledge_id", + type=str, + nullable=True, + required=False, + ) ) args = parser.parse_args() + current_user, current_tenant_id = current_account_with_tenant() # The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator if not current_user.is_dataset_editor: @@ -258,11 +260,11 @@ class DatasetListApi(Resource): try: dataset = DatasetService.create_empty_dataset( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, name=args["name"], description=args["description"], indexing_technique=args["indexing_technique"], - account=cast(Account, current_user), + account=current_user, permission=DatasetPermissionEnum.ONLY_ME, provider=args["provider"], external_knowledge_api_id=args["external_knowledge_api_id"], @@ -286,6 +288,7 @@ class DatasetApi(Resource): @login_required @account_initialization_required def get(self, dataset_id): + current_user, current_tenant_id = current_account_with_tenant() dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: @@ -305,7 +308,7 @@ class DatasetApi(Resource): # check embedding setting provider_manager = ProviderManager() - configurations = provider_manager.get_configurations(tenant_id=current_user.current_tenant_id) + configurations = provider_manager.get_configurations(tenant_id=current_tenant_id) embedding_models = configurations.get_models(model_type=ModelType.TEXT_EMBEDDING, only_active=True) @@ -351,73 +354,76 @@ class DatasetApi(Resource): if dataset is None: raise NotFound("Dataset not found.") - parser = reqparse.RequestParser() - parser.add_argument( - "name", - nullable=False, - help="type is required. 
Name must be between 1 to 40 characters.", - type=_validate_name, - ) - parser.add_argument("description", location="json", store_missing=False, type=validate_description_length) - parser.add_argument( - "indexing_technique", - type=str, - location="json", - choices=Dataset.INDEXING_TECHNIQUE_LIST, - nullable=True, - help="Invalid indexing technique.", - ) - parser.add_argument( - "permission", - type=str, - location="json", - choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM), - help="Invalid permission.", - ) - parser.add_argument("embedding_model", type=str, location="json", help="Invalid embedding model.") - parser.add_argument( - "embedding_model_provider", type=str, location="json", help="Invalid embedding model provider." - ) - parser.add_argument("retrieval_model", type=dict, location="json", help="Invalid retrieval model.") - parser.add_argument("partial_member_list", type=list, location="json", help="Invalid parent user list.") - - parser.add_argument( - "external_retrieval_model", - type=dict, - required=False, - nullable=True, - location="json", - help="Invalid external retrieval model.", - ) - - parser.add_argument( - "external_knowledge_id", - type=str, - required=False, - nullable=True, - location="json", - help="Invalid external knowledge id.", - ) - - parser.add_argument( - "external_knowledge_api_id", - type=str, - required=False, - nullable=True, - location="json", - help="Invalid external knowledge api id.", - ) - - parser.add_argument( - "icon_info", - type=dict, - required=False, - nullable=True, - location="json", - help="Invalid icon info.", + parser = ( + reqparse.RequestParser() + .add_argument( + "name", + nullable=False, + help="type is required. Name must be between 1 to 40 characters.", + type=_validate_name, + ) + .add_argument("description", location="json", store_missing=False, type=validate_description_length) + .add_argument( + "indexing_technique", + type=str, + location="json", + choices=Dataset.INDEXING_TECHNIQUE_LIST, + nullable=True, + help="Invalid indexing technique.", + ) + .add_argument( + "permission", + type=str, + location="json", + choices=( + DatasetPermissionEnum.ONLY_ME, + DatasetPermissionEnum.ALL_TEAM, + DatasetPermissionEnum.PARTIAL_TEAM, + ), + help="Invalid permission.", + ) + .add_argument("embedding_model", type=str, location="json", help="Invalid embedding model.") + .add_argument( + "embedding_model_provider", type=str, location="json", help="Invalid embedding model provider." 
+ ) + .add_argument("retrieval_model", type=dict, location="json", help="Invalid retrieval model.") + .add_argument("partial_member_list", type=list, location="json", help="Invalid parent user list.") + .add_argument( + "external_retrieval_model", + type=dict, + required=False, + nullable=True, + location="json", + help="Invalid external retrieval model.", + ) + .add_argument( + "external_knowledge_id", + type=str, + required=False, + nullable=True, + location="json", + help="Invalid external knowledge id.", + ) + .add_argument( + "external_knowledge_api_id", + type=str, + required=False, + nullable=True, + location="json", + help="Invalid external knowledge api id.", + ) + .add_argument( + "icon_info", + type=dict, + required=False, + nullable=True, + location="json", + help="Invalid icon info.", + ) ) args = parser.parse_args() data = request.get_json() + current_user, current_tenant_id = current_account_with_tenant() # check embedding model setting if ( @@ -440,7 +446,7 @@ class DatasetApi(Resource): raise NotFound("Dataset not found.") result_data = cast(dict[str, Any], marshal(dataset, dataset_detail_fields)) - tenant_id = current_user.current_tenant_id + tenant_id = current_tenant_id if data.get("partial_member_list") and data.get("permission") == "partial_members": DatasetPermissionService.update_partial_member_list( @@ -464,9 +470,9 @@ class DatasetApi(Resource): @cloud_edition_billing_rate_limit_check("knowledge") def delete(self, dataset_id): dataset_id_str = str(dataset_id) + current_user, _ = current_account_with_tenant() - # The role of the current user in the ta table must be admin, owner, or editor - if not (current_user.is_editor or current_user.is_dataset_operator): + if not (current_user.has_edit_permission or current_user.is_dataset_operator): raise Forbidden() try: @@ -505,6 +511,7 @@ class DatasetQueryApi(Resource): @login_required @account_initialization_required def get(self, dataset_id): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: @@ -539,32 +546,31 @@ class DatasetIndexingEstimateApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("info_list", type=dict, required=True, nullable=True, location="json") - parser.add_argument("process_rule", type=dict, required=True, nullable=True, location="json") - parser.add_argument( - "indexing_technique", - type=str, - required=True, - choices=Dataset.INDEXING_TECHNIQUE_LIST, - nullable=True, - location="json", - ) - parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json") - parser.add_argument("dataset_id", type=str, required=False, nullable=False, location="json") - parser.add_argument( - "doc_language", type=str, default="English", required=False, nullable=False, location="json" + parser = ( + reqparse.RequestParser() + .add_argument("info_list", type=dict, required=True, nullable=True, location="json") + .add_argument("process_rule", type=dict, required=True, nullable=True, location="json") + .add_argument( + "indexing_technique", + type=str, + required=True, + choices=Dataset.INDEXING_TECHNIQUE_LIST, + nullable=True, + location="json", + ) + .add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json") + .add_argument("dataset_id", type=str, required=False, nullable=False, location="json") + .add_argument("doc_language", type=str, 
default="English", required=False, nullable=False, location="json") ) args = parser.parse_args() + _, current_tenant_id = current_account_with_tenant() # validate args DocumentService.estimate_args_validate(args) extract_settings = [] if args["info_list"]["data_source_type"] == "upload_file": file_ids = args["info_list"]["file_info_list"]["file_ids"] file_details = db.session.scalars( - select(UploadFile).where( - UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id.in_(file_ids) - ) + select(UploadFile).where(UploadFile.tenant_id == current_tenant_id, UploadFile.id.in_(file_ids)) ).all() if file_details is None: @@ -592,7 +598,7 @@ class DatasetIndexingEstimateApi(Resource): "notion_workspace_id": workspace_id, "notion_obj_id": page["page_id"], "notion_page_type": page["type"], - "tenant_id": current_user.current_tenant_id, + "tenant_id": current_tenant_id, } ), document_model=args["doc_form"], @@ -608,7 +614,7 @@ class DatasetIndexingEstimateApi(Resource): "provider": website_info_list["provider"], "job_id": website_info_list["job_id"], "url": url, - "tenant_id": current_user.current_tenant_id, + "tenant_id": current_tenant_id, "mode": "crawl", "only_main_content": website_info_list["only_main_content"], } @@ -621,7 +627,7 @@ class DatasetIndexingEstimateApi(Resource): indexing_runner = IndexingRunner() try: response = indexing_runner.indexing_estimate( - current_user.current_tenant_id, + current_tenant_id, extract_settings, args["process_rule"], args["doc_form"], @@ -652,6 +658,7 @@ class DatasetRelatedAppListApi(Resource): @account_initialization_required @marshal_with(related_app_list) def get(self, dataset_id): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: @@ -683,11 +690,10 @@ class DatasetIndexingStatusApi(Resource): @login_required @account_initialization_required def get(self, dataset_id): + _, current_tenant_id = current_account_with_tenant() dataset_id = str(dataset_id) documents = db.session.scalars( - select(Document).where( - Document.dataset_id == dataset_id, Document.tenant_id == current_user.current_tenant_id - ) + select(Document).where(Document.dataset_id == dataset_id, Document.tenant_id == current_tenant_id) ).all() documents_status = [] for document in documents: @@ -739,10 +745,9 @@ class DatasetApiKeyApi(Resource): @account_initialization_required @marshal_with(api_key_list) def get(self): + _, current_tenant_id = current_account_with_tenant() keys = db.session.scalars( - select(ApiToken).where( - ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id - ) + select(ApiToken).where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_tenant_id) ).all() return {"items": keys} @@ -752,12 +757,13 @@ class DatasetApiKeyApi(Resource): @marshal_with(api_key_fields) def post(self): # The role of the current user in the ta table must be admin or owner + current_user, current_tenant_id = current_account_with_tenant() if not current_user.is_admin_or_owner: raise Forbidden() current_key_count = ( db.session.query(ApiToken) - .where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id) + .where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_tenant_id) .count() ) @@ -770,7 +776,7 @@ class DatasetApiKeyApi(Resource): key = ApiToken.generate_api_key(self.token_prefix, 24) api_token = ApiToken() - api_token.tenant_id = current_user.current_tenant_id + 
api_token.tenant_id = current_tenant_id api_token.token = key api_token.type = self.resource_type db.session.add(api_token) @@ -790,6 +796,7 @@ class DatasetApiDeleteApi(Resource): @login_required @account_initialization_required def delete(self, api_key_id): + current_user, current_tenant_id = current_account_with_tenant() api_key_id = str(api_key_id) # The role of the current user in the ta table must be admin or owner @@ -799,7 +806,7 @@ class DatasetApiDeleteApi(Resource): key = ( db.session.query(ApiToken) .where( - ApiToken.tenant_id == current_user.current_tenant_id, + ApiToken.tenant_id == current_tenant_id, ApiToken.type == self.resource_type, ApiToken.id == api_key_id, ) @@ -898,6 +905,7 @@ class DatasetPermissionUserListApi(Resource): @login_required @account_initialization_required def get(self, dataset_id): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index 011dacde76..85fd0535c7 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -6,7 +6,6 @@ from typing import Literal, cast import sqlalchemy as sa from flask import request -from flask_login import current_user from flask_restx import Resource, fields, marshal, marshal_with, reqparse from sqlalchemy import asc, desc, select from werkzeug.exceptions import Forbidden, NotFound @@ -53,9 +52,8 @@ from fields.document_fields import ( document_with_segments_fields, ) from libs.datetime_utils import naive_utc_now -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models import Dataset, DatasetProcessRule, Document, DocumentSegment, UploadFile -from models.account import Account from models.dataset import DocumentPipelineExecutionLog from services.dataset_service import DatasetService, DocumentService from services.entities.knowledge_entities.knowledge_entities import KnowledgeConfig @@ -65,6 +63,7 @@ logger = logging.getLogger(__name__) class DocumentResource(Resource): def get_document(self, dataset_id: str, document_id: str) -> Document: + current_user, current_tenant_id = current_account_with_tenant() dataset = DatasetService.get_dataset(dataset_id) if not dataset: raise NotFound("Dataset not found.") @@ -79,12 +78,13 @@ class DocumentResource(Resource): if not document: raise NotFound("Document not found.") - if document.tenant_id != current_user.current_tenant_id: + if document.tenant_id != current_tenant_id: raise Forbidden("No permission.") return document def get_batch_documents(self, dataset_id: str, batch: str) -> Sequence[Document]: + current_user, _ = current_account_with_tenant() dataset = DatasetService.get_dataset(dataset_id) if not dataset: raise NotFound("Dataset not found.") @@ -112,6 +112,7 @@ class GetProcessRuleApi(Resource): @login_required @account_initialization_required def get(self): + current_user, _ = current_account_with_tenant() req_data = request.args document_id = req_data.get("document_id") @@ -168,6 +169,7 @@ class DatasetDocumentListApi(Resource): @login_required @account_initialization_required def get(self, dataset_id): + current_user, current_tenant_id = current_account_with_tenant() dataset_id = str(dataset_id) page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) @@ -199,7 
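
The hunks above all follow one recipe: resolve the account/tenant pair once at the top of the view instead of reaching for the `flask_login` proxy throughout the body. A minimal sketch of the helper's likely contract, assuming Flask-Login still backs the session (the real `libs.login.current_account_with_tenant` may differ):

```python
# Sketch of the helper's contract; the actual implementation in
# api/libs/login.py may differ.
from flask_login import current_user
from werkzeug.exceptions import Unauthorized

from models.account import Account


def current_account_with_tenant() -> tuple[Account, str]:
    """Return (account, tenant_id), failing fast instead of returning None."""
    account = current_user._get_current_object()  # unwrap the LocalProxy
    if not isinstance(account, Account):
        raise Unauthorized("An authenticated account is required.")
    if account.current_tenant_id is None:
        raise Unauthorized("A current tenant is required.")
    return account, account.current_tenant_id
```

Pulling the tuple once also helps type checkers: `current_tenant_id` is a plain non-optional `str` from then on, which is why the `cast(Account, current_user)` calls disappear in the same hunks.
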
+201,7 @@ class DatasetDocumentListApi(Resource): except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) - query = select(Document).filter_by(dataset_id=str(dataset_id), tenant_id=current_user.current_tenant_id) + query = select(Document).filter_by(dataset_id=str(dataset_id), tenant_id=current_tenant_id) if search: search = f"%{search}%" @@ -273,6 +275,7 @@ class DatasetDocumentListApi(Resource): @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id): + current_user, _ = current_account_with_tenant() dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -289,20 +292,20 @@ class DatasetDocumentListApi(Resource): except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) - parser = reqparse.RequestParser() - parser.add_argument( - "indexing_technique", type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False, location="json" - ) - parser.add_argument("data_source", type=dict, required=False, location="json") - parser.add_argument("process_rule", type=dict, required=False, location="json") - parser.add_argument("duplicate", type=bool, default=True, nullable=False, location="json") - parser.add_argument("original_document_id", type=str, required=False, location="json") - parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json") - parser.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json") - parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json") - parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json") - parser.add_argument( - "doc_language", type=str, default="English", required=False, nullable=False, location="json" + parser = ( + reqparse.RequestParser() + .add_argument( + "indexing_technique", type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False, location="json" + ) + .add_argument("data_source", type=dict, required=False, location="json") + .add_argument("process_rule", type=dict, required=False, location="json") + .add_argument("duplicate", type=bool, default=True, nullable=False, location="json") + .add_argument("original_document_id", type=str, required=False, location="json") + .add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json") + .add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json") + .add_argument("embedding_model", type=str, required=False, nullable=True, location="json") + .add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json") + .add_argument("doc_language", type=str, default="English", required=False, nullable=False, location="json") ) args = parser.parse_args() knowledge_config = KnowledgeConfig.model_validate(args) @@ -372,27 +375,28 @@ class DatasetInitApi(Resource): @cloud_edition_billing_rate_limit_check("knowledge") def post(self): # The role of the current user in the ta table must be admin, owner, dataset_operator, or editor + current_user, current_tenant_id = current_account_with_tenant() if not current_user.is_dataset_editor: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument( - "indexing_technique", - type=str, - choices=Dataset.INDEXING_TECHNIQUE_LIST, - required=True, - nullable=False, - location="json", + parser = ( + reqparse.RequestParser() + 
.add_argument( + "indexing_technique", + type=str, + choices=Dataset.INDEXING_TECHNIQUE_LIST, + required=True, + nullable=False, + location="json", + ) + .add_argument("data_source", type=dict, required=True, nullable=True, location="json") + .add_argument("process_rule", type=dict, required=True, nullable=True, location="json") + .add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json") + .add_argument("doc_language", type=str, default="English", required=False, nullable=False, location="json") + .add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json") + .add_argument("embedding_model", type=str, required=False, nullable=True, location="json") + .add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json") ) - parser.add_argument("data_source", type=dict, required=True, nullable=True, location="json") - parser.add_argument("process_rule", type=dict, required=True, nullable=True, location="json") - parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json") - parser.add_argument( - "doc_language", type=str, default="English", required=False, nullable=False, location="json" - ) - parser.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json") - parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json") - parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json") args = parser.parse_args() knowledge_config = KnowledgeConfig.model_validate(args) @@ -402,7 +406,7 @@ class DatasetInitApi(Resource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=args["embedding_model_provider"], model_type=ModelType.TEXT_EMBEDDING, model=args["embedding_model"], @@ -419,9 +423,9 @@ class DatasetInitApi(Resource): try: dataset, documents, batch = DocumentService.save_document_without_dataset_id( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, knowledge_config=knowledge_config, - account=cast(Account, current_user), + account=current_user, ) except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -447,6 +451,7 @@ class DocumentIndexingEstimateApi(DocumentResource): @login_required @account_initialization_required def get(self, dataset_id, document_id): + _, current_tenant_id = current_account_with_tenant() dataset_id = str(dataset_id) document_id = str(document_id) document = self.get_document(dataset_id, document_id) @@ -482,7 +487,7 @@ class DocumentIndexingEstimateApi(DocumentResource): try: estimate_response = indexing_runner.indexing_estimate( - current_user.current_tenant_id, + current_tenant_id, [extract_setting], data_process_rule_dict, document.doc_form, @@ -511,6 +516,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource): @login_required @account_initialization_required def get(self, dataset_id, batch): + _, current_tenant_id = current_account_with_tenant() dataset_id = str(dataset_id) batch = str(batch) documents = self.get_batch_documents(dataset_id, batch) @@ -530,7 +536,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource): file_id = data_source_info["upload_file_id"] file_detail = ( db.session.query(UploadFile) - .where(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id == file_id) + 
.where(UploadFile.tenant_id == current_tenant_id, UploadFile.id == file_id) .first() ) @@ -553,7 +559,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource): "notion_workspace_id": data_source_info["notion_workspace_id"], "notion_obj_id": data_source_info["notion_page_id"], "notion_page_type": data_source_info["type"], - "tenant_id": current_user.current_tenant_id, + "tenant_id": current_tenant_id, } ), document_model=document.doc_form, @@ -569,7 +575,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource): "provider": data_source_info["provider"], "job_id": data_source_info["job_id"], "url": data_source_info["url"], - "tenant_id": current_user.current_tenant_id, + "tenant_id": current_tenant_id, "mode": data_source_info["mode"], "only_main_content": data_source_info["only_main_content"], } @@ -583,7 +589,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource): indexing_runner = IndexingRunner() try: response = indexing_runner.indexing_estimate( - current_user.current_tenant_id, + current_tenant_id, extract_settings, data_process_rule_dict, document.doc_form, @@ -834,6 +840,7 @@ class DocumentProcessingApi(DocumentResource): @account_initialization_required @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, action: Literal["pause", "resume"]): + current_user, _ = current_account_with_tenant() dataset_id = str(dataset_id) document_id = str(document_id) document = self.get_document(dataset_id, document_id) @@ -884,6 +891,7 @@ class DocumentMetadataApi(DocumentResource): @login_required @account_initialization_required def put(self, dataset_id, document_id): + current_user, _ = current_account_with_tenant() dataset_id = str(dataset_id) document_id = str(document_id) document = self.get_document(dataset_id, document_id) @@ -931,6 +939,7 @@ class DocumentStatusApi(DocumentResource): @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, action: Literal["enable", "disable", "archive", "un_archive"]): + current_user, _ = current_account_with_tenant() dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) if dataset is None: @@ -1034,8 +1043,9 @@ class DocumentRetryApi(DocumentResource): def post(self, dataset_id): """retry document.""" - parser = reqparse.RequestParser() - parser.add_argument("document_ids", type=list, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "document_ids", type=list, required=True, nullable=False, location="json" + ) args = parser.parse_args() dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -1077,14 +1087,14 @@ class DocumentRenameApi(DocumentResource): @marshal_with(document_fields) def post(self, dataset_id, document_id): # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator + current_user, _ = current_account_with_tenant() if not current_user.is_dataset_editor: raise Forbidden() dataset = DatasetService.get_dataset(dataset_id) if not dataset: raise NotFound("Dataset not found.") - DatasetService.check_dataset_operator_permission(cast(Account, current_user), dataset) - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=True, nullable=False, location="json") + DatasetService.check_dataset_operator_permission(current_user, dataset) + parser = reqparse.RequestParser().add_argument("name", type=str, required=True, nullable=False, location="json") args = 
parser.parse_args() try: @@ -1102,6 +1112,7 @@ class WebsiteDocumentSyncApi(DocumentResource): @account_initialization_required def get(self, dataset_id, document_id): """sync website document.""" + _, current_tenant_id = current_account_with_tenant() dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) if not dataset: @@ -1110,7 +1121,7 @@ class WebsiteDocumentSyncApi(DocumentResource): document = DocumentService.get_document(dataset.id, document_id) if not document: raise NotFound("Document not found.") - if document.tenant_id != current_user.current_tenant_id: + if document.tenant_id != current_tenant_id: raise Forbidden("No permission.") if document.data_source_type != "website_crawl": raise ValueError("Document is not a website document.") diff --git a/api/controllers/console/datasets/datasets_segments.py b/api/controllers/console/datasets/datasets_segments.py index d6bd02483d..2fe7d42e46 100644 --- a/api/controllers/console/datasets/datasets_segments.py +++ b/api/controllers/console/datasets/datasets_segments.py @@ -1,7 +1,6 @@ import uuid from flask import request -from flask_login import current_user from flask_restx import Resource, marshal, reqparse from sqlalchemy import select from werkzeug.exceptions import Forbidden, NotFound @@ -27,7 +26,7 @@ from core.model_runtime.entities.model_entities import ModelType from extensions.ext_database import db from extensions.ext_redis import redis_client from fields.segment_fields import child_chunk_fields, segment_fields -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.dataset import ChildChunk, DocumentSegment from models.model import UploadFile from services.dataset_service import DatasetService, DocumentService, SegmentService @@ -43,6 +42,8 @@ class DatasetDocumentSegmentListApi(Resource): @login_required @account_initialization_required def get(self, dataset_id, document_id): + current_user, current_tenant_id = current_account_with_tenant() + dataset_id = str(dataset_id) document_id = str(document_id) dataset = DatasetService.get_dataset(dataset_id) @@ -59,13 +60,15 @@ class DatasetDocumentSegmentListApi(Resource): if not document: raise NotFound("Document not found.") - parser = reqparse.RequestParser() - parser.add_argument("limit", type=int, default=20, location="args") - parser.add_argument("status", type=str, action="append", default=[], location="args") - parser.add_argument("hit_count_gte", type=int, default=None, location="args") - parser.add_argument("enabled", type=str, default="all", location="args") - parser.add_argument("keyword", type=str, default=None, location="args") - parser.add_argument("page", type=int, default=1, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("limit", type=int, default=20, location="args") + .add_argument("status", type=str, action="append", default=[], location="args") + .add_argument("hit_count_gte", type=int, default=None, location="args") + .add_argument("enabled", type=str, default="all", location="args") + .add_argument("keyword", type=str, default=None, location="args") + .add_argument("page", type=int, default=1, location="args") + ) args = parser.parse_args() @@ -79,7 +82,7 @@ class DatasetDocumentSegmentListApi(Resource): select(DocumentSegment) .where( DocumentSegment.document_id == str(document_id), - DocumentSegment.tenant_id == current_user.current_tenant_id, + DocumentSegment.tenant_id == current_tenant_id, ) .order_by(DocumentSegment.position.asc()) ) @@ -115,6 
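
The parser rewrites in this file are mechanical: flask-restx's `RequestParser.add_argument` returns the parser itself, so the chained form builds the same parser as the old statement-per-argument form. A quick self-contained check (the argument names below are illustrative):

```python
from flask_restx import reqparse

# Statement-per-argument style: each call mutates `parser` in place.
parser = reqparse.RequestParser()
parser.add_argument("limit", type=int, default=20, location="args")
parser.add_argument("page", type=int, default=1, location="args")

# Fluent style: equivalent, because add_argument returns self.
chained = (
    reqparse.RequestParser()
    .add_argument("limit", type=int, default=20, location="args")
    .add_argument("page", type=int, default=1, location="args")
)

assert [a.name for a in parser.args] == [a.name for a in chained.args]
```
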
+118,8 @@ class DatasetDocumentSegmentListApi(Resource): @account_initialization_required @cloud_edition_billing_rate_limit_check("knowledge") def delete(self, dataset_id, document_id): + current_user, _ = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -148,6 +153,8 @@ class DatasetDocumentSegmentApi(Resource): @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, action): + current_user, current_tenant_id = current_account_with_tenant() + dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) if not dataset: @@ -171,7 +178,7 @@ class DatasetDocumentSegmentApi(Resource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -204,6 +211,8 @@ class DatasetDocumentSegmentAddApi(Resource): @cloud_edition_billing_knowledge_limit_check("add_segment") @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id, document_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -221,7 +230,7 @@ class DatasetDocumentSegmentAddApi(Resource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -237,10 +246,12 @@ class DatasetDocumentSegmentAddApi(Resource): except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) # validate args - parser = reqparse.RequestParser() - parser.add_argument("content", type=str, required=True, nullable=False, location="json") - parser.add_argument("answer", type=str, required=False, nullable=True, location="json") - parser.add_argument("keywords", type=list, required=False, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("content", type=str, required=True, nullable=False, location="json") + .add_argument("answer", type=str, required=False, nullable=True, location="json") + .add_argument("keywords", type=list, required=False, nullable=True, location="json") + ) args = parser.parse_args() SegmentService.segment_create_args_validate(args, document) segment = SegmentService.create_segment(args, document, dataset) @@ -255,6 +266,8 @@ class DatasetDocumentSegmentUpdateApi(Resource): @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, segment_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -272,7 +285,7 @@ class DatasetDocumentSegmentUpdateApi(Resource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -287,7 +300,7 @@ class DatasetDocumentSegmentUpdateApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == 
str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: @@ -300,12 +313,14 @@ class DatasetDocumentSegmentUpdateApi(Resource): except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) # validate args - parser = reqparse.RequestParser() - parser.add_argument("content", type=str, required=True, nullable=False, location="json") - parser.add_argument("answer", type=str, required=False, nullable=True, location="json") - parser.add_argument("keywords", type=list, required=False, nullable=True, location="json") - parser.add_argument( - "regenerate_child_chunks", type=bool, required=False, nullable=True, default=False, location="json" + parser = ( + reqparse.RequestParser() + .add_argument("content", type=str, required=True, nullable=False, location="json") + .add_argument("answer", type=str, required=False, nullable=True, location="json") + .add_argument("keywords", type=list, required=False, nullable=True, location="json") + .add_argument( + "regenerate_child_chunks", type=bool, required=False, nullable=True, default=False, location="json" + ) ) args = parser.parse_args() SegmentService.segment_create_args_validate(args, document) @@ -317,6 +332,8 @@ class DatasetDocumentSegmentUpdateApi(Resource): @account_initialization_required @cloud_edition_billing_rate_limit_check("knowledge") def delete(self, dataset_id, document_id, segment_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -333,7 +350,7 @@ class DatasetDocumentSegmentUpdateApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: @@ -361,6 +378,8 @@ class DatasetDocumentSegmentBatchImportApi(Resource): @cloud_edition_billing_knowledge_limit_check("add_segment") @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id, document_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -372,8 +391,9 @@ class DatasetDocumentSegmentBatchImportApi(Resource): if not document: raise NotFound("Document not found.") - parser = reqparse.RequestParser() - parser.add_argument("upload_file_id", type=str, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "upload_file_id", type=str, required=True, nullable=False, location="json" + ) args = parser.parse_args() upload_file_id = args["upload_file_id"] @@ -396,7 +416,7 @@ class DatasetDocumentSegmentBatchImportApi(Resource): upload_file_id, dataset_id, document_id, - current_user.current_tenant_id, + current_tenant_id, current_user.id, ) except Exception as e: @@ -427,6 +447,8 @@ class ChildChunkAddApi(Resource): @cloud_edition_billing_knowledge_limit_check("add_segment") @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id, document_id, segment_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -441,7 +463,7 @@ class 
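
The tenant-scoped segment lookup just above now repeats, almost verbatim, across several views in this file. A shared helper would keep the tenant filter from being dropped in a future copy-paste; the sketch below is hypothetical (the name and placement are not part of this diff), but the query is lifted directly from the hunks:

```python
# Hypothetical consolidation of the repeated lookup; not part of this diff.
from werkzeug.exceptions import NotFound

from extensions.ext_database import db
from models.dataset import DocumentSegment


def get_segment_scoped(segment_id: str, tenant_id: str) -> DocumentSegment:
    """Load a segment only if it belongs to the caller's tenant."""
    segment = (
        db.session.query(DocumentSegment)
        .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == tenant_id)
        .first()
    )
    if not segment:
        # A cross-tenant id 404s exactly like an unknown id, so the API
        # does not reveal which segment ids exist in other workspaces.
        raise NotFound("Segment not found.")
    return segment
```
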
ChildChunkAddApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: @@ -453,7 +475,7 @@ class ChildChunkAddApi(Resource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -469,8 +491,9 @@ class ChildChunkAddApi(Resource): except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) # validate args - parser = reqparse.RequestParser() - parser.add_argument("content", type=str, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "content", type=str, required=True, nullable=False, location="json" + ) args = parser.parse_args() try: content = args["content"] @@ -483,6 +506,8 @@ class ChildChunkAddApi(Resource): @login_required @account_initialization_required def get(self, dataset_id, document_id, segment_id): + _, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -499,15 +524,17 @@ class ChildChunkAddApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: raise NotFound("Segment not found.") - parser = reqparse.RequestParser() - parser.add_argument("limit", type=int, default=20, location="args") - parser.add_argument("keyword", type=str, default=None, location="args") - parser.add_argument("page", type=int, default=1, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("limit", type=int, default=20, location="args") + .add_argument("keyword", type=str, default=None, location="args") + .add_argument("page", type=int, default=1, location="args") + ) args = parser.parse_args() @@ -530,6 +557,8 @@ class ChildChunkAddApi(Resource): @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, segment_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -546,7 +575,7 @@ class ChildChunkAddApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: @@ -559,8 +588,9 @@ class ChildChunkAddApi(Resource): except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) # validate args - parser = reqparse.RequestParser() - parser.add_argument("chunks", type=list, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "chunks", type=list, required=True, nullable=False, location="json" + ) args = parser.parse_args() try: chunks_data = args["chunks"] @@ -580,6 +610,8 @@ class 
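
Since each of these endpoints now filters on `tenant_id`, a cross-tenant probe should be indistinguishable from an unknown id. A regression test along these lines would pin that behavior down; the client fixtures and the exact route are assumptions, not part of this diff:

```python
# Sketch of a tenant-isolation regression test; fixtures and the exact
# route are assumptions.
def test_segment_invisible_across_tenants(client_tenant_a, client_tenant_b, segment_a):
    url = (
        f"/console/api/datasets/{segment_a.dataset_id}"
        f"/documents/{segment_a.document_id}"
        f"/segments/{segment_a.id}/child_chunks"
    )
    # The owning tenant can list the segment's child chunks...
    assert client_tenant_a.get(url).status_code == 200
    # ...while another tenant sees the same 404 as for an unknown id.
    assert client_tenant_b.get(url).status_code == 404
```
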
ChildChunkUpdateApi(Resource): @account_initialization_required @cloud_edition_billing_rate_limit_check("knowledge") def delete(self, dataset_id, document_id, segment_id, child_chunk_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -596,7 +628,7 @@ class ChildChunkUpdateApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: @@ -607,7 +639,7 @@ class ChildChunkUpdateApi(Resource): db.session.query(ChildChunk) .where( ChildChunk.id == str(child_chunk_id), - ChildChunk.tenant_id == current_user.current_tenant_id, + ChildChunk.tenant_id == current_tenant_id, ChildChunk.segment_id == segment.id, ChildChunk.document_id == document_id, ) @@ -634,6 +666,8 @@ class ChildChunkUpdateApi(Resource): @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def patch(self, dataset_id, document_id, segment_id, child_chunk_id): + current_user, current_tenant_id = current_account_with_tenant() + # check dataset dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) @@ -650,7 +684,7 @@ class ChildChunkUpdateApi(Resource): segment_id = str(segment_id) segment = ( db.session.query(DocumentSegment) - .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id) + .where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_tenant_id) .first() ) if not segment: @@ -661,7 +695,7 @@ class ChildChunkUpdateApi(Resource): db.session.query(ChildChunk) .where( ChildChunk.id == str(child_chunk_id), - ChildChunk.tenant_id == current_user.current_tenant_id, + ChildChunk.tenant_id == current_tenant_id, ChildChunk.segment_id == segment.id, ChildChunk.document_id == document_id, ) @@ -677,8 +711,9 @@ class ChildChunkUpdateApi(Resource): except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) # validate args - parser = reqparse.RequestParser() - parser.add_argument("content", type=str, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "content", type=str, required=True, nullable=False, location="json" + ) args = parser.parse_args() try: content = args["content"] diff --git a/api/controllers/console/datasets/external.py b/api/controllers/console/datasets/external.py index adf9f53523..4f738db0e5 100644 --- a/api/controllers/console/datasets/external.py +++ b/api/controllers/console/datasets/external.py @@ -1,7 +1,4 @@ -from typing import cast - from flask import request -from flask_login import current_user from flask_restx import Resource, fields, marshal, reqparse from werkzeug.exceptions import Forbidden, InternalServerError, NotFound @@ -10,8 +7,7 @@ from controllers.console import api, console_ns from controllers.console.datasets.error import DatasetNameDuplicateError from controllers.console.wraps import account_initialization_required, setup_required from fields.dataset_fields import dataset_detail_fields -from libs.login import login_required -from models.account import Account +from libs.login import current_account_with_tenant, login_required from services.dataset_service import DatasetService from 
services.external_knowledge_service import ExternalDatasetService
from services.hit_testing_service import HitTestingService
@@ -40,12 +36,13 @@ class ExternalApiTemplateListApi(Resource):
@login_required
@account_initialization_required
def get(self):
+ _, current_tenant_id = current_account_with_tenant()
page = request.args.get("page", default=1, type=int)
limit = request.args.get("limit", default=20, type=int)
search = request.args.get("keyword", default=None, type=str)
external_knowledge_apis, total = ExternalDatasetService.get_external_knowledge_apis(
- page, limit, current_user.current_tenant_id, search
+ page, limit, current_tenant_id, search
)
response = {
"data": [item.to_dict() for item in external_knowledge_apis],
@@ -60,20 +57,23 @@ class ExternalApiTemplateListApi(Resource):
@login_required
@account_initialization_required
def post(self):
- parser = reqparse.RequestParser()
- parser.add_argument(
- "name",
- nullable=False,
- required=True,
- help="Name is required. Name must be between 1 to 100 characters.",
- type=_validate_name,
- )
- parser.add_argument(
- "settings",
- type=dict,
- location="json",
- nullable=False,
- required=True,
+ current_user, current_tenant_id = current_account_with_tenant()
+ parser = (
+ reqparse.RequestParser()
+ .add_argument(
+ "name",
+ nullable=False,
+ required=True,
+ help="Name is required. Name must be between 1 to 100 characters.",
+ type=_validate_name,
+ )
+ .add_argument(
+ "settings",
+ type=dict,
+ location="json",
+ nullable=False,
+ required=True,
+ )
)
args = parser.parse_args()
@@ -85,7 +85,7 @@ class ExternalApiTemplateListApi(Resource):
try:
external_knowledge_api = ExternalDatasetService.create_external_knowledge_api(
- tenant_id=current_user.current_tenant_id, user_id=current_user.id, args=args
+ tenant_id=current_tenant_id, user_id=current_user.id, args=args
)
except services.errors.dataset.DatasetNameDuplicateError:
raise DatasetNameDuplicateError()
@@ -115,28 +115,31 @@ class ExternalApiTemplateApi(Resource):
@login_required
@account_initialization_required
def patch(self, external_knowledge_api_id):
+ current_user, current_tenant_id = current_account_with_tenant()
external_knowledge_api_id = str(external_knowledge_api_id)
- parser = reqparse.RequestParser()
- parser.add_argument(
- "name",
- nullable=False,
- required=True,
- help="type is required. Name must be between 1 to 100 characters.",
- type=_validate_name,
- )
- parser.add_argument(
- "settings",
- type=dict,
- location="json",
- nullable=False,
- required=True,
+ parser = (
+ reqparse.RequestParser()
+ .add_argument(
+ "name",
+ nullable=False,
+ required=True,
+ help="Name is required. 
Name must be between 1 to 100 characters.", + type=_validate_name, + ) + .add_argument( + "settings", + type=dict, + location="json", + nullable=False, + required=True, + ) ) args = parser.parse_args() ExternalDatasetService.validate_api_list(args["settings"]) external_knowledge_api = ExternalDatasetService.update_external_knowledge_api( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, user_id=current_user.id, external_knowledge_api_id=external_knowledge_api_id, args=args, @@ -148,13 +151,13 @@ class ExternalApiTemplateApi(Resource): @login_required @account_initialization_required def delete(self, external_knowledge_api_id): + current_user, current_tenant_id = current_account_with_tenant() external_knowledge_api_id = str(external_knowledge_api_id) - # The role of the current user in the ta table must be admin, owner, or editor - if not (current_user.is_editor or current_user.is_dataset_operator): + if not (current_user.has_edit_permission or current_user.is_dataset_operator): raise Forbidden() - ExternalDatasetService.delete_external_knowledge_api(current_user.current_tenant_id, external_knowledge_api_id) + ExternalDatasetService.delete_external_knowledge_api(current_tenant_id, external_knowledge_api_id) return {"result": "success"}, 204 @@ -199,21 +202,24 @@ class ExternalDatasetCreateApi(Resource): @account_initialization_required def post(self): # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: + current_user, current_tenant_id = current_account_with_tenant() + if not current_user.has_edit_permission: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("external_knowledge_api_id", type=str, required=True, nullable=False, location="json") - parser.add_argument("external_knowledge_id", type=str, required=True, nullable=False, location="json") - parser.add_argument( - "name", - nullable=False, - required=True, - help="name is required. Name must be between 1 to 100 characters.", - type=_validate_name, + parser = ( + reqparse.RequestParser() + .add_argument("external_knowledge_api_id", type=str, required=True, nullable=False, location="json") + .add_argument("external_knowledge_id", type=str, required=True, nullable=False, location="json") + .add_argument( + "name", + nullable=False, + required=True, + help="name is required. 
Name must be between 1 to 100 characters.", + type=_validate_name, + ) + .add_argument("description", type=str, required=False, nullable=True, location="json") + .add_argument("external_retrieval_model", type=dict, required=False, location="json") ) - parser.add_argument("description", type=str, required=False, nullable=True, location="json") - parser.add_argument("external_retrieval_model", type=dict, required=False, location="json") args = parser.parse_args() @@ -223,7 +229,7 @@ class ExternalDatasetCreateApi(Resource): try: dataset = ExternalDatasetService.create_external_dataset( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, user_id=current_user.id, args=args, ) @@ -255,6 +261,7 @@ class ExternalKnowledgeHitTestingApi(Resource): @login_required @account_initialization_required def post(self, dataset_id): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: @@ -265,10 +272,12 @@ class ExternalKnowledgeHitTestingApi(Resource): except services.errors.account.NoPermissionError as e: raise Forbidden(str(e)) - parser = reqparse.RequestParser() - parser.add_argument("query", type=str, location="json") - parser.add_argument("external_retrieval_model", type=dict, required=False, location="json") - parser.add_argument("metadata_filtering_conditions", type=dict, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("query", type=str, location="json") + .add_argument("external_retrieval_model", type=dict, required=False, location="json") + .add_argument("metadata_filtering_conditions", type=dict, required=False, location="json") + ) args = parser.parse_args() HitTestingService.hit_testing_args_check(args) @@ -277,7 +286,7 @@ class ExternalKnowledgeHitTestingApi(Resource): response = HitTestingService.external_retrieve( dataset=dataset, query=args["query"], - account=cast(Account, current_user), + account=current_user, external_retrieval_model=args["external_retrieval_model"], metadata_filtering_conditions=args["metadata_filtering_conditions"], ) @@ -304,15 +313,17 @@ class BedrockRetrievalApi(Resource): ) @api.response(200, "Bedrock retrieval test completed") def post(self): - parser = reqparse.RequestParser() - parser.add_argument("retrieval_setting", nullable=False, required=True, type=dict, location="json") - parser.add_argument( - "query", - nullable=False, - required=True, - type=str, + parser = ( + reqparse.RequestParser() + .add_argument("retrieval_setting", nullable=False, required=True, type=dict, location="json") + .add_argument( + "query", + nullable=False, + required=True, + type=str, + ) + .add_argument("knowledge_id", nullable=False, required=True, type=str) ) - parser.add_argument("knowledge_id", nullable=False, required=True, type=str) args = parser.parse_args() # Call the knowledge retrieval service diff --git a/api/controllers/console/datasets/hit_testing_base.py b/api/controllers/console/datasets/hit_testing_base.py index 6113f1fd17..99d4d5a29c 100644 --- a/api/controllers/console/datasets/hit_testing_base.py +++ b/api/controllers/console/datasets/hit_testing_base.py @@ -48,11 +48,12 @@ class DatasetsHitTestingBase: @staticmethod def parse_args(): - parser = reqparse.RequestParser() - - parser.add_argument("query", type=str, location="json") - parser.add_argument("retrieval_model", type=dict, required=False, location="json") - parser.add_argument("external_retrieval_model", type=dict, required=False, 
location="json") + parser = ( + reqparse.RequestParser() + .add_argument("query", type=str, location="json") + .add_argument("retrieval_model", type=dict, required=False, location="json") + .add_argument("external_retrieval_model", type=dict, required=False, location="json") + ) return parser.parse_args() @staticmethod diff --git a/api/controllers/console/datasets/metadata.py b/api/controllers/console/datasets/metadata.py index 8438458617..72b2ff0ff8 100644 --- a/api/controllers/console/datasets/metadata.py +++ b/api/controllers/console/datasets/metadata.py @@ -1,13 +1,12 @@ from typing import Literal -from flask_login import current_user from flask_restx import Resource, marshal_with, reqparse from werkzeug.exceptions import NotFound from controllers.console import console_ns from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required from fields.dataset_fields import dataset_metadata_fields -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from services.dataset_service import DatasetService from services.entities.knowledge_entities.knowledge_entities import ( MetadataArgs, @@ -24,9 +23,12 @@ class DatasetMetadataCreateApi(Resource): @enterprise_license_required @marshal_with(dataset_metadata_fields) def post(self, dataset_id): - parser = reqparse.RequestParser() - parser.add_argument("type", type=str, required=True, nullable=False, location="json") - parser.add_argument("name", type=str, required=True, nullable=False, location="json") + current_user, _ = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("type", type=str, required=True, nullable=False, location="json") + .add_argument("name", type=str, required=True, nullable=False, location="json") + ) args = parser.parse_args() metadata_args = MetadataArgs.model_validate(args) @@ -59,8 +61,8 @@ class DatasetMetadataApi(Resource): @enterprise_license_required @marshal_with(dataset_metadata_fields) def patch(self, dataset_id, metadata_id): - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=True, nullable=False, location="json") + current_user, _ = current_account_with_tenant() + parser = reqparse.RequestParser().add_argument("name", type=str, required=True, nullable=False, location="json") args = parser.parse_args() name = args["name"] @@ -79,6 +81,7 @@ class DatasetMetadataApi(Resource): @account_initialization_required @enterprise_license_required def delete(self, dataset_id, metadata_id): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) metadata_id_str = str(metadata_id) dataset = DatasetService.get_dataset(dataset_id_str) @@ -108,6 +111,7 @@ class DatasetMetadataBuiltInFieldActionApi(Resource): @account_initialization_required @enterprise_license_required def post(self, dataset_id, action: Literal["enable", "disable"]): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: @@ -128,14 +132,16 @@ class DocumentMetadataEditApi(Resource): @account_initialization_required @enterprise_license_required def post(self, dataset_id): + current_user, _ = current_account_with_tenant() dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: raise NotFound("Dataset not found.") DatasetService.check_dataset_permission(dataset, current_user) - parser = reqparse.RequestParser() - 
parser.add_argument("operation_data", type=list, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "operation_data", type=list, required=True, nullable=False, location="json" + ) args = parser.parse_args() metadata_args = MetadataOperationData.model_validate(args) diff --git a/api/controllers/console/datasets/rag_pipeline/datasource_auth.py b/api/controllers/console/datasets/rag_pipeline/datasource_auth.py index 53b5a0d965..2111ee2ecf 100644 --- a/api/controllers/console/datasets/rag_pipeline/datasource_auth.py +++ b/api/controllers/console/datasets/rag_pipeline/datasource_auth.py @@ -1,19 +1,15 @@ from flask import make_response, redirect, request -from flask_login import current_user from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden, NotFound from configs import dify_config from controllers.console import console_ns -from controllers.console.wraps import ( - account_initialization_required, - setup_required, -) +from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.utils.encoders import jsonable_encoder from core.plugin.impl.oauth import OAuthHandler from libs.helper import StrLen -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.provider_ids import DatasourceProviderID from services.datasource_provider_service import DatasourceProviderService from services.plugin.oauth_service import OAuthProxyService @@ -24,11 +20,11 @@ class DatasourcePluginOAuthAuthorizationUrl(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def get(self, provider_id: str): - user = current_user - tenant_id = user.current_tenant_id - if not current_user.is_editor: - raise Forbidden() + current_user, current_tenant_id = current_account_with_tenant() + + tenant_id = current_tenant_id credential_id = request.args.get("credential_id") datasource_provider_id = DatasourceProviderID(provider_id) @@ -52,7 +48,7 @@ class DatasourcePluginOAuthAuthorizationUrl(Resource): redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider_id}/datasource/callback" authorization_url_response = oauth_handler.get_authorization_url( tenant_id=tenant_id, - user_id=user.id, + user_id=current_user.id, plugin_id=plugin_id, provider=provider_name, redirect_uri=redirect_uri, @@ -130,22 +126,24 @@ class DatasourceAuth(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, provider_id: str): - if not current_user.is_editor: - raise Forbidden() + _, current_tenant_id = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument( - "name", type=StrLen(max_length=100), required=False, nullable=True, location="json", default=None + parser = ( + reqparse.RequestParser() + .add_argument( + "name", type=StrLen(max_length=100), required=False, nullable=True, location="json", default=None + ) + .add_argument("credentials", type=dict, required=True, nullable=False, location="json") ) - parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") args = parser.parse_args() datasource_provider_id = DatasourceProviderID(provider_id) datasource_provider_service = DatasourceProviderService() try: datasource_provider_service.add_datasource_api_key_provider( 
- tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider_id=datasource_provider_id, credentials=args["credentials"], name=args["name"], @@ -160,8 +158,10 @@ class DatasourceAuth(Resource): def get(self, provider_id: str): datasource_provider_id = DatasourceProviderID(provider_id) datasource_provider_service = DatasourceProviderService() + _, current_tenant_id = current_account_with_tenant() + datasources = datasource_provider_service.list_datasource_credentials( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=datasource_provider_id.provider_name, plugin_id=datasource_provider_id.plugin_id, ) @@ -173,18 +173,21 @@ class DatasourceAuthDeleteApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, provider_id: str): + _, current_tenant_id = current_account_with_tenant() + datasource_provider_id = DatasourceProviderID(provider_id) plugin_id = datasource_provider_id.plugin_id provider_name = datasource_provider_id.provider_name - if not current_user.is_editor: - raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json") + + parser = reqparse.RequestParser().add_argument( + "credential_id", type=str, required=True, nullable=False, location="json" + ) args = parser.parse_args() datasource_provider_service = DatasourceProviderService() datasource_provider_service.remove_datasource_credentials( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, auth_id=args["credential_id"], provider=provider_name, plugin_id=plugin_id, @@ -197,18 +200,22 @@ class DatasourceAuthUpdateApi(Resource): @setup_required @login_required @account_initialization_required + @edit_permission_required def post(self, provider_id: str): + _, current_tenant_id = current_account_with_tenant() + datasource_provider_id = DatasourceProviderID(provider_id) - parser = reqparse.RequestParser() - parser.add_argument("credentials", type=dict, required=False, nullable=True, location="json") - parser.add_argument("name", type=StrLen(max_length=100), required=False, nullable=True, location="json") - parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("credentials", type=dict, required=False, nullable=True, location="json") + .add_argument("name", type=StrLen(max_length=100), required=False, nullable=True, location="json") + .add_argument("credential_id", type=str, required=True, nullable=False, location="json") + ) args = parser.parse_args() - if not current_user.is_editor: - raise Forbidden() + datasource_provider_service = DatasourceProviderService() datasource_provider_service.update_datasource_credentials( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, auth_id=args["credential_id"], provider=datasource_provider_id.provider_name, plugin_id=datasource_provider_id.plugin_id, @@ -224,10 +231,10 @@ class DatasourceAuthListApi(Resource): @login_required @account_initialization_required def get(self): + _, current_tenant_id = current_account_with_tenant() + datasource_provider_service = DatasourceProviderService() - datasources = datasource_provider_service.get_all_datasource_credentials( - tenant_id=current_user.current_tenant_id - ) + datasources = datasource_provider_service.get_all_datasource_credentials(tenant_id=current_tenant_id) return {"result": 
 jsonable_encoder(datasources)}, 200

@@ -237,10 +244,10 @@ class DatasourceHardCodeAuthListApi(Resource):
     @login_required
     @account_initialization_required
     def get(self):
+        _, current_tenant_id = current_account_with_tenant()
+
         datasource_provider_service = DatasourceProviderService()
-        datasources = datasource_provider_service.get_hard_code_datasource_credentials(
-            tenant_id=current_user.current_tenant_id
-        )
+        datasources = datasource_provider_service.get_hard_code_datasource_credentials(tenant_id=current_tenant_id)
         return {"result": jsonable_encoder(datasources)}, 200
@@ -249,17 +256,20 @@ class DatasourceAuthOauthCustomClient(Resource):
     @setup_required
     @login_required
     @account_initialization_required
+    @edit_permission_required
     def post(self, provider_id: str):
-        if not current_user.is_editor:
-            raise Forbidden()
-        parser = reqparse.RequestParser()
-        parser.add_argument("client_params", type=dict, required=False, nullable=True, location="json")
-        parser.add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json")
+        _, current_tenant_id = current_account_with_tenant()
+
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("client_params", type=dict, required=False, nullable=True, location="json")
+            .add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json")
+        )
         args = parser.parse_args()
         datasource_provider_id = DatasourceProviderID(provider_id)
         datasource_provider_service = DatasourceProviderService()
         datasource_provider_service.setup_oauth_custom_client_params(
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             datasource_provider_id=datasource_provider_id,
             client_params=args.get("client_params", {}),
             enabled=args.get("enable_oauth_custom_client", False),
@@ -270,10 +280,12 @@ class DatasourceAuthOauthCustomClient(Resource):
     @login_required
     @account_initialization_required
     def delete(self, provider_id: str):
+        _, current_tenant_id = current_account_with_tenant()
+
         datasource_provider_id = DatasourceProviderID(provider_id)
         datasource_provider_service = DatasourceProviderService()
         datasource_provider_service.remove_oauth_custom_client_params(
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             datasource_provider_id=datasource_provider_id,
         )
         return {"result": "success"}, 200
@@ -284,16 +296,16 @@ class DatasourceAuthDefaultApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
+    @edit_permission_required
     def post(self, provider_id: str):
-        if not current_user.is_editor:
-            raise Forbidden()
-        parser = reqparse.RequestParser()
-        parser.add_argument("id", type=str, required=True, nullable=False, location="json")
+        _, current_tenant_id = current_account_with_tenant()
+
+        parser = reqparse.RequestParser().add_argument("id", type=str, required=True, nullable=False, location="json")
         args = parser.parse_args()
         datasource_provider_id = DatasourceProviderID(provider_id)
         datasource_provider_service = DatasourceProviderService()
         datasource_provider_service.set_default_datasource_provider(
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             datasource_provider_id=datasource_provider_id,
             credential_id=args["id"],
         )
@@ -305,17 +317,20 @@ class DatasourceUpdateProviderNameApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
+    @edit_permission_required
     def post(self, provider_id: str):
-        if not current_user.is_editor:
-            raise Forbidden()
-        parser = reqparse.RequestParser()
-        parser.add_argument("name", type=StrLen(max_length=100), required=True, nullable=False, location="json")
-        parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
+        _, current_tenant_id = current_account_with_tenant()
+
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("name", type=StrLen(max_length=100), required=True, nullable=False, location="json")
+            .add_argument("credential_id", type=str, required=True, nullable=False, location="json")
+        )
         args = parser.parse_args()
         datasource_provider_id = DatasourceProviderID(provider_id)
         datasource_provider_service = DatasourceProviderService()
         datasource_provider_service.update_datasource_provider_name(
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             datasource_provider_id=datasource_provider_id,
             name=args["name"],
             credential_id=args["credential_id"],
diff --git a/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py b/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py
index 6c04cc877a..856e4a1c70 100644
--- a/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py
+++ b/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py
@@ -26,10 +26,12 @@ class DataSourceContentPreviewApi(Resource):
         if not isinstance(current_user, Account):
             raise Forbidden()

-        parser = reqparse.RequestParser()
-        parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
-        parser.add_argument("datasource_type", type=str, required=True, location="json")
-        parser.add_argument("credential_id", type=str, required=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("inputs", type=dict, required=True, nullable=False, location="json")
+            .add_argument("datasource_type", type=str, required=True, location="json")
+            .add_argument("credential_id", type=str, required=False, location="json")
+        )
         args = parser.parse_args()

         inputs = args.get("inputs")
diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py
index e021f95283..f589bba3bf 100644
--- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py
+++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline.py
@@ -66,26 +66,28 @@ class CustomizedPipelineTemplateApi(Resource):
     @account_initialization_required
     @enterprise_license_required
     def patch(self, template_id: str):
-        parser = reqparse.RequestParser()
-        parser.add_argument(
-            "name",
-            nullable=False,
-            required=True,
-            help="Name must be between 1 to 40 characters.",
-            type=_validate_name,
-        )
-        parser.add_argument(
-            "description",
-            type=_validate_description_length,
-            nullable=True,
-            required=False,
-            default="",
-        )
-        parser.add_argument(
-            "icon_info",
-            type=dict,
-            location="json",
-            nullable=True,
+        parser = (
+            reqparse.RequestParser()
+            .add_argument(
+                "name",
+                nullable=False,
+                required=True,
+                help="Name must be between 1 to 40 characters.",
+                type=_validate_name,
+            )
+            .add_argument(
+                "description",
+                type=_validate_description_length,
+                nullable=True,
+                required=False,
+                default="",
+            )
+            .add_argument(
+                "icon_info",
+                type=dict,
+                location="json",
+                nullable=True,
+            )
         )
         args = parser.parse_args()
         pipeline_template_info = PipelineTemplateInfoEntity.model_validate(args)
@@ -123,26 +125,28 @@ class PublishCustomizedPipelineTemplateApi(Resource):
     @enterprise_license_required
     @knowledge_pipeline_publish_enabled
     def post(self, pipeline_id: str):
-        parser = reqparse.RequestParser()
-        parser.add_argument(
-            "name",
-            nullable=False,
-            required=True,
-            help="Name must be between 1 to 40 characters.",
-            type=_validate_name,
-        )
-        parser.add_argument(
-            "description",
-            type=_validate_description_length,
-            nullable=True,
-            required=False,
-            default="",
-        )
-        parser.add_argument(
-            "icon_info",
-            type=dict,
-            location="json",
-            nullable=True,
+        parser = (
+            reqparse.RequestParser()
+            .add_argument(
+                "name",
+                nullable=False,
+                required=True,
+                help="Name must be between 1 to 40 characters.",
+                type=_validate_name,
+            )
+            .add_argument(
+                "description",
+                type=_validate_description_length,
+                nullable=True,
+                required=False,
+                default="",
+            )
+            .add_argument(
+                "icon_info",
+                type=dict,
+                location="json",
+                nullable=True,
+            )
         )
         args = parser.parse_args()
         rag_pipeline_service = RagPipelineService()
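Note on the pattern above: the chained form is equivalent to the old statement-per-argument form because `RequestParser.add_argument` in flask-restx (inherited from flask-restful) returns the parser itself. A minimal sketch outside any Dify code:

```python
# Minimal illustration of why the chained form works: flask-restx's
# RequestParser.add_argument ends with `return self`, so calls stack.
from flask_restx import reqparse

parser = (
    reqparse.RequestParser()
    .add_argument("page", type=int, required=False, default=1, location="args")
    .add_argument("limit", type=int, required=False, default=20, location="args")
)
# parser.parse_args() must be called inside a Flask request context.
```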
diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py
index 404aa42073..98876e9f5e 100644
--- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py
+++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_datasets.py
@@ -1,4 +1,3 @@
-from flask_login import current_user
 from flask_restx import Resource, marshal, reqparse
 from sqlalchemy.orm import Session
 from werkzeug.exceptions import Forbidden
@@ -13,7 +12,7 @@
 )
 from extensions.ext_database import db
 from fields.dataset_fields import dataset_detail_fields
-from libs.login import login_required
+from libs.login import current_account_with_tenant, login_required
 from models.dataset import DatasetPermissionEnum
 from services.dataset_service import DatasetPermissionService, DatasetService
 from services.entities.knowledge_entities.rag_pipeline_entities import IconInfo, RagPipelineDatasetCreateEntity
@@ -27,9 +26,7 @@ class CreateRagPipelineDatasetApi(Resource):
     @account_initialization_required
     @cloud_edition_billing_rate_limit_check("knowledge")
     def post(self):
-        parser = reqparse.RequestParser()
-
-        parser.add_argument(
+        parser = reqparse.RequestParser().add_argument(
             "yaml_content",
             type=str,
             nullable=False,
@@ -38,7 +35,7 @@
         )
         args = parser.parse_args()

-
+        current_user, current_tenant_id = current_account_with_tenant()
         # The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator
         if not current_user.is_dataset_editor:
             raise Forbidden()
@@ -58,12 +55,12 @@
         with Session(db.engine) as session:
             rag_pipeline_dsl_service = RagPipelineDslService(session)
             import_info = rag_pipeline_dsl_service.create_rag_pipeline_dataset(
-                tenant_id=current_user.current_tenant_id,
+                tenant_id=current_tenant_id,
                 rag_pipeline_dataset_create_entity=rag_pipeline_dataset_create_entity,
             )
             if rag_pipeline_dataset_create_entity.permission == "partial_members":
                 DatasetPermissionService.update_partial_member_list(
-                    current_user.current_tenant_id,
+                    current_tenant_id,
                     import_info["dataset_id"],
                     rag_pipeline_dataset_create_entity.partial_member_list,
                 )
@@ -81,10 +78,12 @@ class CreateEmptyRagPipelineDatasetApi(Resource):
     @cloud_edition_billing_rate_limit_check("knowledge")
     def post(self):
         # The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator
+        current_user, current_tenant_id = current_account_with_tenant()
+
         if not current_user.is_dataset_editor:
             raise Forbidden()
         dataset = DatasetService.create_empty_rag_pipeline_dataset(
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             rag_pipeline_dataset_create_entity=RagPipelineDatasetCreateEntity(
                 name="",
                 description="",
diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py
index bef6bfd13e..858ba94bf8 100644
--- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py
+++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_draft_variable.py
@@ -23,7 +23,7 @@ from extensions.ext_database import db
 from factories.file_factory import build_from_mapping, build_from_mappings
 from factories.variable_factory import build_segment_with_type
 from libs.login import current_user, login_required
-from models.account import Account
+from models import Account
 from models.dataset import Pipeline
 from models.workflow import WorkflowDraftVariable
 from services.rag_pipeline.rag_pipeline import RagPipelineService
@@ -33,16 +33,18 @@ logger = logging.getLogger(__name__)


 def _create_pagination_parser():
-    parser = reqparse.RequestParser()
-    parser.add_argument(
-        "page",
-        type=inputs.int_range(1, 100_000),
-        required=False,
-        default=1,
-        location="args",
-        help="the page of data requested",
+    parser = (
+        reqparse.RequestParser()
+        .add_argument(
+            "page",
+            type=inputs.int_range(1, 100_000),
+            required=False,
+            default=1,
+            location="args",
+            help="the page of data requested",
+        )
+        .add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
     )
-    parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
     return parser
@@ -206,10 +208,11 @@ class RagPipelineVariableApi(Resource):
         #     "upload_file_id": "1602650a-4fe4-423c-85a2-af76c083e3c4"
         # }

-        parser = reqparse.RequestParser()
-        parser.add_argument(self._PATCH_NAME_FIELD, type=str, required=False, nullable=True, location="json")
-        # Parse 'value' field as-is to maintain its original data structure
-        parser.add_argument(self._PATCH_VALUE_FIELD, type=lambda x: x, required=False, nullable=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument(self._PATCH_NAME_FIELD, type=str, required=False, nullable=True, location="json")
+            .add_argument(self._PATCH_VALUE_FIELD, type=lambda x: x, required=False, nullable=True, location="json")
+        )

         draft_var_srv = WorkflowDraftVariableService(
             session=db.session(),
diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py
index a82872ba2b..2c28120e65 100644
--- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py
+++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py
@@ -1,6 +1,3 @@
-from typing import cast
-
-from flask_login import current_user  # type: ignore
 from flask_restx import Resource, marshal_with, reqparse  # type: ignore
 from sqlalchemy.orm import Session
 from werkzeug.exceptions import Forbidden
@@ -13,8 +10,7 @@
 )
 from extensions.ext_database import db
 from fields.rag_pipeline_fields import pipeline_import_check_dependencies_fields, pipeline_import_fields
-from libs.login import login_required
-from models import Account
+from libs.login import current_account_with_tenant, login_required
 from models.dataset import Pipeline
 from services.app_dsl_service import ImportStatus
 from services.rag_pipeline.rag_pipeline_dsl_service import RagPipelineDslService
@@ -28,26 +24,29 @@ class RagPipelineImportApi(Resource):
     @marshal_with(pipeline_import_fields)
     def post(self):
         # Check user role first
-        if not current_user.is_editor:
+        current_user, _ = current_account_with_tenant()
+        if not current_user.has_edit_permission:
             raise Forbidden()

-        parser = reqparse.RequestParser()
-        parser.add_argument("mode", type=str, required=True, location="json")
-        parser.add_argument("yaml_content", type=str, location="json")
-        parser.add_argument("yaml_url", type=str, location="json")
-        parser.add_argument("name", type=str, location="json")
-        parser.add_argument("description", type=str, location="json")
-        parser.add_argument("icon_type", type=str, location="json")
-        parser.add_argument("icon", type=str, location="json")
-        parser.add_argument("icon_background", type=str, location="json")
-        parser.add_argument("pipeline_id", type=str, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("mode", type=str, required=True, location="json")
+            .add_argument("yaml_content", type=str, location="json")
+            .add_argument("yaml_url", type=str, location="json")
+            .add_argument("name", type=str, location="json")
+            .add_argument("description", type=str, location="json")
+            .add_argument("icon_type", type=str, location="json")
+            .add_argument("icon", type=str, location="json")
+            .add_argument("icon_background", type=str, location="json")
+            .add_argument("pipeline_id", type=str, location="json")
+        )
         args = parser.parse_args()

         # Create service with session
         with Session(db.engine) as session:
             import_service = RagPipelineDslService(session)
             # Import app
-            account = cast(Account, current_user)
+            account = current_user
             result = import_service.import_rag_pipeline(
                 account=account,
                 import_mode=args["mode"],
@@ -74,15 +73,16 @@ class RagPipelineImportConfirmApi(Resource):
     @account_initialization_required
     @marshal_with(pipeline_import_fields)
     def post(self, import_id):
+        current_user, _ = current_account_with_tenant()
         # Check user role first
-        if not current_user.is_editor:
+        if not current_user.has_edit_permission:
             raise Forbidden()

         # Create service with session
         with Session(db.engine) as session:
             import_service = RagPipelineDslService(session)
             # Confirm import
-            account = cast(Account, current_user)
+            account = current_user
             result = import_service.confirm_import(import_id=import_id, account=account)
             session.commit()
@@ -100,7 +100,8 @@ class RagPipelineImportCheckDependenciesApi(Resource):
     @account_initialization_required
     @marshal_with(pipeline_import_check_dependencies_fields)
     def get(self, pipeline: Pipeline):
-        if not current_user.is_editor:
+        current_user, _ = current_account_with_tenant()
+        if not current_user.has_edit_permission:
             raise Forbidden()

         with Session(db.engine) as session:
@@ -117,12 +118,12 @@ class RagPipelineExportApi(Resource):
     @get_rag_pipeline
     @account_initialization_required
     def get(self, pipeline: Pipeline):
-        if not current_user.is_editor:
+        current_user, _ = current_account_with_tenant()
+        if not current_user.has_edit_permission:
             raise Forbidden()

         # Add include_secret params
-        parser = reqparse.RequestParser()
-        parser.add_argument("include_secret", type=str, default="false", location="args")
+        parser = reqparse.RequestParser().add_argument("include_secret", type=str, default="false", location="args")
         args = parser.parse_args()

         with Session(db.engine) as session:
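This changeset imports `current_account_with_tenant` from `libs.login` throughout, but the helper's body is not part of the diff. Judging only from the call sites (it yields an `(account, tenant_id)` pair and removes per-view `isinstance`/tenant checks), a plausible shape is the sketch below; the name of the parameter, the exception type, and the exact checks are assumptions, not the actual implementation:

```python
# Hypothetical sketch only -- the real helper lives in api/libs/login.py.
from werkzeug.exceptions import Unauthorized


def current_account_with_tenant_sketch(current_user):
    """Return (account, tenant_id), failing fast so each view no longer
    repeats isinstance(current_user, Account) and tenant-id checks."""
    tenant_id = getattr(current_user, "current_tenant_id", None)
    if current_user is None or tenant_id is None:
        raise Unauthorized("A logged-in account with an active tenant is required.")
    return current_user, tenant_id
```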
diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py
index a75c121fbe..5fe8572dfa 100644
--- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py
+++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py
@@ -18,6 +18,7 @@ from controllers.console.app.error import (
 from controllers.console.datasets.wraps import get_rag_pipeline
 from controllers.console.wraps import (
     account_initialization_required,
+    edit_permission_required,
     setup_required,
 )
 from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
@@ -36,8 +37,8 @@
 )
 from libs import helper
 from libs.helper import TimestampField, uuid_value
-from libs.login import current_user, login_required
-from models.account import Account
+from libs.login import current_account_with_tenant, current_user, login_required
+from models import Account
 from models.dataset import Pipeline
 from models.model import EndUser
 from services.errors.app import WorkflowHashNotEqualError
@@ -56,15 +57,12 @@ class DraftRagPipelineApi(Resource):
     @login_required
     @account_initialization_required
     @get_rag_pipeline
+    @edit_permission_required
     @marshal_with(workflow_fields)
     def get(self, pipeline: Pipeline):
         """
         Get draft rag pipeline's workflow
         """
-        # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
-            raise Forbidden()
-
         # fetch draft workflow by app_model
         rag_pipeline_service = RagPipelineService()
         workflow = rag_pipeline_service.get_draft_workflow(pipeline=pipeline)
@@ -79,23 +77,25 @@ class DraftRagPipelineApi(Resource):
     @login_required
     @account_initialization_required
     @get_rag_pipeline
+    @edit_permission_required
     def post(self, pipeline: Pipeline):
         """
         Sync draft workflow
         """
         # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
-            raise Forbidden()
+        current_user, _ = current_account_with_tenant()

         content_type = request.headers.get("Content-Type", "")

         if "application/json" in content_type:
-            parser = reqparse.RequestParser()
-            parser.add_argument("graph", type=dict, required=True, nullable=False, location="json")
-            parser.add_argument("hash", type=str, required=False, location="json")
-            parser.add_argument("environment_variables", type=list, required=False, location="json")
-            parser.add_argument("conversation_variables", type=list, required=False, location="json")
-            parser.add_argument("rag_pipeline_variables", type=list, required=False, location="json")
+            parser = (
+                reqparse.RequestParser()
+                .add_argument("graph", type=dict, required=True, nullable=False, location="json")
+                .add_argument("hash", type=str, required=False, location="json")
+                .add_argument("environment_variables", type=list, required=False, location="json")
+                .add_argument("conversation_variables", type=list, required=False, location="json")
+                .add_argument("rag_pipeline_variables", type=list, required=False, location="json")
+            )
             args = parser.parse_args()
         elif "text/plain" in content_type:
             try:
@@ -154,16 +154,15 @@ class RagPipelineDraftRunIterationNodeApi(Resource):
     @login_required
     @account_initialization_required
     @get_rag_pipeline
+    @edit_permission_required
     def post(self, pipeline: Pipeline, node_id: str):
         """
         Run draft workflow iteration node
         """
         # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
-            raise Forbidden()
+        current_user, _ = current_account_with_tenant()

-        parser = reqparse.RequestParser()
-        parser.add_argument("inputs", type=dict, location="json")
+        parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json")
         args = parser.parse_args()

         try:
@@ -194,11 +193,11 @@ class RagPipelineDraftRunLoopNodeApi(Resource):
         Run draft workflow loop node
         """
         # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
+        current_user, _ = current_account_with_tenant()
+        if not current_user.has_edit_permission:
             raise Forbidden()

-        parser = reqparse.RequestParser()
-        parser.add_argument("inputs", type=dict, location="json")
+        parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json")
         args = parser.parse_args()

         try:
@@ -229,14 +228,17 @@ class DraftRagPipelineRunApi(Resource):
         Run draft workflow
         """
         # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
+        current_user, _ = current_account_with_tenant()
+        if not current_user.has_edit_permission:
             raise Forbidden()

-        parser = reqparse.RequestParser()
-        parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
-        parser.add_argument("datasource_type", type=str, required=True, location="json")
-        parser.add_argument("datasource_info_list", type=list, required=True, location="json")
-        parser.add_argument("start_node_id", type=str, required=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("inputs", type=dict, required=True, nullable=False, location="json")
+            .add_argument("datasource_type", type=str, required=True, location="json")
+            .add_argument("datasource_info_list", type=list, required=True, location="json")
+            .add_argument("start_node_id", type=str, required=True, location="json")
+        )
         args = parser.parse_args()

         try:
@@ -264,17 +266,20 @@ class PublishedRagPipelineRunApi(Resource):
         Run published workflow
         """
         # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
+        current_user, _ = current_account_with_tenant()
+        if not current_user.has_edit_permission:
             raise Forbidden()

-        parser = reqparse.RequestParser()
-        parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
-        parser.add_argument("datasource_type", type=str, required=True, location="json")
-        parser.add_argument("datasource_info_list", type=list, required=True, location="json")
-        parser.add_argument("start_node_id", type=str, required=True, location="json")
-        parser.add_argument("is_preview", type=bool, required=True, location="json", default=False)
-        parser.add_argument("response_mode", type=str, required=True, location="json", default="streaming")
-        parser.add_argument("original_document_id", type=str, required=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("inputs", type=dict, required=True, nullable=False, location="json")
+            .add_argument("datasource_type", type=str, required=True, location="json")
+            .add_argument("datasource_info_list", type=list, required=True, location="json")
+            .add_argument("start_node_id", type=str, required=True, location="json")
+            .add_argument("is_preview", type=bool, required=True, location="json", default=False)
+            .add_argument("response_mode", type=str, required=True, location="json", default="streaming")
+            .add_argument("original_document_id", type=str, required=False, location="json")
+        )
         args = parser.parse_args()

         streaming = args["response_mode"] == "streaming"
@@ -303,15 +308,16 @@ class PublishedRagPipelineRunApi(Resource):
 #         Run rag pipeline datasource
 #         """
 #         # The role of the current user in the ta table must be admin, owner, or editor
-#         if not current_user.is_editor:
+#         if not current_user.has_edit_permission:
 #             raise Forbidden()
 #
 #         if not isinstance(current_user, Account):
 #             raise Forbidden()
 #
-#         parser = reqparse.RequestParser()
-#         parser.add_argument("job_id", type=str, required=True, nullable=False, location="json")
-#         parser.add_argument("datasource_type", type=str, required=True, location="json")
+#         parser = (reqparse.RequestParser()
+#             .add_argument("job_id", type=str, required=True, nullable=False, location="json")
+#             .add_argument("datasource_type", type=str, required=True, location="json")
+#         )
 #         args = parser.parse_args()
 #
 #         job_id = args.get("job_id")
@@ -344,15 +350,16 @@ class PublishedRagPipelineRunApi(Resource):
 #         Run rag pipeline datasource
 #         """
 #         # The role of the current user in the ta table must be admin, owner, or editor
-#         if not current_user.is_editor:
+#         if not current_user.has_edit_permission:
 #             raise Forbidden()
 #
 #         if not isinstance(current_user, Account):
 #             raise Forbidden()
 #
-#         parser = reqparse.RequestParser()
-#         parser.add_argument("job_id", type=str, required=True, nullable=False, location="json")
-#         parser.add_argument("datasource_type", type=str, required=True, location="json")
+#         parser = (reqparse.RequestParser()
+#             .add_argument("job_id", type=str, required=True, nullable=False, location="json")
+#             .add_argument("datasource_type", type=str, required=True, location="json")
+#         )
 #         args = parser.parse_args()
 #
 #         job_id = args.get("job_id")
@@ -385,13 +392,16 @@ class RagPipelinePublishedDatasourceNodeRunApi(Resource):
         Run rag pipeline datasource
         """
         # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
+        current_user, _ = current_account_with_tenant()
+        if not current_user.has_edit_permission:
             raise Forbidden()

-        parser = reqparse.RequestParser()
-        parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
-        parser.add_argument("datasource_type", type=str, required=True, location="json")
-        parser.add_argument("credential_id", type=str, required=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("inputs", type=dict, required=True, nullable=False, location="json")
+            .add_argument("datasource_type", type=str, required=True, location="json")
+            .add_argument("credential_id", type=str, required=False, location="json")
+        )
         args = parser.parse_args()

         inputs = args.get("inputs")
@@ -428,13 +438,16 @@ class RagPipelineDraftDatasourceNodeRunApi(Resource):
         Run rag pipeline datasource
         """
         # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
+        current_user, _ = current_account_with_tenant()
+        if not current_user.has_edit_permission:
             raise Forbidden()

-        parser = reqparse.RequestParser()
-        parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
-        parser.add_argument("datasource_type", type=str, required=True, location="json")
-        parser.add_argument("credential_id", type=str, required=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("inputs", type=dict, required=True, nullable=False, location="json")
+            .add_argument("datasource_type", type=str, required=True, location="json")
+            .add_argument("credential_id", type=str, required=False, location="json")
+        )
         args = parser.parse_args()

         inputs = args.get("inputs")
@@ -472,11 +485,13 @@ class RagPipelineDraftNodeRunApi(Resource):
         Run draft workflow node
         """
         # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
+        current_user, _ = current_account_with_tenant()
+        if not current_user.has_edit_permission:
             raise Forbidden()

-        parser = reqparse.RequestParser()
-        parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
+        parser = reqparse.RequestParser().add_argument(
+            "inputs", type=dict, required=True, nullable=False, location="json"
+        )
         args = parser.parse_args()

         inputs = args.get("inputs")
@@ -505,7 +520,8 @@ class RagPipelineTaskStopApi(Resource):
         Stop workflow task
         """
         # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
+        current_user, _ = current_account_with_tenant()
+        if not current_user.has_edit_permission:
             raise Forbidden()

         AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, current_user.id)
@@ -525,7 +541,8 @@ class PublishedRagPipelineApi(Resource):
         Get published pipeline
         """
         # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
+        current_user, _ = current_account_with_tenant()
+        if not current_user.has_edit_permission:
             raise Forbidden()

         if not pipeline.is_published:
             return None
@@ -545,7 +562,8 @@ class PublishedRagPipelineApi(Resource):
         Publish workflow
         """
         # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
+        current_user, _ = current_account_with_tenant()
+        if not current_user.has_edit_permission:
             raise Forbidden()

         rag_pipeline_service = RagPipelineService()
@@ -580,7 +598,8 @@ class DefaultRagPipelineBlockConfigsApi(Resource):
         Get default block config
         """
         # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
+        current_user, _ = current_account_with_tenant()
+        if not current_user.has_edit_permission:
             raise Forbidden()

         # Get default block configs
@@ -599,11 +618,11 @@ class DefaultRagPipelineBlockConfigApi(Resource):
         Get default block config
         """
         # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
+        current_user, _ = current_account_with_tenant()
+        if not current_user.has_edit_permission:
             raise Forbidden()

-        parser = reqparse.RequestParser()
-        parser.add_argument("q", type=str, location="args")
+        parser = reqparse.RequestParser().add_argument("q", type=str, location="args")
         args = parser.parse_args()

         q = args.get("q")
@@ -631,14 +650,17 @@ class PublishedAllRagPipelineApi(Resource):
         """
         Get published workflows
         """
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
+        current_user, _ = current_account_with_tenant()
+        if not current_user.has_edit_permission:
             raise Forbidden()

-        parser = reqparse.RequestParser()
-        parser.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
-        parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
-        parser.add_argument("user_id", type=str, required=False, location="args")
-        parser.add_argument("named_only", type=inputs.boolean, required=False, default=False, location="args")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
+            .add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
+            .add_argument("user_id", type=str, required=False, location="args")
+            .add_argument("named_only", type=inputs.boolean, required=False, default=False, location="args")
+        )
         args = parser.parse_args()
         page = int(args.get("page", 1))
         limit = int(args.get("limit", 10))
@@ -681,12 +703,15 @@ class RagPipelineByIdApi(Resource):
         Update workflow attributes
         """
         # Check permission
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
+        current_user, _ = current_account_with_tenant()
+        if not current_user.has_edit_permission:
             raise Forbidden()

-        parser = reqparse.RequestParser()
-        parser.add_argument("marked_name", type=str, required=False, location="json")
-        parser.add_argument("marked_comment", type=str, required=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("marked_name", type=str, required=False, location="json")
+            .add_argument("marked_comment", type=str, required=False, location="json")
+        )
         args = parser.parse_args()

         # Validate name and comment length
@@ -733,15 +758,12 @@ class PublishedRagPipelineSecondStepApi(Resource):
     @login_required
     @account_initialization_required
     @get_rag_pipeline
+    @edit_permission_required
     def get(self, pipeline: Pipeline):
         """
         Get second step parameters of rag pipeline
         """
-        # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
-            raise Forbidden()
-        parser = reqparse.RequestParser()
-        parser.add_argument("node_id", type=str, required=True, location="args")
+        parser = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args")
         args = parser.parse_args()
         node_id = args.get("node_id")
         if not node_id:
@@ -759,15 +781,12 @@ class PublishedRagPipelineFirstStepApi(Resource):
     @login_required
     @account_initialization_required
     @get_rag_pipeline
+    @edit_permission_required
     def get(self, pipeline: Pipeline):
         """
         Get first step parameters of rag pipeline
         """
-        # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
-            raise Forbidden()
-        parser = reqparse.RequestParser()
-        parser.add_argument("node_id", type=str, required=True, location="args")
+        parser = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args")
         args = parser.parse_args()
         node_id = args.get("node_id")
         if not node_id:
@@ -785,15 +804,12 @@ class DraftRagPipelineFirstStepApi(Resource):
     @login_required
     @account_initialization_required
     @get_rag_pipeline
+    @edit_permission_required
     def get(self, pipeline: Pipeline):
         """
         Get first step parameters of rag pipeline
         """
-        # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
-            raise Forbidden()
-        parser = reqparse.RequestParser()
-        parser.add_argument("node_id", type=str, required=True, location="args")
+        parser = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args")
         args = parser.parse_args()
         node_id = args.get("node_id")
         if not node_id:
@@ -811,15 +827,12 @@ class DraftRagPipelineSecondStepApi(Resource):
     @login_required
     @account_initialization_required
     @get_rag_pipeline
+    @edit_permission_required
     def get(self, pipeline: Pipeline):
         """
         Get second step parameters of rag pipeline
         """
-        # The role of the current user in the ta table must be admin, owner, or editor
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
-            raise Forbidden()
-        parser = reqparse.RequestParser()
-        parser.add_argument("node_id", type=str, required=True, location="args")
+        parser = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args")
         args = parser.parse_args()
         node_id = args.get("node_id")
         if not node_id:
@@ -843,9 +856,11 @@ class RagPipelineWorkflowRunListApi(Resource):
         """
         Get workflow run list
        """
-        parser = reqparse.RequestParser()
-        parser.add_argument("last_id", type=uuid_value, location="args")
-        parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("last_id", type=uuid_value, location="args")
+            .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
+        )
         args = parser.parse_args()

         rag_pipeline_service = RagPipelineService()
@@ -880,7 +895,7 @@ class RagPipelineWorkflowRunNodeExecutionListApi(Resource):
     @account_initialization_required
     @get_rag_pipeline
     @marshal_with(workflow_run_node_execution_list_fields)
-    def get(self, pipeline: Pipeline, run_id):
+    def get(self, pipeline: Pipeline, run_id: str):
         """
         Get workflow run node execution list
         """
@@ -903,14 +918,8 @@ class DatasourceListApi(Resource):
     @login_required
     @account_initialization_required
     def get(self):
-        user = current_user
-        if not isinstance(user, Account):
-            raise Forbidden()
-        tenant_id = user.current_tenant_id
-        if not tenant_id:
-            raise Forbidden()
-
-        return jsonable_encoder(RagPipelineManageService.list_rag_pipeline_datasources(tenant_id))
+        _, current_tenant_id = current_account_with_tenant()
+        return jsonable_encoder(RagPipelineManageService.list_rag_pipeline_datasources(current_tenant_id))


 @console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/last-run")
@@ -940,9 +949,8 @@ class RagPipelineTransformApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
-    def post(self, dataset_id):
-        if not isinstance(current_user, Account):
-            raise Forbidden()
+    def post(self, dataset_id: str):
+        current_user, _ = current_account_with_tenant()
         if not (current_user.has_edit_permission or current_user.is_dataset_operator):
             raise Forbidden()
@@ -959,19 +967,20 @@ class RagPipelineDatasourceVariableApi(Resource):
     @login_required
     @account_initialization_required
     @get_rag_pipeline
+    @edit_permission_required
     @marshal_with(workflow_run_node_execution_fields)
     def post(self, pipeline: Pipeline):
         """
         Set datasource variables
         """
-        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
-            raise Forbidden()
-
-        parser = reqparse.RequestParser()
-        parser.add_argument("datasource_type", type=str, required=True, location="json")
-        parser.add_argument("datasource_info", type=dict, required=True, location="json")
-        parser.add_argument("start_node_id", type=str, required=True, location="json")
-        parser.add_argument("start_node_title", type=str, required=True, location="json")
+        current_user, _ = current_account_with_tenant()
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("datasource_type", type=str, required=True, location="json")
+            .add_argument("datasource_info", type=dict, required=True, location="json")
+            .add_argument("start_node_id", type=str, required=True, location="json")
+            .add_argument("start_node_title", type=str, required=True, location="json")
+        )
         args = parser.parse_args()

         rag_pipeline_service = RagPipelineService()
diff --git a/api/controllers/console/datasets/website.py b/api/controllers/console/datasets/website.py
index b9c1f65bfd..fe6eaaa0de 100644
--- a/api/controllers/console/datasets/website.py
+++ b/api/controllers/console/datasets/website.py
@@ -31,17 +31,19 @@ class WebsiteCrawlApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument(
-            "provider",
-            type=str,
-            choices=["firecrawl", "watercrawl", "jinareader"],
-            required=True,
-            nullable=True,
-            location="json",
+        parser = (
+            reqparse.RequestParser()
+            .add_argument(
+                "provider",
+                type=str,
+                choices=["firecrawl", "watercrawl", "jinareader"],
+                required=True,
+                nullable=True,
+                location="json",
+            )
+            .add_argument("url", type=str, required=True, nullable=True, location="json")
+            .add_argument("options", type=dict, required=True, nullable=True, location="json")
         )
-        parser.add_argument("url", type=str, required=True, nullable=True, location="json")
-        parser.add_argument("options", type=dict, required=True, nullable=True, location="json")
         args = parser.parse_args()

         # Create typed request and validate
@@ -70,8 +72,7 @@ class WebsiteCrawlStatusApi(Resource):
     @login_required
     @account_initialization_required
     def get(self, job_id: str):
-        parser = reqparse.RequestParser()
-        parser.add_argument(
+        parser = reqparse.RequestParser().add_argument(
             "provider", type=str, choices=["firecrawl", "watercrawl", "jinareader"], required=True, location="args"
         )
         args = parser.parse_args()
diff --git a/api/controllers/console/datasets/wraps.py b/api/controllers/console/datasets/wraps.py
index 98abb3ef8d..a8c1298e3e 100644
--- a/api/controllers/console/datasets/wraps.py
+++ b/api/controllers/console/datasets/wraps.py
@@ -3,8 +3,7 @@ from functools import wraps

 from controllers.console.datasets.error import PipelineNotFoundError
 from extensions.ext_database import db
-from libs.login import current_user
-from models.account import Account
+from libs.login import current_account_with_tenant
 from models.dataset import Pipeline
@@ -17,8 +16,7 @@ def get_rag_pipeline(
         if not kwargs.get("pipeline_id"):
             raise ValueError("missing pipeline_id in path parameters")

-        if not isinstance(current_user, Account):
-            raise ValueError("current_user is not an account")
+        _, current_tenant_id = current_account_with_tenant()

         pipeline_id = kwargs.get("pipeline_id")
         pipeline_id = str(pipeline_id)
@@ -27,7 +25,7 @@ def get_rag_pipeline(
         pipeline = (
             db.session.query(Pipeline)
-            .where(Pipeline.id == pipeline_id, Pipeline.tenant_id == current_user.current_tenant_id)
+            .where(Pipeline.id == pipeline_id, Pipeline.tenant_id == current_tenant_id)
             .first()
         )
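Many views in `rag_pipeline_workflow.py` above swap the inline `isinstance(current_user, Account) or not current_user.has_edit_permission` guard for an `@edit_permission_required` decorator from `controllers.console.wraps`. The decorator's body is not shown in this diff; mirroring the inline checks it replaces, it plausibly looks like the sketch below (the deferred imports and exception type are assumptions):

```python
# Hypothetical sketch mirroring the inline guards this decorator replaces;
# the real implementation is in api/controllers/console/wraps.py.
from functools import wraps

from werkzeug.exceptions import Forbidden


def edit_permission_required(view):
    @wraps(view)
    def decorated(*args, **kwargs):
        from libs.login import current_user  # deferred: needs app context
        from models import Account

        # Same condition the removed inline checks expressed.
        if not isinstance(current_user, Account) or not current_user.has_edit_permission:
            raise Forbidden()
        return view(*args, **kwargs)

    return decorated
```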
diff --git a/api/controllers/console/explore/audio.py b/api/controllers/console/explore/audio.py
index 7c20fb49d8..2a248cf20d 100644
--- a/api/controllers/console/explore/audio.py
+++ b/api/controllers/console/explore/audio.py
@@ -81,11 +81,13 @@ class ChatTextApi(InstalledAppResource):
         app_model = installed_app.app

         try:
-            parser = reqparse.RequestParser()
-            parser.add_argument("message_id", type=str, required=False, location="json")
-            parser.add_argument("voice", type=str, location="json")
-            parser.add_argument("text", type=str, location="json")
-            parser.add_argument("streaming", type=bool, location="json")
+            parser = (
+                reqparse.RequestParser()
+                .add_argument("message_id", type=str, required=False, location="json")
+                .add_argument("voice", type=str, location="json")
+                .add_argument("text", type=str, location="json")
+                .add_argument("streaming", type=bool, location="json")
+            )
             args = parser.parse_args()

             message_id = args.get("message_id", None)
diff --git a/api/controllers/console/explore/completion.py b/api/controllers/console/explore/completion.py
index 1102b815eb..9386ecebae 100644
--- a/api/controllers/console/explore/completion.py
+++ b/api/controllers/console/explore/completion.py
@@ -49,12 +49,14 @@ class CompletionApi(InstalledAppResource):
         if app_model.mode != "completion":
             raise NotCompletionAppError()

-        parser = reqparse.RequestParser()
-        parser.add_argument("inputs", type=dict, required=True, location="json")
-        parser.add_argument("query", type=str, location="json", default="")
-        parser.add_argument("files", type=list, required=False, location="json")
-        parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json")
-        parser.add_argument("retriever_from", type=str, required=False, default="explore_app", location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("inputs", type=dict, required=True, location="json")
+            .add_argument("query", type=str, location="json", default="")
+            .add_argument("files", type=list, required=False, location="json")
+            .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json")
+            .add_argument("retriever_from", type=str, required=False, default="explore_app", location="json")
+        )
         args = parser.parse_args()

         streaming = args["response_mode"] == "streaming"
@@ -121,13 +123,15 @@ class ChatApi(InstalledAppResource):
         if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
             raise NotChatAppError()

-        parser = reqparse.RequestParser()
-        parser.add_argument("inputs", type=dict, required=True, location="json")
-        parser.add_argument("query", type=str, required=True, location="json")
-        parser.add_argument("files", type=list, required=False, location="json")
-        parser.add_argument("conversation_id", type=uuid_value, location="json")
-        parser.add_argument("parent_message_id", type=uuid_value, required=False, location="json")
-        parser.add_argument("retriever_from", type=str, required=False, default="explore_app", location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("inputs", type=dict, required=True, location="json")
+            .add_argument("query", type=str, required=True, location="json")
+            .add_argument("files", type=list, required=False, location="json")
+            .add_argument("conversation_id", type=uuid_value, location="json")
+            .add_argument("parent_message_id", type=uuid_value, required=False, location="json")
+            .add_argument("retriever_from", type=str, required=False, default="explore_app", location="json")
+        )
         args = parser.parse_args()

         args["auto_generate_name"] = False
diff --git a/api/controllers/console/explore/conversation.py b/api/controllers/console/explore/conversation.py
index feabea2524..5a39363cc2 100644
--- a/api/controllers/console/explore/conversation.py
+++ b/api/controllers/console/explore/conversation.py
@@ -31,10 +31,12 @@ class ConversationListApi(InstalledAppResource):
         if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
             raise NotChatAppError()

-        parser = reqparse.RequestParser()
-        parser.add_argument("last_id", type=uuid_value, location="args")
-        parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
-        parser.add_argument("pinned", type=str, choices=["true", "false", None], location="args")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("last_id", type=uuid_value, location="args")
+            .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
+            .add_argument("pinned", type=str, choices=["true", "false", None], location="args")
+        )
         args = parser.parse_args()

         pinned = None
@@ -94,9 +96,11 @@ class ConversationRenameApi(InstalledAppResource):
         conversation_id = str(c_id)

-        parser = reqparse.RequestParser()
-        parser.add_argument("name", type=str, required=False, location="json")
-        parser.add_argument("auto_generate", type=bool, required=False, default=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("name", type=str, required=False, location="json")
+            .add_argument("auto_generate", type=bool, required=False, default=False, location="json")
+        )
         args = parser.parse_args()

         try:
diff --git a/api/controllers/console/explore/installed_app.py b/api/controllers/console/explore/installed_app.py
index c86c243c9b..3c95779475 100644
--- a/api/controllers/console/explore/installed_app.py
+++ b/api/controllers/console/explore/installed_app.py
@@ -12,10 +12,9 @@ from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
 from extensions.ext_database import db
 from fields.installed_app_fields import installed_app_list_fields
 from libs.datetime_utils import naive_utc_now
-from libs.login import current_user, login_required
-from models import Account, App, InstalledApp, RecommendedApp
+from libs.login import current_account_with_tenant, login_required
+from models import App, InstalledApp, RecommendedApp
 from services.account_service import TenantService
-from services.app_service import AppService
 from services.enterprise.enterprise_service import EnterpriseService
 from services.feature_service import FeatureService
@@ -29,9 +28,7 @@ class InstalledAppsListApi(Resource):
     @marshal_with(installed_app_list_fields)
     def get(self):
         app_id = request.args.get("app_id", default=None, type=str)
-        if not isinstance(current_user, Account):
-            raise ValueError("current_user must be an Account instance")
-        current_tenant_id = current_user.current_tenant_id
+        current_user, current_tenant_id = current_account_with_tenant()

         if app_id:
             installed_apps = db.session.scalars(
@@ -69,31 +66,26 @@ class InstalledAppsListApi(Resource):
             # Pre-filter out apps without setting or with sso_verified
             filtered_installed_apps = []
-            app_id_to_app_code = {}

             for installed_app in installed_app_list:
                 app_id = installed_app["app"].id
                 webapp_setting = webapp_settings.get(app_id)
                 if not webapp_setting or webapp_setting.access_mode == "sso_verified":
                     continue
-                app_code = AppService.get_app_code_by_id(str(app_id))
-                app_id_to_app_code[app_id] = app_code
                 filtered_installed_apps.append(installed_app)

-            app_codes = list(app_id_to_app_code.values())
-            # Batch permission check
+            app_ids = [installed_app["app"].id for installed_app in filtered_installed_apps]
             permissions = EnterpriseService.WebAppAuth.batch_is_user_allowed_to_access_webapps(
                 user_id=user_id,
-                app_codes=app_codes,
+                app_ids=app_ids,
             )

             # Keep only allowed apps
             res = []
             for installed_app in filtered_installed_apps:
                 app_id = installed_app["app"].id
-                app_code = app_id_to_app_code[app_id]
-                if permissions.get(app_code):
+                if permissions.get(app_id):
                     res.append(installed_app)

             installed_app_list = res
@@ -113,17 +105,15 @@ class InstalledAppsListApi(Resource):
     @account_initialization_required
     @cloud_edition_billing_resource_check("apps")
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("app_id", type=str, required=True, help="Invalid app_id")
+        parser = reqparse.RequestParser().add_argument("app_id", type=str, required=True, help="Invalid app_id")
         args = parser.parse_args()

         recommended_app = db.session.query(RecommendedApp).where(RecommendedApp.app_id == args["app_id"]).first()
         if recommended_app is None:
             raise NotFound("App not found")

-        if not isinstance(current_user, Account):
-            raise ValueError("current_user must be an Account instance")
-        current_tenant_id = current_user.current_tenant_id
+        _, current_tenant_id = current_account_with_tenant()
+
         app = db.session.query(App).where(App.id == args["app_id"]).first()

         if app is None:
@@ -163,9 +153,8 @@ class InstalledAppApi(InstalledAppResource):
     """

     def delete(self, installed_app):
-        if not isinstance(current_user, Account):
-            raise ValueError("current_user must be an Account instance")
-        if installed_app.app_owner_tenant_id == current_user.current_tenant_id:
+        _, current_tenant_id = current_account_with_tenant()
+        if installed_app.app_owner_tenant_id == current_tenant_id:
             raise BadRequest("You can't uninstall an app owned by the current tenant")

         db.session.delete(installed_app)
@@ -174,8 +163,7 @@ class InstalledAppApi(InstalledAppResource):
         return {"result": "success", "message": "App uninstalled successfully"}, 204

     def patch(self, installed_app):
-        parser = reqparse.RequestParser()
-        parser.add_argument("is_pinned", type=inputs.boolean)
+        parser = reqparse.RequestParser().add_argument("is_pinned", type=inputs.boolean)
         args = parser.parse_args()

         commit_args = False
diff --git a/api/controllers/console/explore/message.py b/api/controllers/console/explore/message.py
index b045e47846..db854e09bb 100644
--- a/api/controllers/console/explore/message.py
+++ b/api/controllers/console/explore/message.py
@@ -23,8 +23,7 @@ from core.model_runtime.errors.invoke import InvokeError
 from fields.message_fields import message_infinite_scroll_pagination_fields
 from libs import helper
 from libs.helper import uuid_value
-from libs.login import current_user
-from models import Account
+from libs.login import current_account_with_tenant
 from models.model import AppMode
 from services.app_generate_service import AppGenerateService
 from services.errors.app import MoreLikeThisDisabledError
@@ -48,21 +47,22 @@ class MessageListApi(InstalledAppResource):
     @marshal_with(message_infinite_scroll_pagination_fields)
     def get(self, installed_app):
+        current_user, _ = current_account_with_tenant()
         app_model = installed_app.app

         app_mode = AppMode.value_of(app_model.mode)
         if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
             raise NotChatAppError()

-        parser = reqparse.RequestParser()
-        parser.add_argument("conversation_id", required=True, type=uuid_value, location="args")
-        parser.add_argument("first_id", type=uuid_value, location="args")
-        parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("conversation_id", required=True, type=uuid_value, location="args")
+            .add_argument("first_id", type=uuid_value, location="args")
+            .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
+        )
         args = parser.parse_args()

         try:
-            if not isinstance(current_user, Account):
-                raise ValueError("current_user must be an Account instance")
             return MessageService.pagination_by_first_id(
                 app_model, current_user, args["conversation_id"], args["first_id"], args["limit"]
             )
@@ -78,18 +78,19 @@ class MessageListApi(InstalledAppResource):
 )
 class MessageFeedbackApi(InstalledAppResource):
     def post(self, installed_app, message_id):
+        current_user, _ = current_account_with_tenant()
         app_model = installed_app.app

         message_id = str(message_id)

-        parser = reqparse.RequestParser()
-        parser.add_argument("rating", type=str, choices=["like", "dislike", None], location="json")
-        parser.add_argument("content", type=str, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("rating", type=str, choices=["like", "dislike", None], location="json")
+            .add_argument("content", type=str, location="json")
+        )
         args = parser.parse_args()

         try:
-            if not isinstance(current_user, Account):
-                raise ValueError("current_user must be an Account instance")
             MessageService.create_feedback(
                 app_model=app_model,
                 message_id=message_id,
@@ -109,14 +110,14 @@ class MessageFeedbackApi(InstalledAppResource):
 )
 class MessageMoreLikeThisApi(InstalledAppResource):
     def get(self, installed_app, message_id):
+        current_user, _ = current_account_with_tenant()
         app_model = installed_app.app
         if app_model.mode != "completion":
             raise NotCompletionAppError()

         message_id = str(message_id)

-        parser = reqparse.RequestParser()
-        parser.add_argument(
+        parser = reqparse.RequestParser().add_argument(
             "response_mode", type=str, required=True, choices=["blocking", "streaming"], location="args"
         )
         args = parser.parse_args()
@@ -124,8 +125,6 @@ class MessageMoreLikeThisApi(InstalledAppResource):
         streaming = args["response_mode"] == "streaming"

         try:
-            if not isinstance(current_user, Account):
-                raise ValueError("current_user must be an Account instance")
             response = AppGenerateService.generate_more_like_this(
                 app_model=app_model,
                 user=current_user,
@@ -159,6 +158,7 @@ class MessageMoreLikeThisApi(InstalledAppResource):
 )
 class MessageSuggestedQuestionApi(InstalledAppResource):
     def get(self, installed_app, message_id):
+        current_user, _ = current_account_with_tenant()
         app_model = installed_app.app
         app_mode = AppMode.value_of(app_model.mode)
         if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
@@ -167,8 +167,6 @@ class MessageSuggestedQuestionApi(InstalledAppResource):
         message_id = str(message_id)

         try:
-            if not isinstance(current_user, Account):
-                raise ValueError("current_user must be an Account instance")
             questions = MessageService.get_suggested_questions_after_answer(
                 app_model=app_model, user=current_user, message_id=message_id, invoke_from=InvokeFrom.EXPLORE
             )
diff --git a/api/controllers/console/explore/recommended_app.py b/api/controllers/console/explore/recommended_app.py
index 6d627a929a..751012757a 100644
--- a/api/controllers/console/explore/recommended_app.py
+++ b/api/controllers/console/explore/recommended_app.py
@@ -42,8 +42,7 @@ class RecommendedAppListApi(Resource):
     @marshal_with(recommended_app_list_fields)
     def get(self):
         # language args
-        parser = reqparse.RequestParser()
-        parser.add_argument("language", type=str, location="args")
+        parser = reqparse.RequestParser().add_argument("language", type=str, location="args")
         args = parser.parse_args()

         language = args.get("language")
diff --git a/api/controllers/console/explore/saved_message.py b/api/controllers/console/explore/saved_message.py
index 79e4a4339e..9775c951f7 100644
--- a/api/controllers/console/explore/saved_message.py
+++ b/api/controllers/console/explore/saved_message.py
@@ -7,8 +7,7 @@ from controllers.console.explore.error import NotCompletionAppError
 from controllers.console.explore.wraps import InstalledAppResource
 from fields.conversation_fields import message_file_fields
 from libs.helper import TimestampField, uuid_value
-from libs.login import current_user
-from models import Account
+from libs.login import current_account_with_tenant
 from services.errors.message import MessageNotExistsError
 from services.saved_message_service import SavedMessageService
@@ -35,31 +34,30 @@ class SavedMessageListApi(InstalledAppResource):
     @marshal_with(saved_message_infinite_scroll_pagination_fields)
     def get(self, installed_app):
+        current_user, _ = current_account_with_tenant()
         app_model = installed_app.app
         if app_model.mode != "completion":
             raise NotCompletionAppError()

-        parser = reqparse.RequestParser()
-        parser.add_argument("last_id", type=uuid_value, location="args")
-        parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("last_id", type=uuid_value, location="args")
+            .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
+        )
         args = parser.parse_args()

-        if not isinstance(current_user, Account):
-            raise ValueError("current_user must be an Account instance")
         return SavedMessageService.pagination_by_last_id(app_model, current_user, args["last_id"], args["limit"])

     def post(self, installed_app):
+        current_user, _ = current_account_with_tenant()
         app_model = installed_app.app
         if app_model.mode != "completion":
             raise NotCompletionAppError()

-        parser = reqparse.RequestParser()
-        parser.add_argument("message_id", type=uuid_value, required=True, location="json")
+        parser = reqparse.RequestParser().add_argument("message_id", type=uuid_value, required=True, location="json")
         args = parser.parse_args()

         try:
-            if not isinstance(current_user, Account):
-                raise ValueError("current_user must be an Account instance")
             SavedMessageService.save(app_model, current_user, args["message_id"])
         except MessageNotExistsError:
             raise NotFound("Message Not Exists.")
@@ -72,6 +70,7 @@ class SavedMessageListApi(InstalledAppResource):
 )
 class SavedMessageApi(InstalledAppResource):
     def delete(self, installed_app, message_id):
+        current_user, _ = current_account_with_tenant()
         app_model = installed_app.app

         message_id = str(message_id)
@@ -79,8 +78,6 @@ class SavedMessageApi(InstalledAppResource):
         if app_model.mode != "completion":
             raise NotCompletionAppError()

-        if not isinstance(current_user, Account):
-            raise ValueError("current_user must be an Account instance")
         SavedMessageService.delete(app_model, current_user, message_id)

         return {"result": "success"}, 204
diff --git a/api/controllers/console/explore/workflow.py b/api/controllers/console/explore/workflow.py
index e32f2814eb..3022d937b9 100644
--- a/api/controllers/console/explore/workflow.py
+++ b/api/controllers/console/explore/workflow.py
@@ -22,7 +22,7 @@ from core.errors.error import (
 from core.model_runtime.errors.invoke import InvokeError
 from core.workflow.graph_engine.manager import GraphEngineManager
 from libs import helper
-from libs.login import current_user
+from libs.login import current_user as current_user_
 from models.model import AppMode, InstalledApp
 from services.app_generate_service import AppGenerateService
 from services.errors.llm import InvokeRateLimitError
@@ -31,6 +31,8 @@ from .. import console_ns

 logger = logging.getLogger(__name__)

+current_user = current_user_._get_current_object()  # type: ignore
+

 @console_ns.route("/installed-apps/<uuid:installed_app_id>/workflows/run")
 class InstalledAppWorkflowRunApi(InstalledAppResource):
@@ -45,9 +47,11 @@ class InstalledAppWorkflowRunApi(InstalledAppResource):
         if app_mode != AppMode.WORKFLOW:
             raise NotWorkflowAppError()

-        parser = reqparse.RequestParser()
-        parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
-        parser.add_argument("files", type=list, required=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("inputs", type=dict, required=True, nullable=False, location="json")
+            .add_argument("files", type=list, required=False, location="json")
+        )
         args = parser.parse_args()
         assert current_user is not None
         try:
diff --git a/api/controllers/console/explore/wraps.py b/api/controllers/console/explore/wraps.py
index 5956eb52c4..2a97d312aa 100644
--- a/api/controllers/console/explore/wraps.py
+++ b/api/controllers/console/explore/wraps.py
@@ -8,10 +8,8 @@ from werkzeug.exceptions import NotFound

 from controllers.console.explore.error import AppAccessDeniedError
 from controllers.console.wraps import account_initialization_required
 from extensions.ext_database import db
-from libs.login import current_user, login_required
+from libs.login import current_account_with_tenant, login_required
 from models import InstalledApp
-from models.account import Account
-from services.app_service import AppService
 from services.enterprise.enterprise_service import EnterpriseService
 from services.feature_service import FeatureService
@@ -24,13 +22,10 @@ def installed_app_required(view: Callable[Concatenate[InstalledApp, P], R] | None = None):
     def decorator(view: Callable[Concatenate[InstalledApp, P], R]):
         @wraps(view)
         def decorated(installed_app_id: str, *args: P.args, **kwargs: P.kwargs):
-            assert isinstance(current_user, Account)
-            assert current_user.current_tenant_id is not None
+            _, current_tenant_id = current_account_with_tenant()
             installed_app = (
                 db.session.query(InstalledApp)
-                .where(
-                    InstalledApp.id == str(installed_app_id), InstalledApp.tenant_id == current_user.current_tenant_id
-                )
+                .where(InstalledApp.id == str(installed_app_id), InstalledApp.tenant_id == current_tenant_id)
                 .first()
             )
@@ -56,14 +51,13 @@ def user_allowed_to_access_app(view: Callable[Concatenate[InstalledApp, P], R] | None = None):
     def decorator(view: Callable[Concatenate[InstalledApp, P], R]):
         @wraps(view)
         def decorated(installed_app: InstalledApp, *args: P.args, **kwargs: P.kwargs):
+            current_user, _ = current_account_with_tenant()
             feature = FeatureService.get_system_features()
             if feature.webapp_auth.enabled:
-                assert isinstance(current_user, Account)
                 app_id = installed_app.app_id
-                app_code = AppService.get_app_code_by_id(app_id)
                 res = EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp(
                     user_id=str(current_user.id),
-                    app_code=app_code,
+                    app_id=app_id,
                 )
                 if not res:
                     raise AppAccessDeniedError()
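`explore/workflow.py` above unwraps the `current_user` LocalProxy once at module level via `_get_current_object()`. For reference, werkzeug's `LocalProxy` forwards attribute access to a context-bound object, and `_get_current_object()` returns that underlying object itself; the unwrapped value is whatever the proxy resolves to at the moment of the call. In isolation:

```python
# Werkzeug LocalProxy semantics: attribute access is forwarded, and
# _get_current_object() hands back the underlying object.
from werkzeug.local import LocalProxy

_store = {"user": "alice"}
user_proxy = LocalProxy(lambda: _store["user"])

print(user_proxy.upper())                      # "ALICE" (proxied call)
print(user_proxy._get_current_object())        # "alice" (the real object)
print(type(user_proxy._get_current_object()))  # <class 'str'>
```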
diff --git a/api/controllers/console/extension.py b/api/controllers/console/extension.py
index c6b3cf7515..4e1a8aeb3e 100644
--- a/api/controllers/console/extension.py
+++ b/api/controllers/console/extension.py
@@ -4,8 +4,7 @@ from constants import HIDDEN_VALUE
 from controllers.console import api, console_ns
 from controllers.console.wraps import account_initialization_required, setup_required
 from fields.api_based_extension_fields import api_based_extension_fields
-from libs.login import current_user, login_required
-from models.account import Account
+from libs.login import current_account_with_tenant, login_required
 from models.api_based_extension import APIBasedExtension
 from services.api_based_extension_service import APIBasedExtensionService
 from services.code_based_extension_service import CodeBasedExtensionService
@@ -30,8 +29,7 @@ class CodeBasedExtensionAPI(Resource):
     @login_required
     @account_initialization_required
     def get(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("module", type=str, required=True, location="args")
+        parser = reqparse.RequestParser().add_argument("module", type=str, required=True, location="args")
         args = parser.parse_args()
 
         return {"module": args["module"], "data": CodeBasedExtensionService.get_code_based_extension(args["module"])}
@@ -47,9 +45,7 @@ class APIBasedExtensionAPI(Resource):
     @account_initialization_required
     @marshal_with(api_based_extension_fields)
     def get(self):
-        assert isinstance(current_user, Account)
-        assert current_user.current_tenant_id is not None
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
         return APIBasedExtensionService.get_all_by_tenant_id(tenant_id)
 
     @api.doc("create_api_based_extension")
@@ -70,16 +66,17 @@ class APIBasedExtensionAPI(Resource):
     @account_initialization_required
     @marshal_with(api_based_extension_fields)
     def post(self):
-        assert isinstance(current_user, Account)
-        assert current_user.current_tenant_id is not None
-        parser = reqparse.RequestParser()
-        parser.add_argument("name", type=str, required=True, location="json")
-        parser.add_argument("api_endpoint", type=str, required=True, location="json")
-        parser.add_argument("api_key", type=str, required=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("name", type=str, required=True, location="json")
+            .add_argument("api_endpoint", type=str, required=True, location="json")
+            .add_argument("api_key", type=str, required=True, location="json")
+        )
         args = parser.parse_args()
 
+        _, current_tenant_id = current_account_with_tenant()
         extension_data = APIBasedExtension(
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             name=args["name"],
             api_endpoint=args["api_endpoint"],
             api_key=args["api_key"],
@@ -99,10 +96,8 @@ class APIBasedExtensionDetailAPI(Resource):
     @account_initialization_required
     @marshal_with(api_based_extension_fields)
     def get(self, id):
-        assert isinstance(current_user, Account)
-        assert current_user.current_tenant_id is not None
         api_based_extension_id = str(id)
-        tenant_id = current_user.current_tenant_id
+        _, tenant_id = current_account_with_tenant()
 
         return APIBasedExtensionService.get_with_tenant_id(tenant_id, api_based_extension_id)
 
@@ -125,17 +120,17 @@ class APIBasedExtensionDetailAPI(Resource):
     @account_initialization_required
     @marshal_with(api_based_extension_fields)
     def post(self, id):
-        assert isinstance(current_user, Account)
-        assert current_user.current_tenant_id is not None
         api_based_extension_id = str(id)
-        tenant_id = current_user.current_tenant_id
+        _, current_tenant_id = current_account_with_tenant()
 
-        extension_data_from_db = APIBasedExtensionService.get_with_tenant_id(tenant_id, api_based_extension_id)
+        extension_data_from_db = APIBasedExtensionService.get_with_tenant_id(current_tenant_id, api_based_extension_id)
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("name", type=str, required=True, location="json")
-        parser.add_argument("api_endpoint", type=str, required=True, location="json")
-        parser.add_argument("api_key", type=str, required=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("name", type=str, required=True, location="json")
+            .add_argument("api_endpoint", type=str, required=True, location="json")
+            .add_argument("api_key", type=str, required=True, location="json")
+        )
         args = parser.parse_args()
 
         extension_data_from_db.name = args["name"]
@@ -154,12 +149,10 @@ class APIBasedExtensionDetailAPI(Resource):
     @login_required
     @account_initialization_required
     def delete(self, id):
-        assert isinstance(current_user, Account)
-        assert current_user.current_tenant_id is not None
         api_based_extension_id = str(id)
-        tenant_id = current_user.current_tenant_id
+        _, current_tenant_id = current_account_with_tenant()
 
-        extension_data_from_db = APIBasedExtensionService.get_with_tenant_id(tenant_id, api_based_extension_id)
+        extension_data_from_db = APIBasedExtensionService.get_with_tenant_id(current_tenant_id, api_based_extension_id)
 
         APIBasedExtensionService.delete(extension_data_from_db)
diff --git a/api/controllers/console/feature.py b/api/controllers/console/feature.py
index 80847b8fef..39bcf3424c 100644
--- a/api/controllers/console/feature.py
+++ b/api/controllers/console/feature.py
@@ -1,7 +1,6 @@
 from flask_restx import Resource, fields
 
-from libs.login import current_user, login_required
-from models.account import Account
+from libs.login import current_account_with_tenant, login_required
 from services.feature_service import FeatureService
 
 from . import api, console_ns
@@ -23,9 +22,9 @@ class FeatureApi(Resource):
     @cloud_utm_record
     def get(self):
         """Get feature configuration for current tenant"""
-        assert isinstance(current_user, Account)
-        assert current_user.current_tenant_id is not None
-        return FeatureService.get_features(current_user.current_tenant_id).model_dump()
+        _, current_tenant_id = current_account_with_tenant()
+
+        return FeatureService.get_features(current_tenant_id).model_dump()
 
 
 @console_ns.route("/system-features")
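Note: the fluent parser construction used above (and throughout the rest of this patch) works because flask-restx's RequestParser.add_argument returns the parser itself, so the chain still evaluates to the parser. A minimal standalone illustration:

    from flask_restx import reqparse

    parser = (
        reqparse.RequestParser()
        .add_argument("name", type=str, required=True, location="json")
        .add_argument("api_key", type=str, required=True, location="json")
    )
    # `parser` is the same RequestParser instance; inside a request context,
    # parser.parse_args() behaves exactly as with the old statement-per-argument style.
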
diff --git a/api/controllers/console/files.py b/api/controllers/console/files.py
index 34f186e2f0..1cd193f7ad 100644
--- a/api/controllers/console/files.py
+++ b/api/controllers/console/files.py
@@ -1,7 +1,6 @@
 from typing import Literal
 
 from flask import request
-from flask_login import current_user
 from flask_restx import Resource, marshal_with
 from werkzeug.exceptions import Forbidden
 
@@ -22,8 +21,7 @@ from controllers.console.wraps import (
 )
 from extensions.ext_database import db
 from fields.file_fields import file_fields, upload_config_fields
-from libs.login import login_required
-from models import Account
+from libs.login import current_account_with_tenant, login_required
 from services.file_service import FileService
 
 from . import console_ns
@@ -53,6 +51,7 @@ class FileApi(Resource):
     @marshal_with(file_fields)
     @cloud_edition_billing_resource_check("documents")
     def post(self):
+        current_user, _ = current_account_with_tenant()
         source_str = request.form.get("source")
         source: Literal["datasets"] | None = "datasets" if source_str == "datasets" else None
 
@@ -65,16 +64,12 @@ class FileApi(Resource):
         if not file.filename:
             raise FilenameNotExistsError
-
         if source == "datasets" and not current_user.is_dataset_editor:
             raise Forbidden()
 
         if source not in ("datasets", None):
             source = None
 
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-
         try:
             upload_file = FileService(db.engine).upload_file(
                 filename=file.filename,
@@ -108,4 +103,4 @@ class FileSupportTypeApi(Resource):
     @login_required
     @account_initialization_required
     def get(self):
-        return {"allowed_extensions": DOCUMENT_EXTENSIONS}
+        return {"allowed_extensions": list(DOCUMENT_EXTENSIONS)}
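Note: the list(DOCUMENT_EXTENSIONS) change at the end of files.py matters because DOCUMENT_EXTENSIONS is a set, and sets are not JSON-serializable; converting to a list lets the response encode cleanly. The values below are illustrative only:

    import json

    DOCUMENT_EXTENSIONS = {"pdf", "md", "txt"}  # illustrative values, not the real constant

    # json.dumps(DOCUMENT_EXTENSIONS)  # would raise: Object of type set is not JSON serializable
    print(json.dumps(list(DOCUMENT_EXTENSIONS)))  # element order of a set is not guaranteed
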
diff --git a/api/controllers/console/init_validate.py b/api/controllers/console/init_validate.py
index 30b53458b2..f219425d07 100644
--- a/api/controllers/console/init_validate.py
+++ b/api/controllers/console/init_validate.py
@@ -57,8 +57,7 @@ class InitValidateAPI(Resource):
         if tenant_count > 0:
             raise AlreadySetupError()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("password", type=StrLen(30), required=True, location="json")
+        parser = reqparse.RequestParser().add_argument("password", type=StrLen(30), required=True, location="json")
         input_password = parser.parse_args()["password"]
 
         if input_password != os.environ.get("INIT_PASSWORD"):
diff --git a/api/controllers/console/remote_files.py b/api/controllers/console/remote_files.py
index 4d4bb5d779..96c86dc0db 100644
--- a/api/controllers/console/remote_files.py
+++ b/api/controllers/console/remote_files.py
@@ -14,8 +14,7 @@ from core.file import helpers as file_helpers
 from core.helper import ssrf_proxy
 from extensions.ext_database import db
 from fields.file_fields import file_fields_with_signed_url, remote_file_info_fields
-from libs.login import current_user
-from models.account import Account
+from libs.login import current_account_with_tenant
 from services.file_service import FileService
 
 from . import console_ns
@@ -41,8 +40,7 @@ class RemoteFileInfoApi(Resource):
 class RemoteFileUploadApi(Resource):
     @marshal_with(file_fields_with_signed_url)
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("url", type=str, required=True, help="URL is required")
+        parser = reqparse.RequestParser().add_argument("url", type=str, required=True, help="URL is required")
         args = parser.parse_args()
 
         url = args["url"]
@@ -64,8 +62,7 @@ class RemoteFileUploadApi(Resource):
         content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content
 
         try:
-            assert isinstance(current_user, Account)
-            user = current_user
+            user, _ = current_account_with_tenant()
             upload_file = FileService(db.engine).upload_file(
                 filename=file_info.filename,
                 content=content,
diff --git a/api/controllers/console/setup.py b/api/controllers/console/setup.py
index bff5fc1651..6d2b22bde3 100644
--- a/api/controllers/console/setup.py
+++ b/api/controllers/console/setup.py
@@ -69,10 +69,12 @@ class SetupApi(Resource):
         if not get_init_validate_status():
             raise NotInitValidateError()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("email", type=email, required=True, location="json")
-        parser.add_argument("name", type=StrLen(30), required=True, location="json")
-        parser.add_argument("password", type=valid_password, required=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("email", type=email, required=True, location="json")
+            .add_argument("name", type=StrLen(30), required=True, location="json")
+            .add_argument("password", type=valid_password, required=True, location="json")
+        )
         args = parser.parse_args()
 
         # setup
location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type." + parser = ( + reqparse.RequestParser() + .add_argument( + "name", + nullable=False, + required=True, + help="Name must be between 1 to 50 characters.", + type=_validate_name, + ) + .add_argument( + "type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type." + ) ) args = parser.parse_args() tag = TagService.save_tags(args) @@ -63,15 +66,13 @@ class TagUpdateDeleteApi(Resource): @login_required @account_initialization_required def patch(self, tag_id): - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + current_user, _ = current_account_with_tenant() tag_id = str(tag_id) # The role of the current user in the ta table must be admin, owner, or editor if not (current_user.has_edit_permission or current_user.is_dataset_editor): raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument( + parser = reqparse.RequestParser().add_argument( "name", nullable=False, required=True, help="Name must be between 1 to 50 characters.", type=_validate_name ) args = parser.parse_args() @@ -87,8 +88,7 @@ class TagUpdateDeleteApi(Resource): @login_required @account_initialization_required def delete(self, tag_id): - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + current_user, _ = current_account_with_tenant() tag_id = str(tag_id) # The role of the current user in the ta table must be admin, owner, or editor if not current_user.has_edit_permission: @@ -105,21 +105,22 @@ class TagBindingCreateApi(Resource): @login_required @account_initialization_required def post(self): - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + current_user, _ = current_account_with_tenant() # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator if not (current_user.has_edit_permission or current_user.is_dataset_editor): raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument( - "tag_ids", type=list, nullable=False, required=True, location="json", help="Tag IDs is required." - ) - parser.add_argument( - "target_id", type=str, nullable=False, required=True, location="json", help="Target ID is required." - ) - parser.add_argument( - "type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type." + parser = ( + reqparse.RequestParser() + .add_argument( + "tag_ids", type=list, nullable=False, required=True, location="json", help="Tag IDs is required." + ) + .add_argument( + "target_id", type=str, nullable=False, required=True, location="json", help="Target ID is required." + ) + .add_argument( + "type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type." 
+ ) ) args = parser.parse_args() TagService.save_tag_binding(args) @@ -133,17 +134,18 @@ class TagBindingDeleteApi(Resource): @login_required @account_initialization_required def post(self): - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + current_user, _ = current_account_with_tenant() # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator if not (current_user.has_edit_permission or current_user.is_dataset_editor): raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("tag_id", type=str, nullable=False, required=True, help="Tag ID is required.") - parser.add_argument("target_id", type=str, nullable=False, required=True, help="Target ID is required.") - parser.add_argument( - "type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type." + parser = ( + reqparse.RequestParser() + .add_argument("tag_id", type=str, nullable=False, required=True, help="Tag ID is required.") + .add_argument("target_id", type=str, nullable=False, required=True, help="Target ID is required.") + .add_argument( + "type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type." + ) ) args = parser.parse_args() TagService.delete_tag_binding(args) diff --git a/api/controllers/console/version.py b/api/controllers/console/version.py index 965a520f70..417486f59e 100644 --- a/api/controllers/console/version.py +++ b/api/controllers/console/version.py @@ -37,8 +37,7 @@ class VersionApi(Resource): ) def get(self): """Check for application version updates""" - parser = reqparse.RequestParser() - parser.add_argument("current_version", type=str, required=True, location="args") + parser = reqparse.RequestParser().add_argument("current_version", type=str, required=True, location="args") args = parser.parse_args() check_update_url = dify_config.CHECK_UPDATE_URL diff --git a/api/controllers/console/workspace/__init__.py b/api/controllers/console/workspace/__init__.py index 4a048f3c5e..876e2301f2 100644 --- a/api/controllers/console/workspace/__init__.py +++ b/api/controllers/console/workspace/__init__.py @@ -2,11 +2,11 @@ from collections.abc import Callable from functools import wraps from typing import ParamSpec, TypeVar -from flask_login import current_user from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden from extensions.ext_database import db +from libs.login import current_account_with_tenant from models.account import TenantPluginPermission P = ParamSpec("P") @@ -20,8 +20,9 @@ def plugin_permission_required( def interceptor(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): + current_user, current_tenant_id = current_account_with_tenant() user = current_user - tenant_id = user.current_tenant_id + tenant_id = current_tenant_id with Session(db.engine) as session: permission = ( diff --git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py index e2b0e3f84d..499a52370f 100644 --- a/api/controllers/console/workspace/account.py +++ b/api/controllers/console/workspace/account.py @@ -2,7 +2,6 @@ from datetime import datetime import pytz from flask import request -from flask_login import current_user from flask_restx import Resource, fields, marshal_with, reqparse from sqlalchemy import select from sqlalchemy.orm import Session @@ -37,9 +36,8 @@ from extensions.ext_database import db from fields.member_fields import account_fields from 
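Note: plugin_permission_required keeps its ParamSpec/TypeVar shape so decorated views preserve their exact signatures for type checkers; only the identity lookup changes. A self-contained sketch of that decorator pattern (the permission check below is a placeholder, not Dify's TenantPluginPermission query):

    from collections.abc import Callable
    from functools import wraps
    from typing import ParamSpec, TypeVar

    P = ParamSpec("P")
    R = TypeVar("R")

    def permission_required(allowed: bool) -> Callable[[Callable[P, R]], Callable[P, R]]:
        def interceptor(view: Callable[P, R]) -> Callable[P, R]:
            @wraps(view)
            def decorated(*args: P.args, **kwargs: P.kwargs) -> R:
                if not allowed:  # stand-in for the real tenant permission lookup
                    raise PermissionError("Forbidden")
                return view(*args, **kwargs)
            return decorated
        return interceptor

    @permission_required(allowed=True)
    def view(name: str) -> str:
        return f"hello {name}"

    print(view("dify"))  # hello dify
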
diff --git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py
index e2b0e3f84d..499a52370f 100644
--- a/api/controllers/console/workspace/account.py
+++ b/api/controllers/console/workspace/account.py
@@ -2,7 +2,6 @@ from datetime import datetime
 
 import pytz
 from flask import request
-from flask_login import current_user
 from flask_restx import Resource, fields, marshal_with, reqparse
 from sqlalchemy import select
 from sqlalchemy.orm import Session
@@ -37,9 +36,8 @@ from extensions.ext_database import db
 from fields.member_fields import account_fields
 from libs.datetime_utils import naive_utc_now
 from libs.helper import TimestampField, email, extract_remote_ip, timezone
-from libs.login import login_required
-from models import AccountIntegrate, InvitationCode
-from models.account import Account
+from libs.login import current_account_with_tenant, login_required
+from models import Account, AccountIntegrate, InvitationCode
 from services.account_service import AccountService
 from services.billing_service import BillingService
 from services.errors.account import CurrentPasswordIncorrectError as ServiceCurrentPasswordIncorrectError
@@ -50,9 +48,7 @@ class AccountInitApi(Resource):
     @setup_required
     @login_required
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        account = current_user
+        account, _ = current_account_with_tenant()
 
         if account.status == "active":
             raise AccountAlreadyInitedError()
@@ -61,9 +57,9 @@ class AccountInitApi(Resource):
 
         if dify_config.EDITION == "CLOUD":
             parser.add_argument("invitation_code", type=str, location="json")
-
-        parser.add_argument("interface_language", type=supported_language, required=True, location="json")
-        parser.add_argument("timezone", type=timezone, required=True, location="json")
+        parser.add_argument("interface_language", type=supported_language, required=True, location="json").add_argument(
+            "timezone", type=timezone, required=True, location="json"
+        )
         args = parser.parse_args()
 
         if dify_config.EDITION == "CLOUD":
@@ -106,8 +102,7 @@ class AccountProfileApi(Resource):
     @marshal_with(account_fields)
     @enterprise_license_required
     def get(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         return current_user
 
 
@@ -118,10 +113,8 @@ class AccountNameApi(Resource):
     @account_initialization_required
     @marshal_with(account_fields)
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        parser = reqparse.RequestParser()
-        parser.add_argument("name", type=str, required=True, location="json")
+        current_user, _ = current_account_with_tenant()
+        parser = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json")
         args = parser.parse_args()
 
         # Validate account name length
@@ -140,10 +133,8 @@ class AccountAvatarApi(Resource):
     @account_initialization_required
     @marshal_with(account_fields)
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        parser = reqparse.RequestParser()
-        parser.add_argument("avatar", type=str, required=True, location="json")
+        current_user, _ = current_account_with_tenant()
+        parser = reqparse.RequestParser().add_argument("avatar", type=str, required=True, location="json")
         args = parser.parse_args()
 
         updated_account = AccountService.update_account(current_user, avatar=args["avatar"])
@@ -158,10 +149,10 @@ class AccountInterfaceLanguageApi(Resource):
     @account_initialization_required
     @marshal_with(account_fields)
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        parser = reqparse.RequestParser()
-        parser.add_argument("interface_language", type=supported_language, required=True, location="json")
+        current_user, _ = current_account_with_tenant()
+        parser = reqparse.RequestParser().add_argument(
+            "interface_language", type=supported_language, required=True, location="json"
+        )
         args = parser.parse_args()
 
         updated_account = AccountService.update_account(current_user, interface_language=args["interface_language"])
@@ -176,10 +167,10 @@ class AccountInterfaceThemeApi(Resource):
     @account_initialization_required
     @marshal_with(account_fields)
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        parser = reqparse.RequestParser()
-        parser.add_argument("interface_theme", type=str, choices=["light", "dark"], required=True, location="json")
+        current_user, _ = current_account_with_tenant()
+        parser = reqparse.RequestParser().add_argument(
+            "interface_theme", type=str, choices=["light", "dark"], required=True, location="json"
+        )
         args = parser.parse_args()
 
         updated_account = AccountService.update_account(current_user, interface_theme=args["interface_theme"])
@@ -194,10 +185,8 @@ class AccountTimezoneApi(Resource):
     @account_initialization_required
     @marshal_with(account_fields)
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        parser = reqparse.RequestParser()
-        parser.add_argument("timezone", type=str, required=True, location="json")
+        current_user, _ = current_account_with_tenant()
+        parser = reqparse.RequestParser().add_argument("timezone", type=str, required=True, location="json")
         args = parser.parse_args()
 
         # Validate timezone string, e.g. America/New_York, Asia/Shanghai
@@ -216,12 +205,13 @@ class AccountPasswordApi(Resource):
     @account_initialization_required
     @marshal_with(account_fields)
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        parser = reqparse.RequestParser()
-        parser.add_argument("password", type=str, required=False, location="json")
-        parser.add_argument("new_password", type=str, required=True, location="json")
-        parser.add_argument("repeat_new_password", type=str, required=True, location="json")
+        current_user, _ = current_account_with_tenant()
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("password", type=str, required=False, location="json")
+            .add_argument("new_password", type=str, required=True, location="json")
+            .add_argument("repeat_new_password", type=str, required=True, location="json")
+        )
         args = parser.parse_args()
 
         if args["new_password"] != args["repeat_new_password"]:
@@ -253,9 +243,7 @@ class AccountIntegrateApi(Resource):
     @account_initialization_required
     @marshal_with(integrate_list_fields)
     def get(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        account = current_user
+        account, _ = current_account_with_tenant()
 
         account_integrates = db.session.scalars(
             select(AccountIntegrate).where(AccountIntegrate.account_id == account.id)
@@ -298,9 +286,7 @@ class AccountDeleteVerifyApi(Resource):
     @login_required
     @account_initialization_required
     def get(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        account = current_user
+        account, _ = current_account_with_tenant()
 
         token, code = AccountService.generate_account_deletion_verification_code(account)
         AccountService.send_account_deletion_verification_email(account, code)
@@ -314,13 +300,13 @@ class AccountDeleteApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        account = current_user
+        account, _ = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("token", type=str, required=True, location="json")
-        parser.add_argument("code", type=str, required=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("token", type=str, required=True, location="json")
+            .add_argument("code", type=str, required=True, location="json")
+        )
         args = parser.parse_args()
 
         if not AccountService.verify_account_deletion_code(args["token"], args["code"]):
@@ -335,9 +321,11 @@ class AccountDeleteUpdateFeedbackApi(Resource):
     @setup_required
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("email", type=str, required=True, location="json")
-        parser.add_argument("feedback", type=str, required=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("email", type=str, required=True, location="json")
+            .add_argument("feedback", type=str, required=True, location="json")
+        )
         args = parser.parse_args()
 
         BillingService.update_account_deletion_feedback(args["email"], args["feedback"])
@@ -358,9 +346,7 @@ class EducationVerifyApi(Resource):
     @cloud_edition_billing_enabled
     @marshal_with(verify_fields)
     def get(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        account = current_user
+        account, _ = current_account_with_tenant()
 
         return BillingService.EducationIdentity.verify(account.id, account.email)
 
@@ -380,14 +366,14 @@ class EducationApi(Resource):
     @only_edition_cloud
     @cloud_edition_billing_enabled
     def post(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        account = current_user
+        account, _ = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("token", type=str, required=True, location="json")
-        parser.add_argument("institution", type=str, required=True, location="json")
-        parser.add_argument("role", type=str, required=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("token", type=str, required=True, location="json")
+            .add_argument("institution", type=str, required=True, location="json")
+            .add_argument("role", type=str, required=True, location="json")
+        )
         args = parser.parse_args()
 
         return BillingService.EducationIdentity.activate(account, args["token"], args["institution"], args["role"])
@@ -399,9 +385,7 @@ class EducationApi(Resource):
     @cloud_edition_billing_enabled
     @marshal_with(status_fields)
     def get(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        account = current_user
+        account, _ = current_account_with_tenant()
 
         res = BillingService.EducationIdentity.status(account.id)
         # convert expire_at to UTC timestamp from isoformat
@@ -425,10 +409,12 @@ class EducationAutoCompleteApi(Resource):
     @cloud_edition_billing_enabled
     @marshal_with(data_fields)
     def get(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("keywords", type=str, required=True, location="args")
-        parser.add_argument("page", type=int, required=False, location="args", default=0)
-        parser.add_argument("limit", type=int, required=False, location="args", default=20)
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("keywords", type=str, required=True, location="args")
+            .add_argument("page", type=int, required=False, location="args", default=0)
+            .add_argument("limit", type=int, required=False, location="args", default=20)
+        )
         args = parser.parse_args()
 
         return BillingService.EducationIdentity.autocomplete(args["keywords"], args["page"], args["limit"])
@@ -441,11 +427,14 @@ class ChangeEmailSendEmailApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("email", type=email, required=True, location="json")
-        parser.add_argument("language", type=str, required=False, location="json")
-        parser.add_argument("phase", type=str, required=False, location="json")
-        parser.add_argument("token", type=str, required=False, location="json")
+        current_user, _ = current_account_with_tenant()
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("email", type=email, required=True, location="json")
+            .add_argument("language", type=str, required=False, location="json")
+            .add_argument("phase", type=str, required=False, location="json")
+            .add_argument("token", type=str, required=False, location="json")
+        )
         args = parser.parse_args()
 
         ip_address = extract_remote_ip(request)
@@ -467,8 +456,6 @@ class ChangeEmailSendEmailApi(Resource):
                 raise InvalidTokenError()
 
             user_email = reset_data.get("email", "")
-            if not isinstance(current_user, Account):
-                raise ValueError("Invalid user account")
             if user_email != current_user.email:
                 raise InvalidEmailError()
         else:
@@ -490,10 +477,12 @@ class ChangeEmailCheckApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("email", type=email, required=True, location="json")
-        parser.add_argument("code", type=str, required=True, location="json")
-        parser.add_argument("token", type=str, required=True, nullable=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("email", type=email, required=True, location="json")
+            .add_argument("code", type=str, required=True, location="json")
+            .add_argument("token", type=str, required=True, nullable=False, location="json")
+        )
         args = parser.parse_args()
 
         user_email = args["email"]
@@ -533,9 +522,11 @@ class ChangeEmailResetApi(Resource):
     @account_initialization_required
     @marshal_with(account_fields)
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("new_email", type=email, required=True, location="json")
-        parser.add_argument("token", type=str, required=True, nullable=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("new_email", type=email, required=True, location="json")
+            .add_argument("token", type=str, required=True, nullable=False, location="json")
+        )
         args = parser.parse_args()
 
         if AccountService.is_account_in_freeze(args["new_email"]):
@@ -551,8 +542,7 @@ class ChangeEmailResetApi(Resource):
         AccountService.revoke_change_email_token(args["token"])
 
         old_email = reset_data.get("old_email", "")
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         if current_user.email != old_email:
             raise AccountNotFound()
@@ -569,8 +559,7 @@ class CheckEmailUnique(Resource):
     @setup_required
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("email", type=email, required=True, location="json")
+        parser = reqparse.RequestParser().add_argument("email", type=email, required=True, location="json")
         args = parser.parse_args()
         if AccountService.is_account_in_freeze(args["email"]):
             raise AccountInFreezeError()
diff --git a/api/controllers/console/workspace/agent_providers.py b/api/controllers/console/workspace/agent_providers.py
index e044b2db5b..0a8f49d2e5 100644
--- a/api/controllers/console/workspace/agent_providers.py
+++ b/api/controllers/console/workspace/agent_providers.py
@@ -3,8 +3,7 @@ from flask_restx import Resource, fields
 from controllers.console import api, console_ns
 from controllers.console.wraps import account_initialization_required, setup_required
 from core.model_runtime.utils.encoders import jsonable_encoder
-from libs.login import current_user, login_required
-from models.account import Account
+from libs.login import current_account_with_tenant, login_required
 from services.agent_service import AgentService
 
@@ -21,12 +20,11 @@ class AgentProviderListApi(Resource):
     @login_required
     @account_initialization_required
     def get(self):
-        assert isinstance(current_user, Account)
+        current_user, current_tenant_id = current_account_with_tenant()
         user = current_user
-        assert user.current_tenant_id is not None
 
         user_id = user.id
-        tenant_id = user.current_tenant_id
+        tenant_id = current_tenant_id
 
         return jsonable_encoder(AgentService.list_agent_providers(user_id, tenant_id))
 
@@ -45,9 +43,5 @@ class AgentProviderApi(Resource):
     @login_required
     @account_initialization_required
     def get(self, provider_name: str):
-        assert isinstance(current_user, Account)
-        user = current_user
-        assert user.current_tenant_id is not None
-        user_id = user.id
-        tenant_id = user.current_tenant_id
-        return jsonable_encoder(AgentService.get_agent_provider(user_id, tenant_id, provider_name))
+        current_user, current_tenant_id = current_account_with_tenant()
+        return jsonable_encoder(AgentService.get_agent_provider(current_user.id, current_tenant_id, provider_name))
diff --git a/api/controllers/console/workspace/endpoint.py b/api/controllers/console/workspace/endpoint.py
index 782bd72565..d115f62d73 100644
--- a/api/controllers/console/workspace/endpoint.py
+++ b/api/controllers/console/workspace/endpoint.py
@@ -5,18 +5,10 @@ from controllers.console import api, console_ns
 from controllers.console.wraps import account_initialization_required, setup_required
 from core.model_runtime.utils.encoders import jsonable_encoder
 from core.plugin.impl.exc import PluginPermissionDeniedError
-from libs.login import current_user, login_required
-from models.account import Account
+from libs.login import current_account_with_tenant, login_required
 from services.plugin.endpoint_service import EndpointService
 
 
-def _current_account_with_tenant() -> tuple[Account, str]:
-    assert isinstance(current_user, Account)
-    tenant_id = current_user.current_tenant_id
-    assert tenant_id is not None
-    return current_user, tenant_id
-
-
 @console_ns.route("/workspaces/current/endpoints/create")
 class EndpointCreateApi(Resource):
     @api.doc("create_endpoint")
@@ -41,14 +33,16 @@ class EndpointCreateApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
-        user, tenant_id = _current_account_with_tenant()
+        user, tenant_id = current_account_with_tenant()
         if not user.is_admin_or_owner:
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("plugin_unique_identifier", type=str, required=True)
-        parser.add_argument("settings", type=dict, required=True)
-        parser.add_argument("name", type=str, required=True)
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("plugin_unique_identifier", type=str, required=True)
+            .add_argument("settings", type=dict, required=True)
+            .add_argument("name", type=str, required=True)
+        )
         args = parser.parse_args()
 
         plugin_unique_identifier = args["plugin_unique_identifier"]
@@ -87,11 +81,13 @@ class EndpointListApi(Resource):
     @login_required
     @account_initialization_required
     def get(self):
-        user, tenant_id = _current_account_with_tenant()
+        user, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("page", type=int, required=True, location="args")
-        parser.add_argument("page_size", type=int, required=True, location="args")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("page", type=int, required=True, location="args")
+            .add_argument("page_size", type=int, required=True, location="args")
+        )
         args = parser.parse_args()
 
         page = args["page"]
@@ -130,12 +126,14 @@ class EndpointListForSinglePluginApi(Resource):
     @login_required
     @account_initialization_required
     def get(self):
-        user, tenant_id = _current_account_with_tenant()
+        user, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("page", type=int, required=True, location="args")
-        parser.add_argument("page_size", type=int, required=True, location="args")
-        parser.add_argument("plugin_id", type=str, required=True, location="args")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("page", type=int, required=True, location="args")
+            .add_argument("page_size", type=int, required=True, location="args")
+            .add_argument("plugin_id", type=str, required=True, location="args")
+        )
         args = parser.parse_args()
 
         page = args["page"]
@@ -172,10 +170,9 @@ class EndpointDeleteApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
-        user, tenant_id = _current_account_with_tenant()
+        user, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("endpoint_id", type=str, required=True)
+        parser = reqparse.RequestParser().add_argument("endpoint_id", type=str, required=True)
         args = parser.parse_args()
 
         if not user.is_admin_or_owner:
@@ -212,12 +209,14 @@ class EndpointUpdateApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
-        user, tenant_id = _current_account_with_tenant()
+        user, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("endpoint_id", type=str, required=True)
-        parser.add_argument("settings", type=dict, required=True)
-        parser.add_argument("name", type=str, required=True)
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("endpoint_id", type=str, required=True)
+            .add_argument("settings", type=dict, required=True)
+            .add_argument("name", type=str, required=True)
+        )
         args = parser.parse_args()
 
         endpoint_id = args["endpoint_id"]
@@ -255,10 +254,9 @@ class EndpointEnableApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
-        user, tenant_id = _current_account_with_tenant()
+        user, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("endpoint_id", type=str, required=True)
+        parser = reqparse.RequestParser().add_argument("endpoint_id", type=str, required=True)
         args = parser.parse_args()
 
         endpoint_id = args["endpoint_id"]
@@ -288,10 +286,9 @@ class EndpointDisableApi(Resource):
     @login_required
     @account_initialization_required
     def post(self):
-        user, tenant_id = _current_account_with_tenant()
+        user, tenant_id = current_account_with_tenant()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("endpoint_id", type=str, required=True)
+        parser = reqparse.RequestParser().add_argument("endpoint_id", type=str, required=True)
         args = parser.parse_args()
 
         endpoint_id = args["endpoint_id"]
diff --git a/api/controllers/console/workspace/load_balancing_config.py b/api/controllers/console/workspace/load_balancing_config.py
index 99a1c1f032..9bf393ea2e 100644
--- a/api/controllers/console/workspace/load_balancing_config.py
+++ b/api/controllers/console/workspace/load_balancing_config.py
@@ -5,8 +5,8 @@
 from controllers.console import console_ns
 from controllers.console.wraps import account_initialization_required, setup_required
 from core.model_runtime.entities.model_entities import ModelType
 from core.model_runtime.errors.validate import CredentialsValidateFailedError
-from libs.login import current_user, login_required
-from models.account import Account, TenantAccountRole
+from libs.login import current_account_with_tenant, login_required
+from models import TenantAccountRole
 from services.model_load_balancing_service import ModelLoadBalancingService
 
@@ -18,24 +18,25 @@ class LoadBalancingCredentialsValidateApi(Resource):
     @login_required
     @account_initialization_required
     def post(self, provider: str):
-        assert isinstance(current_user, Account)
+        current_user, current_tenant_id = current_account_with_tenant()
         if not TenantAccountRole.is_privileged_role(current_user.current_role):
             raise Forbidden()
 
-        tenant_id = current_user.current_tenant_id
-        assert tenant_id is not None
+        tenant_id = current_tenant_id
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("model", type=str, required=True, nullable=False, location="json")
-        parser.add_argument(
-            "model_type",
-            type=str,
-            required=True,
-            nullable=False,
-            choices=[mt.value for mt in ModelType],
-            location="json",
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("model", type=str, required=True, nullable=False, location="json")
+            .add_argument(
+                "model_type",
+                type=str,
+                required=True,
+                nullable=False,
+                choices=[mt.value for mt in ModelType],
+                location="json",
+            )
+            .add_argument("credentials", type=dict, required=True, nullable=False, location="json")
         )
-        parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
         args = parser.parse_args()
 
         # validate model load balancing credentials
@@ -72,24 +73,25 @@ class LoadBalancingConfigCredentialsValidateApi(Resource):
     @login_required
     @account_initialization_required
     def post(self, provider: str, config_id: str):
-        assert isinstance(current_user, Account)
+        current_user, current_tenant_id = current_account_with_tenant()
         if not TenantAccountRole.is_privileged_role(current_user.current_role):
             raise Forbidden()
 
-        tenant_id = current_user.current_tenant_id
-        assert tenant_id is not None
+        tenant_id = current_tenant_id
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("model", type=str, required=True, nullable=False, location="json")
-        parser.add_argument(
-            "model_type",
-            type=str,
-            required=True,
-            nullable=False,
-            choices=[mt.value for mt in ModelType],
-            location="json",
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("model", type=str, required=True, nullable=False, location="json")
+            .add_argument(
+                "model_type",
+                type=str,
+                required=True,
+                nullable=False,
+                choices=[mt.value for mt in ModelType],
+                location="json",
+            )
+            .add_argument("credentials", type=dict, required=True, nullable=False, location="json")
         )
-        parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
         args = parser.parse_args()
 
         # validate model load balancing config credentials
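Note: both validate endpoints build their `choices` list from the ModelType enum, so the accepted `model_type` values always track the enum's members. The idiom in isolation (stand-in values; the real enum lives in core.model_runtime):

    from enum import Enum

    class ModelType(str, Enum):  # illustrative members only
        LLM = "llm"
        TEXT_EMBEDDING = "text-embedding"

    choices = [mt.value for mt in ModelType]
    print(choices)  # ['llm', 'text-embedding']
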
diff --git a/api/controllers/console/workspace/members.py b/api/controllers/console/workspace/members.py
index dd6a878d87..d66f861799 100644
--- a/api/controllers/console/workspace/members.py
+++ b/api/controllers/console/workspace/members.py
@@ -25,7 +25,7 @@ from controllers.console.wraps import (
 )
 from extensions.ext_database import db
 from fields.member_fields import account_with_role_list_fields
 from libs.helper import extract_remote_ip
-from libs.login import current_user, login_required
+from libs.login import current_account_with_tenant, login_required
 from models.account import Account, TenantAccountRole
 from services.account_service import AccountService, RegisterService, TenantService
 from services.errors.account import AccountAlreadyInTenantError
@@ -41,8 +41,7 @@ class MemberListApi(Resource):
     @account_initialization_required
     @marshal_with(account_with_role_list_fields)
     def get(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         if not current_user.current_tenant:
             raise ValueError("No current tenant")
         members = TenantService.get_tenant_members(current_user.current_tenant)
@@ -58,10 +57,12 @@ class MemberInviteEmailApi(Resource):
     @account_initialization_required
     @cloud_edition_billing_resource_check("members")
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("emails", type=list, required=True, location="json")
-        parser.add_argument("role", type=str, required=True, default="admin", location="json")
-        parser.add_argument("language", type=str, required=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("emails", type=list, required=True, location="json")
+            .add_argument("role", type=str, required=True, default="admin", location="json")
+            .add_argument("language", type=str, required=False, location="json")
+        )
         args = parser.parse_args()
 
         invitee_emails = args["emails"]
@@ -69,9 +70,7 @@ class MemberInviteEmailApi(Resource):
         interface_language = args["language"]
         if not TenantAccountRole.is_non_owner_role(invitee_role):
             return {"code": "invalid-role", "message": "Invalid role"}, 400
-
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         inviter = current_user
         if not inviter.current_tenant:
             raise ValueError("No current tenant")
@@ -120,8 +119,7 @@ class MemberCancelInviteApi(Resource):
     @login_required
     @account_initialization_required
     def delete(self, member_id):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         if not current_user.current_tenant:
             raise ValueError("No current tenant")
         member = db.session.query(Account).where(Account.id == str(member_id)).first()
@@ -153,16 +151,13 @@ class MemberUpdateRoleApi(Resource):
     @login_required
     @account_initialization_required
     def put(self, member_id):
-        parser = reqparse.RequestParser()
-        parser.add_argument("role", type=str, required=True, location="json")
+        parser = reqparse.RequestParser().add_argument("role", type=str, required=True, location="json")
         args = parser.parse_args()
         new_role = args["role"]
 
         if not TenantAccountRole.is_valid_role(new_role):
             return {"code": "invalid-role", "message": "Invalid role"}, 400
-
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         if not current_user.current_tenant:
             raise ValueError("No current tenant")
         member = db.session.get(Account, str(member_id))
@@ -189,8 +184,7 @@ class DatasetOperatorMemberListApi(Resource):
     @account_initialization_required
     @marshal_with(account_with_role_list_fields)
     def get(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         if not current_user.current_tenant:
             raise ValueError("No current tenant")
         members = TenantService.get_dataset_operator_members(current_user.current_tenant)
@@ -206,16 +200,13 @@ class SendOwnerTransferEmailApi(Resource):
     @account_initialization_required
     @is_allow_transfer_owner
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("language", type=str, required=False, location="json")
+        parser = reqparse.RequestParser().add_argument("language", type=str, required=False, location="json")
         args = parser.parse_args()
 
         ip_address = extract_remote_ip(request)
         if AccountService.is_email_send_ip_limit(ip_address):
             raise EmailSendIpLimitError()
-
+        current_user, _ = current_account_with_tenant()
         # check if the current user is the owner of the workspace
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
         if not current_user.current_tenant:
             raise ValueError("No current tenant")
         if not TenantService.is_owner(current_user, current_user.current_tenant):
@@ -245,13 +236,14 @@ class OwnerTransferCheckApi(Resource):
     @account_initialization_required
     @is_allow_transfer_owner
     def post(self):
-        parser = reqparse.RequestParser()
-        parser.add_argument("code", type=str, required=True, location="json")
-        parser.add_argument("token", type=str, required=True, nullable=False, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("code", type=str, required=True, location="json")
+            .add_argument("token", type=str, required=True, nullable=False, location="json")
+        )
         args = parser.parse_args()
         # check if the current user is the owner of the workspace
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         if not current_user.current_tenant:
             raise ValueError("No current tenant")
         if not TenantService.is_owner(current_user, current_user.current_tenant):
@@ -291,13 +283,13 @@ class OwnerTransfer(Resource):
     @account_initialization_required
     @is_allow_transfer_owner
     def post(self, member_id):
-        parser = reqparse.RequestParser()
-        parser.add_argument("token", type=str, required=True, nullable=False, location="json")
+        parser = reqparse.RequestParser().add_argument(
+            "token", type=str, required=True, nullable=False, location="json"
+        )
         args = parser.parse_args()
 
         # check if the current user is the owner of the workspace
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, _ = current_account_with_tenant()
         if not current_user.current_tenant:
             raise ValueError("No current tenant")
         if not TenantService.is_owner(current_user, current_user.current_tenant):
diff --git a/api/controllers/console/workspace/model_providers.py b/api/controllers/console/workspace/model_providers.py
index 7012580362..04db975fc2 100644
--- a/api/controllers/console/workspace/model_providers.py
+++ b/api/controllers/console/workspace/model_providers.py
@@ -1,7 +1,6 @@
 import io
 
 from flask import send_file
-from flask_login import current_user
 from flask_restx import Resource, reqparse
 from werkzeug.exceptions import Forbidden
 
@@ -11,8 +10,7 @@ from core.model_runtime.entities.model_entities import ModelType
 from core.model_runtime.errors.validate import CredentialsValidateFailedError
 from core.model_runtime.utils.encoders import jsonable_encoder
 from libs.helper import StrLen, uuid_value
-from libs.login import login_required
-from models.account import Account
+from libs.login import current_account_with_tenant, login_required
 from services.billing_service import BillingService
 from services.model_provider_service import ModelProviderService
 
@@ -23,14 +21,10 @@ class ModelProviderListApi(Resource):
     @login_required
     @account_initialization_required
     def get(self):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
-        tenant_id = current_user.current_tenant_id
+        _, current_tenant_id = current_account_with_tenant()
+        tenant_id = current_tenant_id
 
-        parser = reqparse.RequestParser()
-        parser.add_argument(
+        parser = reqparse.RequestParser().add_argument(
             "model_type",
             type=str,
             required=False,
@@ -52,14 +46,12 @@ class ModelProviderCredentialApi(Resource):
     @login_required
     @account_initialization_required
     def get(self, provider: str):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
-        tenant_id = current_user.current_tenant_id
+        _, current_tenant_id = current_account_with_tenant()
+        tenant_id = current_tenant_id
         # if credential_id is not provided, return current used credential
-        parser = reqparse.RequestParser()
-        parser.add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="args")
+        parser = reqparse.RequestParser().add_argument(
+            "credential_id", type=uuid_value, required=False, nullable=True, location="args"
+        )
         args = parser.parse_args()
 
         model_provider_service = ModelProviderService()
@@ -73,23 +65,22 @@ class ModelProviderCredentialApi(Resource):
     @login_required
     @account_initialization_required
     def post(self, provider: str):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, current_tenant_id = current_account_with_tenant()
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
-        parser.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("credentials", type=dict, required=True, nullable=False, location="json")
+            .add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
+        )
         args = parser.parse_args()
 
         model_provider_service = ModelProviderService()
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
         try:
             model_provider_service.create_provider_credential(
-                tenant_id=current_user.current_tenant_id,
+                tenant_id=current_tenant_id,
                 provider=provider,
                 credentials=args["credentials"],
                 credential_name=args["name"],
@@ -103,24 +94,23 @@ class ModelProviderCredentialApi(Resource):
     @login_required
     @account_initialization_required
     def put(self, provider: str):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, current_tenant_id = current_account_with_tenant()
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json")
-        parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
-        parser.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json")
+            .add_argument("credentials", type=dict, required=True, nullable=False, location="json")
+            .add_argument("name", type=StrLen(30), required=False, nullable=True, location="json")
+        )
         args = parser.parse_args()
 
         model_provider_service = ModelProviderService()
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
         try:
             model_provider_service.update_provider_credential(
-                tenant_id=current_user.current_tenant_id,
+                tenant_id=current_tenant_id,
                 provider=provider,
                 credentials=args["credentials"],
                 credential_id=args["credential_id"],
@@ -135,19 +125,17 @@ class ModelProviderCredentialApi(Resource):
     @login_required
     @account_initialization_required
     def delete(self, provider: str):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, current_tenant_id = current_account_with_tenant()
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json")
+        parser = reqparse.RequestParser().add_argument(
+            "credential_id", type=uuid_value, required=True, nullable=False, location="json"
+        )
         args = parser.parse_args()
 
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
         model_provider_service = ModelProviderService()
         model_provider_service.remove_provider_credential(
-            tenant_id=current_user.current_tenant_id, provider=provider, credential_id=args["credential_id"]
+            tenant_id=current_tenant_id, provider=provider, credential_id=args["credential_id"]
         )
 
         return {"result": "success"}, 204
@@ -159,19 +147,17 @@ class ModelProviderCredentialSwitchApi(Resource):
     @login_required
     @account_initialization_required
     def post(self, provider: str):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, current_tenant_id = current_account_with_tenant()
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
+        parser = reqparse.RequestParser().add_argument(
+            "credential_id", type=str, required=True, nullable=False, location="json"
+        )
         args = parser.parse_args()
 
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
         service = ModelProviderService()
         service.switch_active_provider_credential(
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             provider=provider,
             credential_id=args["credential_id"],
         )
@@ -184,15 +170,13 @@ class ModelProviderValidateApi(Resource):
     @login_required
     @account_initialization_required
     def post(self, provider: str):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
-        parser = reqparse.RequestParser()
-        parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
+        _, current_tenant_id = current_account_with_tenant()
+        parser = reqparse.RequestParser().add_argument(
+            "credentials", type=dict, required=True, nullable=False, location="json"
+        )
         args = parser.parse_args()
 
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
-        tenant_id = current_user.current_tenant_id
+        tenant_id = current_tenant_id
 
         model_provider_service = ModelProviderService()
 
@@ -240,17 +224,13 @@ class PreferredProviderTypeUpdateApi(Resource):
     @login_required
     @account_initialization_required
     def post(self, provider: str):
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, current_tenant_id = current_account_with_tenant()
         if not current_user.is_admin_or_owner:
             raise Forbidden()
 
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
-        tenant_id = current_user.current_tenant_id
+        tenant_id = current_tenant_id
 
-        parser = reqparse.RequestParser()
-        parser.add_argument(
+        parser = reqparse.RequestParser().add_argument(
             "preferred_provider_type",
             type=str,
             required=True,
@@ -276,14 +256,11 @@ class ModelProviderPaymentCheckoutUrlApi(Resource):
     def get(self, provider: str):
         if provider != "anthropic":
             raise ValueError(f"provider name {provider} is invalid")
-        if not isinstance(current_user, Account):
-            raise ValueError("Invalid user account")
+        current_user, current_tenant_id = current_account_with_tenant()
         BillingService.is_tenant_owner_or_admin(current_user)
-        if not current_user.current_tenant_id:
-            raise ValueError("No current tenant")
         data = BillingService.get_model_provider_payment_link(
             provider_name=provider,
-            tenant_id=current_user.current_tenant_id,
+            tenant_id=current_tenant_id,
             account_id=current_user.id,
             prefilled_email=current_user.email,
         )
ModelProviderService() models = model_provider_service.get_models_by_provider(tenant_id=tenant_id, provider=provider) @@ -104,24 +102,26 @@ class ModelProviderModelApi(Resource): @account_initialization_required def post(self, provider: str): # To save the model's load balance configs + current_user, tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() - tenant_id = current_user.current_tenant_id - - parser = reqparse.RequestParser() - parser.add_argument("model", type=str, required=True, nullable=False, location="json") - parser.add_argument( - "model_type", - type=str, - required=True, - nullable=False, - choices=[mt.value for mt in ModelType], - location="json", + parser = ( + reqparse.RequestParser() + .add_argument("model", type=str, required=True, nullable=False, location="json") + .add_argument( + "model_type", + type=str, + required=True, + nullable=False, + choices=[mt.value for mt in ModelType], + location="json", + ) + .add_argument("load_balancing", type=dict, required=False, nullable=True, location="json") + .add_argument("config_from", type=str, required=False, nullable=True, location="json") + .add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="json") ) - parser.add_argument("load_balancing", type=dict, required=False, nullable=True, location="json") - parser.add_argument("config_from", type=str, required=False, nullable=True, location="json") - parser.add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="json") args = parser.parse_args() if args.get("config_from", "") == "custom-model": @@ -129,7 +129,7 @@ class ModelProviderModelApi(Resource): raise ValueError("credential_id is required when configuring a custom-model") service = ModelProviderService() service.switch_active_custom_model_credential( - tenant_id=current_user.current_tenant_id, + tenant_id=tenant_id, provider=provider, model_type=args["model_type"], model=args["model"], @@ -164,20 +164,22 @@ class ModelProviderModelApi(Resource): @login_required @account_initialization_required def delete(self, provider: str): + current_user, tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() - tenant_id = current_user.current_tenant_id - - parser = reqparse.RequestParser() - parser.add_argument("model", type=str, required=True, nullable=False, location="json") - parser.add_argument( - "model_type", - type=str, - required=True, - nullable=False, - choices=[mt.value for mt in ModelType], - location="json", + parser = ( + reqparse.RequestParser() + .add_argument("model", type=str, required=True, nullable=False, location="json") + .add_argument( + "model_type", + type=str, + required=True, + nullable=False, + choices=[mt.value for mt in ModelType], + location="json", + ) ) args = parser.parse_args() @@ -195,20 +197,22 @@ class ModelProviderModelCredentialApi(Resource): @login_required @account_initialization_required def get(self, provider: str): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("model", type=str, required=True, nullable=False, location="args") - parser.add_argument( - "model_type", - type=str, - required=True, - nullable=False, - choices=[mt.value for mt in ModelType], - location="args", + parser = ( + reqparse.RequestParser() + .add_argument("model", type=str, required=True, nullable=False, location="args") + .add_argument( + "model_type", + 
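Reviewer note: the parser rewrites above rely on `RequestParser.add_argument` returning the parser itself (true in both Flask-RESTful and flask-restx), so the fluent chain builds exactly the same parser as the old statement-per-argument form. A quick standalone check:

```python
from flask_restx import reqparse

stepwise = reqparse.RequestParser()
stepwise.add_argument("model", type=str, required=True, nullable=False, location="json")
stepwise.add_argument("model_type", type=str, required=True, nullable=False, location="json")

fluent = (
    reqparse.RequestParser()
    .add_argument("model", type=str, required=True, nullable=False, location="json")
    .add_argument("model_type", type=str, required=True, nullable=False, location="json")
)

# Both parsers hold the same arguments in the same order.
assert [a.name for a in stepwise.args] == [a.name for a in fluent.args]
```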
type=str, + required=True, + nullable=False, + choices=[mt.value for mt in ModelType], + location="args", + ) + .add_argument("config_from", type=str, required=False, nullable=True, location="args") + .add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="args") ) - parser.add_argument("config_from", type=str, required=False, nullable=True, location="args") - parser.add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="args") args = parser.parse_args() model_provider_service = ModelProviderService() @@ -257,24 +261,27 @@ class ModelProviderModelCredentialApi(Resource): @login_required @account_initialization_required def post(self, provider: str): + current_user, tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("model", type=str, required=True, nullable=False, location="json") - parser.add_argument( - "model_type", - type=str, - required=True, - nullable=False, - choices=[mt.value for mt in ModelType], - location="json", + parser = ( + reqparse.RequestParser() + .add_argument("model", type=str, required=True, nullable=False, location="json") + .add_argument( + "model_type", + type=str, + required=True, + nullable=False, + choices=[mt.value for mt in ModelType], + location="json", + ) + .add_argument("name", type=StrLen(30), required=False, nullable=True, location="json") + .add_argument("credentials", type=dict, required=True, nullable=False, location="json") ) - parser.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json") - parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") args = parser.parse_args() - tenant_id = current_user.current_tenant_id model_provider_service = ModelProviderService() try: @@ -301,29 +308,33 @@ class ModelProviderModelCredentialApi(Resource): @login_required @account_initialization_required def put(self, provider: str): + current_user, current_tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("model", type=str, required=True, nullable=False, location="json") - parser.add_argument( - "model_type", - type=str, - required=True, - nullable=False, - choices=[mt.value for mt in ModelType], - location="json", + parser = ( + reqparse.RequestParser() + .add_argument("model", type=str, required=True, nullable=False, location="json") + .add_argument( + "model_type", + type=str, + required=True, + nullable=False, + choices=[mt.value for mt in ModelType], + location="json", + ) + .add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json") + .add_argument("credentials", type=dict, required=True, nullable=False, location="json") + .add_argument("name", type=StrLen(30), required=False, nullable=True, location="json") ) - parser.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json") - parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") - parser.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json") args = parser.parse_args() model_provider_service = ModelProviderService() try: model_provider_service.update_model_credential( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=provider, model_type=args["model_type"], model=args["model"], @@ -340,24 
+351,28 @@ class ModelProviderModelCredentialApi(Resource): @login_required @account_initialization_required def delete(self, provider: str): + current_user, current_tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("model", type=str, required=True, nullable=False, location="json") - parser.add_argument( - "model_type", - type=str, - required=True, - nullable=False, - choices=[mt.value for mt in ModelType], - location="json", + parser = ( + reqparse.RequestParser() + .add_argument("model", type=str, required=True, nullable=False, location="json") + .add_argument( + "model_type", + type=str, + required=True, + nullable=False, + choices=[mt.value for mt in ModelType], + location="json", + ) + .add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json") ) - parser.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json") args = parser.parse_args() model_provider_service = ModelProviderService() model_provider_service.remove_model_credential( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=provider, model_type=args["model_type"], model=args["model"], @@ -373,24 +388,28 @@ class ModelProviderModelCredentialSwitchApi(Resource): @login_required @account_initialization_required def post(self, provider: str): + current_user, current_tenant_id = current_account_with_tenant() + if not current_user.is_admin_or_owner: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("model", type=str, required=True, nullable=False, location="json") - parser.add_argument( - "model_type", - type=str, - required=True, - nullable=False, - choices=[mt.value for mt in ModelType], - location="json", + parser = ( + reqparse.RequestParser() + .add_argument("model", type=str, required=True, nullable=False, location="json") + .add_argument( + "model_type", + type=str, + required=True, + nullable=False, + choices=[mt.value for mt in ModelType], + location="json", + ) + .add_argument("credential_id", type=str, required=True, nullable=False, location="json") ) - parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json") args = parser.parse_args() service = ModelProviderService() service.add_model_credential_to_model_list( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=provider, model_type=args["model_type"], model=args["model"], @@ -407,17 +426,19 @@ class ModelProviderModelEnableApi(Resource): @login_required @account_initialization_required def patch(self, provider: str): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("model", type=str, required=True, nullable=False, location="json") - parser.add_argument( - "model_type", - type=str, - required=True, - nullable=False, - choices=[mt.value for mt in ModelType], - location="json", + parser = ( + reqparse.RequestParser() + .add_argument("model", type=str, required=True, nullable=False, location="json") + .add_argument( + "model_type", + type=str, + required=True, + nullable=False, + choices=[mt.value for mt in ModelType], + location="json", + ) ) args = parser.parse_args() @@ -437,17 +458,19 @@ class ModelProviderModelDisableApi(Resource): @login_required @account_initialization_required def patch(self, provider: str): - tenant_id = current_user.current_tenant_id + _, 
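Reviewer note: every model endpoint keeps the `choices=[mt.value for mt in ModelType]` gate, so the set of accepted `model_type` strings tracks the enum automatically and invalid values are rejected at parse time with a 400. A self-contained illustration with a stand-in enum (the real `ModelType` comes from `core.model_runtime.entities.model_entities` and has more members):

```python
from enum import Enum

from flask import Flask
from flask_restx import reqparse


class ModelType(Enum):  # stand-in for the real enum
    LLM = "llm"
    TEXT_EMBEDDING = "text-embedding"


app = Flask(__name__)
parser = reqparse.RequestParser().add_argument(
    "model_type", type=str, required=True, choices=[mt.value for mt in ModelType], location="json"
)

with app.test_request_context(json={"model_type": "llm"}):
    print(parser.parse_args()["model_type"])  # -> llm

with app.test_request_context(json={"model_type": "bogus"}):
    try:
        parser.parse_args()  # not one of the enum values
    except Exception as exc:  # flask-restx aborts with a 400 here
        print(type(exc).__name__)  # -> BadRequest
```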
tenant_id = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("model", type=str, required=True, nullable=False, location="json") - parser.add_argument( - "model_type", - type=str, - required=True, - nullable=False, - choices=[mt.value for mt in ModelType], - location="json", + parser = ( + reqparse.RequestParser() + .add_argument("model", type=str, required=True, nullable=False, location="json") + .add_argument( + "model_type", + type=str, + required=True, + nullable=False, + choices=[mt.value for mt in ModelType], + location="json", + ) ) args = parser.parse_args() @@ -465,19 +488,21 @@ class ModelProviderModelValidateApi(Resource): @login_required @account_initialization_required def post(self, provider: str): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("model", type=str, required=True, nullable=False, location="json") - parser.add_argument( - "model_type", - type=str, - required=True, - nullable=False, - choices=[mt.value for mt in ModelType], - location="json", + parser = ( + reqparse.RequestParser() + .add_argument("model", type=str, required=True, nullable=False, location="json") + .add_argument( + "model_type", + type=str, + required=True, + nullable=False, + choices=[mt.value for mt in ModelType], + location="json", + ) + .add_argument("credentials", type=dict, required=True, nullable=False, location="json") ) - parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") args = parser.parse_args() model_provider_service = ModelProviderService() @@ -511,11 +536,11 @@ class ModelProviderModelParameterRuleApi(Resource): @login_required @account_initialization_required def get(self, provider: str): - parser = reqparse.RequestParser() - parser.add_argument("model", type=str, required=True, nullable=False, location="args") + parser = reqparse.RequestParser().add_argument( + "model", type=str, required=True, nullable=False, location="args" + ) args = parser.parse_args() - - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() model_provider_service = ModelProviderService() parameter_rules = model_provider_service.get_model_parameter_rules( @@ -531,8 +556,7 @@ class ModelProviderAvailableModelApi(Resource): @login_required @account_initialization_required def get(self, model_type): - tenant_id = current_user.current_tenant_id - + _, tenant_id = current_account_with_tenant() model_provider_service = ModelProviderService() models = model_provider_service.get_models_by_model_type(tenant_id=tenant_id, model_type=model_type) diff --git a/api/controllers/console/workspace/plugin.py b/api/controllers/console/workspace/plugin.py index 7c70fb8aa0..e8bc312caf 100644 --- a/api/controllers/console/workspace/plugin.py +++ b/api/controllers/console/workspace/plugin.py @@ -1,7 +1,6 @@ import io from flask import request, send_file -from flask_login import current_user from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden @@ -11,7 +10,7 @@ from controllers.console.workspace import plugin_permission_required from controllers.console.wraps import account_initialization_required, setup_required from core.model_runtime.utils.encoders import jsonable_encoder from core.plugin.impl.exc import PluginDaemonClientSideError -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.account import 
TenantPluginAutoUpgradeStrategy, TenantPluginPermission from services.plugin.plugin_auto_upgrade_service import PluginAutoUpgradeService from services.plugin.plugin_parameter_service import PluginParameterService @@ -26,7 +25,7 @@ class PluginDebuggingKeyApi(Resource): @account_initialization_required @plugin_permission_required(debug_required=True) def get(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() try: return { @@ -44,10 +43,12 @@ class PluginListApi(Resource): @login_required @account_initialization_required def get(self): - tenant_id = current_user.current_tenant_id - parser = reqparse.RequestParser() - parser.add_argument("page", type=int, required=False, location="args", default=1) - parser.add_argument("page_size", type=int, required=False, location="args", default=256) + _, tenant_id = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("page", type=int, required=False, location="args", default=1) + .add_argument("page_size", type=int, required=False, location="args", default=256) + ) args = parser.parse_args() try: plugins_with_total = PluginService.list_with_total(tenant_id, args["page"], args["page_size"]) @@ -63,8 +64,7 @@ class PluginListLatestVersionsApi(Resource): @login_required @account_initialization_required def post(self): - req = reqparse.RequestParser() - req.add_argument("plugin_ids", type=list, required=True, location="json") + req = reqparse.RequestParser().add_argument("plugin_ids", type=list, required=True, location="json") args = req.parse_args() try: @@ -81,10 +81,9 @@ class PluginListInstallationsFromIdsApi(Resource): @login_required @account_initialization_required def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("plugin_ids", type=list, required=True, location="json") + parser = reqparse.RequestParser().add_argument("plugin_ids", type=list, required=True, location="json") args = parser.parse_args() try: @@ -99,9 +98,11 @@ class PluginListInstallationsFromIdsApi(Resource): class PluginIconApi(Resource): @setup_required def get(self): - req = reqparse.RequestParser() - req.add_argument("tenant_id", type=str, required=True, location="args") - req.add_argument("filename", type=str, required=True, location="args") + req = ( + reqparse.RequestParser() + .add_argument("tenant_id", type=str, required=True, location="args") + .add_argument("filename", type=str, required=True, location="args") + ) args = req.parse_args() try: @@ -120,7 +121,7 @@ class PluginUploadFromPkgApi(Resource): @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() file = request.files["pkg"] @@ -144,12 +145,14 @@ class PluginUploadFromGithubApi(Resource): @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("repo", type=str, required=True, location="json") - parser.add_argument("version", type=str, required=True, location="json") - parser.add_argument("package", type=str, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("repo", type=str, required=True, location="json") + .add_argument("version", type=str, required=True, 
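Reviewer note: after this refactor the plugin endpoints all follow one shape: unpack the tenant, build a chained parser, delegate to the service. A condensed sketch of that shape, using the `page`/`page_size` defaults from `PluginListApi` above (decorators beyond login and the error mapping are omitted, and the `PluginService` import path is assumed):

```python
from flask_restx import Resource, reqparse

from libs.login import current_account_with_tenant, login_required
from services.plugin.plugin_service import PluginService  # import path assumed


class PluginListSketchApi(Resource):
    """Condensed shape of the refactored endpoints; not the actual class."""

    @login_required
    def get(self):
        _, tenant_id = current_account_with_tenant()
        parser = (
            reqparse.RequestParser()
            .add_argument("page", type=int, required=False, location="args", default=1)
            .add_argument("page_size", type=int, required=False, location="args", default=256)
        )
        args = parser.parse_args()
        return PluginService.list_with_total(tenant_id, args["page"], args["page_size"])
```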
location="json") + .add_argument("package", type=str, required=True, location="json") + ) args = parser.parse_args() try: @@ -167,7 +170,7 @@ class PluginUploadFromBundleApi(Resource): @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() file = request.files["bundle"] @@ -191,10 +194,11 @@ class PluginInstallFromPkgApi(Resource): @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("plugin_unique_identifiers", type=list, required=True, location="json") + parser = reqparse.RequestParser().add_argument( + "plugin_unique_identifiers", type=list, required=True, location="json" + ) args = parser.parse_args() # check if all plugin_unique_identifiers are valid string @@ -217,13 +221,15 @@ class PluginInstallFromGithubApi(Resource): @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("repo", type=str, required=True, location="json") - parser.add_argument("version", type=str, required=True, location="json") - parser.add_argument("package", type=str, required=True, location="json") - parser.add_argument("plugin_unique_identifier", type=str, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("repo", type=str, required=True, location="json") + .add_argument("version", type=str, required=True, location="json") + .add_argument("package", type=str, required=True, location="json") + .add_argument("plugin_unique_identifier", type=str, required=True, location="json") + ) args = parser.parse_args() try: @@ -247,10 +253,11 @@ class PluginInstallFromMarketplaceApi(Resource): @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("plugin_unique_identifiers", type=list, required=True, location="json") + parser = reqparse.RequestParser().add_argument( + "plugin_unique_identifiers", type=list, required=True, location="json" + ) args = parser.parse_args() # check if all plugin_unique_identifiers are valid string @@ -273,10 +280,11 @@ class PluginFetchMarketplacePkgApi(Resource): @account_initialization_required @plugin_permission_required(install_required=True) def get(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("plugin_unique_identifier", type=str, required=True, location="args") + parser = reqparse.RequestParser().add_argument( + "plugin_unique_identifier", type=str, required=True, location="args" + ) args = parser.parse_args() try: @@ -299,10 +307,11 @@ class PluginFetchManifestApi(Resource): @account_initialization_required @plugin_permission_required(install_required=True) def get(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("plugin_unique_identifier", type=str, required=True, location="args") + parser = 
reqparse.RequestParser().add_argument( + "plugin_unique_identifier", type=str, required=True, location="args" + ) args = parser.parse_args() try: @@ -324,11 +333,13 @@ class PluginFetchInstallTasksApi(Resource): @account_initialization_required @plugin_permission_required(install_required=True) def get(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("page", type=int, required=True, location="args") - parser.add_argument("page_size", type=int, required=True, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("page", type=int, required=True, location="args") + .add_argument("page_size", type=int, required=True, location="args") + ) args = parser.parse_args() try: @@ -346,7 +357,7 @@ class PluginFetchInstallTaskApi(Resource): @account_initialization_required @plugin_permission_required(install_required=True) def get(self, task_id: str): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() try: return jsonable_encoder({"task": PluginService.fetch_install_task(tenant_id, task_id)}) @@ -361,7 +372,7 @@ class PluginDeleteInstallTaskApi(Resource): @account_initialization_required @plugin_permission_required(install_required=True) def post(self, task_id: str): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() try: return {"success": PluginService.delete_install_task(tenant_id, task_id)} @@ -376,7 +387,7 @@ class PluginDeleteAllInstallTaskItemsApi(Resource): @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() try: return {"success": PluginService.delete_all_install_task_items(tenant_id)} @@ -391,7 +402,7 @@ class PluginDeleteInstallTaskItemApi(Resource): @account_initialization_required @plugin_permission_required(install_required=True) def post(self, task_id: str, identifier: str): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() try: return {"success": PluginService.delete_install_task_item(tenant_id, task_id, identifier)} @@ -406,11 +417,13 @@ class PluginUpgradeFromMarketplaceApi(Resource): @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("original_plugin_unique_identifier", type=str, required=True, location="json") - parser.add_argument("new_plugin_unique_identifier", type=str, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("original_plugin_unique_identifier", type=str, required=True, location="json") + .add_argument("new_plugin_unique_identifier", type=str, required=True, location="json") + ) args = parser.parse_args() try: @@ -430,14 +443,16 @@ class PluginUpgradeFromGithubApi(Resource): @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() - parser = reqparse.RequestParser() - parser.add_argument("original_plugin_unique_identifier", type=str, required=True, location="json") - parser.add_argument("new_plugin_unique_identifier", type=str, required=True, location="json") - parser.add_argument("repo", type=str, required=True, 
location="json") - parser.add_argument("version", type=str, required=True, location="json") - parser.add_argument("package", type=str, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("original_plugin_unique_identifier", type=str, required=True, location="json") + .add_argument("new_plugin_unique_identifier", type=str, required=True, location="json") + .add_argument("repo", type=str, required=True, location="json") + .add_argument("version", type=str, required=True, location="json") + .add_argument("package", type=str, required=True, location="json") + ) args = parser.parse_args() try: @@ -462,11 +477,10 @@ class PluginUninstallApi(Resource): @account_initialization_required @plugin_permission_required(install_required=True) def post(self): - req = reqparse.RequestParser() - req.add_argument("plugin_installation_id", type=str, required=True, location="json") + req = reqparse.RequestParser().add_argument("plugin_installation_id", type=str, required=True, location="json") args = req.parse_args() - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() try: return {"success": PluginService.uninstall(tenant_id, args["plugin_installation_id"])} @@ -480,19 +494,22 @@ class PluginChangePermissionApi(Resource): @login_required @account_initialization_required def post(self): + current_user, current_tenant_id = current_account_with_tenant() user = current_user if not user.is_admin_or_owner: raise Forbidden() - req = reqparse.RequestParser() - req.add_argument("install_permission", type=str, required=True, location="json") - req.add_argument("debug_permission", type=str, required=True, location="json") + req = ( + reqparse.RequestParser() + .add_argument("install_permission", type=str, required=True, location="json") + .add_argument("debug_permission", type=str, required=True, location="json") + ) args = req.parse_args() install_permission = TenantPluginPermission.InstallPermission(args["install_permission"]) debug_permission = TenantPluginPermission.DebugPermission(args["debug_permission"]) - tenant_id = user.current_tenant_id + tenant_id = current_tenant_id return {"success": PluginPermissionService.change_permission(tenant_id, install_permission, debug_permission)} @@ -503,7 +520,7 @@ class PluginFetchPermissionApi(Resource): @login_required @account_initialization_required def get(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() permission = PluginPermissionService.get_permission(tenant_id) if not permission: @@ -529,18 +546,20 @@ class PluginFetchDynamicSelectOptionsApi(Resource): @account_initialization_required def get(self): # check if the user is admin or owner + current_user, tenant_id = current_account_with_tenant() if not current_user.is_admin_or_owner: raise Forbidden() - tenant_id = current_user.current_tenant_id user_id = current_user.id - parser = reqparse.RequestParser() - parser.add_argument("plugin_id", type=str, required=True, location="args") - parser.add_argument("provider", type=str, required=True, location="args") - parser.add_argument("action", type=str, required=True, location="args") - parser.add_argument("parameter", type=str, required=True, location="args") - parser.add_argument("provider_type", type=str, required=True, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("plugin_id", type=str, required=True, location="args") + .add_argument("provider", type=str, required=True, location="args") + .add_argument("action", type=str, 
required=True, location="args") + .add_argument("parameter", type=str, required=True, location="args") + .add_argument("provider_type", type=str, required=True, location="args") + ) args = parser.parse_args() try: @@ -565,17 +584,17 @@ class PluginChangePreferencesApi(Resource): @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() - req = reqparse.RequestParser() - req.add_argument("permission", type=dict, required=True, location="json") - req.add_argument("auto_upgrade", type=dict, required=True, location="json") + req = ( + reqparse.RequestParser() + .add_argument("permission", type=dict, required=True, location="json") + .add_argument("auto_upgrade", type=dict, required=True, location="json") + ) args = req.parse_args() - tenant_id = user.current_tenant_id - permission = args["permission"] install_permission = TenantPluginPermission.InstallPermission(permission.get("install_permission", "everyone")) @@ -621,7 +640,7 @@ class PluginFetchPreferencesApi(Resource): @login_required @account_initialization_required def get(self): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() permission = PluginPermissionService.get_permission(tenant_id) permission_dict = { @@ -661,10 +680,9 @@ class PluginAutoUpgradeExcludePluginApi(Resource): @account_initialization_required def post(self): # exclude one single plugin - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() - req = reqparse.RequestParser() - req.add_argument("plugin_id", type=str, required=True, location="json") + req = reqparse.RequestParser().add_argument("plugin_id", type=str, required=True, location="json") args = req.parse_args() return jsonable_encoder({"success": PluginAutoUpgradeService.exclude_plugin(tenant_id, args["plugin_id"])}) diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py index 9285577f72..cc50131f0a 100644 --- a/api/controllers/console/workspace/tool_providers.py +++ b/api/controllers/console/workspace/tool_providers.py @@ -2,7 +2,6 @@ import io from urllib.parse import urlparse from flask import make_response, redirect, request, send_file -from flask_login import current_user from flask_restx import ( Resource, reqparse, @@ -24,7 +23,7 @@ from core.model_runtime.utils.encoders import jsonable_encoder from core.plugin.impl.oauth import OAuthHandler from core.tools.entities.tool_entities import CredentialType from libs.helper import StrLen, alphanumeric, uuid_value -from libs.login import login_required +from libs.login import current_account_with_tenant, login_required from models.provider_ids import ToolProviderID from services.plugin.oauth_service import OAuthProxyService from services.tools.api_tools_manage_service import ApiToolManageService @@ -53,13 +52,11 @@ class ToolProviderListApi(Resource): @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id - req = reqparse.RequestParser() - req.add_argument( + req = reqparse.RequestParser().add_argument( "type", type=str, choices=["builtin", "model", "api", "workflow", "mcp"], @@ -78,9 +75,7 @@ class ToolBuiltinProviderListToolsApi(Resource): @login_required @account_initialization_required def get(self, provider): - user = current_user - - tenant_id = 
user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder( BuiltinToolManageService.list_builtin_tool_provider_tools( @@ -96,9 +91,7 @@ class ToolBuiltinProviderInfoApi(Resource): @login_required @account_initialization_required def get(self, provider): - user = current_user - - tenant_id = user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder(BuiltinToolManageService.get_builtin_tool_provider_info(tenant_id, provider)) @@ -109,13 +102,13 @@ class ToolBuiltinProviderDeleteApi(Resource): @login_required @account_initialization_required def post(self, provider): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() - tenant_id = user.current_tenant_id - req = reqparse.RequestParser() - req.add_argument("credential_id", type=str, required=True, nullable=False, location="json") + req = reqparse.RequestParser().add_argument( + "credential_id", type=str, required=True, nullable=False, location="json" + ) args = req.parse_args() return BuiltinToolManageService.delete_builtin_tool_provider( @@ -131,15 +124,16 @@ class ToolBuiltinProviderAddApi(Resource): @login_required @account_initialization_required def post(self, provider): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") - parser.add_argument("name", type=StrLen(30), required=False, nullable=False, location="json") - parser.add_argument("type", type=str, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("credentials", type=dict, required=True, nullable=False, location="json") + .add_argument("name", type=StrLen(30), required=False, nullable=False, location="json") + .add_argument("type", type=str, required=True, nullable=False, location="json") + ) args = parser.parse_args() if args["type"] not in CredentialType.values(): @@ -161,18 +155,19 @@ class ToolBuiltinProviderUpdateApi(Resource): @login_required @account_initialization_required def post(self, provider): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json") - parser.add_argument("credentials", type=dict, required=False, nullable=True, location="json") - parser.add_argument("name", type=StrLen(30), required=False, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("credential_id", type=str, required=True, nullable=False, location="json") + .add_argument("credentials", type=dict, required=False, nullable=True, location="json") + .add_argument("name", type=StrLen(30), required=False, nullable=True, location="json") + ) args = parser.parse_args() @@ -193,7 +188,7 @@ class ToolBuiltinProviderGetCredentialsApi(Resource): @login_required @account_initialization_required def get(self, provider): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder( BuiltinToolManageService.get_builtin_tool_provider_credentials( @@ -218,23 +213,24 @@ class ToolApiProviderAddApi(Resource): @login_required @account_initialization_required def post(self): - user = 
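Reviewer note: `ToolBuiltinProviderAddApi` validates the credential `type` against `CredentialType.values()` after parsing rather than via parser `choices`. The real `CredentialType` is imported from `core.tools.entities.tool_entities`; a stand-in sketch of the `values()` pattern this implies (members and implementation assumed):

```python
from enum import Enum


class CredentialType(Enum):  # stand-in; the real enum may differ
    API_KEY = "api-key"
    OAUTH2 = "oauth2"

    @classmethod
    def values(cls) -> list[str]:
        return [member.value for member in cls]


assert "api-key" in CredentialType.values()
assert "password" not in CredentialType.values()
```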
current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") - parser.add_argument("schema_type", type=str, required=True, nullable=False, location="json") - parser.add_argument("schema", type=str, required=True, nullable=False, location="json") - parser.add_argument("provider", type=str, required=True, nullable=False, location="json") - parser.add_argument("icon", type=dict, required=True, nullable=False, location="json") - parser.add_argument("privacy_policy", type=str, required=False, nullable=True, location="json") - parser.add_argument("labels", type=list[str], required=False, nullable=True, location="json", default=[]) - parser.add_argument("custom_disclaimer", type=str, required=False, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("credentials", type=dict, required=True, nullable=False, location="json") + .add_argument("schema_type", type=str, required=True, nullable=False, location="json") + .add_argument("schema", type=str, required=True, nullable=False, location="json") + .add_argument("provider", type=str, required=True, nullable=False, location="json") + .add_argument("icon", type=dict, required=True, nullable=False, location="json") + .add_argument("privacy_policy", type=str, required=False, nullable=True, location="json") + .add_argument("labels", type=list[str], required=False, nullable=True, location="json", default=[]) + .add_argument("custom_disclaimer", type=str, required=False, nullable=True, location="json") + ) args = parser.parse_args() @@ -258,14 +254,11 @@ class ToolApiProviderGetRemoteSchemaApi(Resource): @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - - parser.add_argument("url", type=str, required=True, nullable=False, location="args") + parser = reqparse.RequestParser().add_argument("url", type=str, required=True, nullable=False, location="args") args = parser.parse_args() @@ -282,14 +275,13 @@ class ToolApiProviderListToolsApi(Resource): @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - - parser.add_argument("provider", type=str, required=True, nullable=False, location="args") + parser = reqparse.RequestParser().add_argument( + "provider", type=str, required=True, nullable=False, location="args" + ) args = parser.parse_args() @@ -308,24 +300,25 @@ class ToolApiProviderUpdateApi(Resource): @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") - parser.add_argument("schema_type", type=str, required=True, nullable=False, location="json") - parser.add_argument("schema", type=str, required=True, nullable=False, location="json") - parser.add_argument("provider", type=str, required=True, nullable=False, location="json") - 
parser.add_argument("original_provider", type=str, required=True, nullable=False, location="json") - parser.add_argument("icon", type=dict, required=True, nullable=False, location="json") - parser.add_argument("privacy_policy", type=str, required=True, nullable=True, location="json") - parser.add_argument("labels", type=list[str], required=False, nullable=True, location="json") - parser.add_argument("custom_disclaimer", type=str, required=True, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("credentials", type=dict, required=True, nullable=False, location="json") + .add_argument("schema_type", type=str, required=True, nullable=False, location="json") + .add_argument("schema", type=str, required=True, nullable=False, location="json") + .add_argument("provider", type=str, required=True, nullable=False, location="json") + .add_argument("original_provider", type=str, required=True, nullable=False, location="json") + .add_argument("icon", type=dict, required=True, nullable=False, location="json") + .add_argument("privacy_policy", type=str, required=True, nullable=True, location="json") + .add_argument("labels", type=list[str], required=False, nullable=True, location="json") + .add_argument("custom_disclaimer", type=str, required=True, nullable=True, location="json") + ) args = parser.parse_args() @@ -350,17 +343,16 @@ class ToolApiProviderDeleteApi(Resource): @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - - parser.add_argument("provider", type=str, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "provider", type=str, required=True, nullable=False, location="json" + ) args = parser.parse_args() @@ -377,14 +369,13 @@ class ToolApiProviderGetApi(Resource): @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - - parser.add_argument("provider", type=str, required=True, nullable=False, location="args") + parser = reqparse.RequestParser().add_argument( + "provider", type=str, required=True, nullable=False, location="args" + ) args = parser.parse_args() @@ -401,8 +392,7 @@ class ToolBuiltinProviderCredentialsSchemaApi(Resource): @login_required @account_initialization_required def get(self, provider, credential_type): - user = current_user - tenant_id = user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder( BuiltinToolManageService.list_builtin_provider_credentials_schema( @@ -417,9 +407,9 @@ class ToolApiProviderSchemaApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - - parser.add_argument("schema", type=str, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "schema", type=str, required=True, nullable=False, location="json" + ) args = parser.parse_args() @@ -434,19 +424,20 @@ class ToolApiProviderPreviousTestApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - - parser.add_argument("tool_name", type=str, required=True, nullable=False, location="json") - parser.add_argument("provider_name", 
type=str, required=False, nullable=False, location="json") - parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") - parser.add_argument("parameters", type=dict, required=True, nullable=False, location="json") - parser.add_argument("schema_type", type=str, required=True, nullable=False, location="json") - parser.add_argument("schema", type=str, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("tool_name", type=str, required=True, nullable=False, location="json") + .add_argument("provider_name", type=str, required=False, nullable=False, location="json") + .add_argument("credentials", type=dict, required=True, nullable=False, location="json") + .add_argument("parameters", type=dict, required=True, nullable=False, location="json") + .add_argument("schema_type", type=str, required=True, nullable=False, location="json") + .add_argument("schema", type=str, required=True, nullable=False, location="json") + ) args = parser.parse_args() - + _, current_tenant_id = current_account_with_tenant() return ApiToolManageService.test_api_tool_preview( - current_user.current_tenant_id, + current_tenant_id, args["provider_name"] or "", args["tool_name"], args["credentials"], @@ -462,23 +453,24 @@ class ToolWorkflowProviderCreateApi(Resource): @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id - reqparser = reqparse.RequestParser() - reqparser.add_argument("workflow_app_id", type=uuid_value, required=True, nullable=False, location="json") - reqparser.add_argument("name", type=alphanumeric, required=True, nullable=False, location="json") - reqparser.add_argument("label", type=str, required=True, nullable=False, location="json") - reqparser.add_argument("description", type=str, required=True, nullable=False, location="json") - reqparser.add_argument("icon", type=dict, required=True, nullable=False, location="json") - reqparser.add_argument("parameters", type=list[dict], required=True, nullable=False, location="json") - reqparser.add_argument("privacy_policy", type=str, required=False, nullable=True, location="json", default="") - reqparser.add_argument("labels", type=list[str], required=False, nullable=True, location="json") + reqparser = ( + reqparse.RequestParser() + .add_argument("workflow_app_id", type=uuid_value, required=True, nullable=False, location="json") + .add_argument("name", type=alphanumeric, required=True, nullable=False, location="json") + .add_argument("label", type=str, required=True, nullable=False, location="json") + .add_argument("description", type=str, required=True, nullable=False, location="json") + .add_argument("icon", type=dict, required=True, nullable=False, location="json") + .add_argument("parameters", type=list[dict], required=True, nullable=False, location="json") + .add_argument("privacy_policy", type=str, required=False, nullable=True, location="json", default="") + .add_argument("labels", type=list[str], required=False, nullable=True, location="json") + ) args = reqparser.parse_args() @@ -502,23 +494,24 @@ class ToolWorkflowProviderUpdateApi(Resource): @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id - 
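Reviewer note: a subtle pair in the parsers above: `required=True, nullable=True` (e.g. `privacy_policy` in `ToolApiProviderUpdateApi`) means the key must be present in the JSON body but may be `null`, while `required=True, nullable=False` rejects `null` as well. Verifiable in isolation:

```python
from flask import Flask
from flask_restx import reqparse

app = Flask(__name__)
parser = reqparse.RequestParser().add_argument(
    "privacy_policy", type=str, required=True, nullable=True, location="json"
)

with app.test_request_context(json={"privacy_policy": None}):
    print(parser.parse_args())  # {'privacy_policy': None} -- present but null is fine
```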
reqparser = reqparse.RequestParser() - reqparser.add_argument("workflow_tool_id", type=uuid_value, required=True, nullable=False, location="json") - reqparser.add_argument("name", type=alphanumeric, required=True, nullable=False, location="json") - reqparser.add_argument("label", type=str, required=True, nullable=False, location="json") - reqparser.add_argument("description", type=str, required=True, nullable=False, location="json") - reqparser.add_argument("icon", type=dict, required=True, nullable=False, location="json") - reqparser.add_argument("parameters", type=list[dict], required=True, nullable=False, location="json") - reqparser.add_argument("privacy_policy", type=str, required=False, nullable=True, location="json", default="") - reqparser.add_argument("labels", type=list[str], required=False, nullable=True, location="json") + reqparser = ( + reqparse.RequestParser() + .add_argument("workflow_tool_id", type=uuid_value, required=True, nullable=False, location="json") + .add_argument("name", type=alphanumeric, required=True, nullable=False, location="json") + .add_argument("label", type=str, required=True, nullable=False, location="json") + .add_argument("description", type=str, required=True, nullable=False, location="json") + .add_argument("icon", type=dict, required=True, nullable=False, location="json") + .add_argument("parameters", type=list[dict], required=True, nullable=False, location="json") + .add_argument("privacy_policy", type=str, required=False, nullable=True, location="json", default="") + .add_argument("labels", type=list[str], required=False, nullable=True, location="json") + ) args = reqparser.parse_args() @@ -545,16 +538,16 @@ class ToolWorkflowProviderDeleteApi(Resource): @login_required @account_initialization_required def post(self): - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() user_id = user.id - tenant_id = user.current_tenant_id - reqparser = reqparse.RequestParser() - reqparser.add_argument("workflow_tool_id", type=uuid_value, required=True, nullable=False, location="json") + reqparser = reqparse.RequestParser().add_argument( + "workflow_tool_id", type=uuid_value, required=True, nullable=False, location="json" + ) args = reqparser.parse_args() @@ -571,14 +564,15 @@ class ToolWorkflowProviderGetApi(Resource): @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - parser.add_argument("workflow_tool_id", type=uuid_value, required=False, nullable=True, location="args") - parser.add_argument("workflow_app_id", type=uuid_value, required=False, nullable=True, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("workflow_tool_id", type=uuid_value, required=False, nullable=True, location="args") + .add_argument("workflow_app_id", type=uuid_value, required=False, nullable=True, location="args") + ) args = parser.parse_args() @@ -606,13 +600,13 @@ class ToolWorkflowProviderListToolApi(Resource): @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id - parser = reqparse.RequestParser() - parser.add_argument("workflow_tool_id", type=uuid_value, required=True, nullable=False, location="args") + parser = reqparse.RequestParser().add_argument( + "workflow_tool_id", 
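Reviewer note: the workflow-tool endpoints route their ids through the `uuid_value` validator from `libs.helper`, so malformed ids fail at parse time with a 400 instead of reaching the service (reqparse converts a `ValueError` from the `type` callable into a client error). The real helper is not shown in this diff; a plausible stand-in:

```python
import uuid


def uuid_value(value: str) -> str:
    """Stand-in for libs.helper.uuid_value; the actual implementation may differ."""
    try:
        return str(uuid.UUID(value))
    except (AttributeError, TypeError, ValueError) as e:
        raise ValueError(f"{value!r} is not a valid uuid") from e


print(uuid_value("123e4567-e89b-12d3-a456-426614174000"))
```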
type=uuid_value, required=True, nullable=False, location="args" + ) args = parser.parse_args() @@ -631,10 +625,9 @@ class ToolBuiltinListApi(Resource): @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id return jsonable_encoder( [ @@ -653,8 +646,7 @@ class ToolApiListApi(Resource): @login_required @account_initialization_required def get(self): - user = current_user - tenant_id = user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder( [ @@ -672,10 +664,9 @@ class ToolWorkflowListApi(Resource): @login_required @account_initialization_required def get(self): - user = current_user + user, tenant_id = current_account_with_tenant() user_id = user.id - tenant_id = user.current_tenant_id return jsonable_encoder( [ @@ -709,19 +700,18 @@ class ToolPluginOAuthApi(Resource): provider_name = tool_provider.provider_name # todo check permission - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() - tenant_id = user.current_tenant_id oauth_client_params = BuiltinToolManageService.get_oauth_client(tenant_id=tenant_id, provider=provider) if oauth_client_params is None: raise Forbidden("no oauth available client config found for this tool provider") oauth_handler = OAuthHandler() context_id = OAuthProxyService.create_proxy_context( - user_id=current_user.id, tenant_id=tenant_id, plugin_id=plugin_id, provider=provider_name + user_id=user.id, tenant_id=tenant_id, plugin_id=plugin_id, provider=provider_name ) redirect_uri = f"{dify_config.CONSOLE_API_URL}/console/api/oauth/plugin/{provider}/tool/callback" authorization_url_response = oauth_handler.get_authorization_url( @@ -800,11 +790,11 @@ class ToolBuiltinProviderSetDefaultApi(Resource): @login_required @account_initialization_required def post(self, provider): - parser = reqparse.RequestParser() - parser.add_argument("id", type=str, required=True, nullable=False, location="json") + current_user, current_tenant_id = current_account_with_tenant() + parser = reqparse.RequestParser().add_argument("id", type=str, required=True, nullable=False, location="json") args = parser.parse_args() return BuiltinToolManageService.set_default_provider( - tenant_id=current_user.current_tenant_id, user_id=current_user.id, provider=provider, id=args["id"] + tenant_id=current_tenant_id, user_id=current_user.id, provider=provider, id=args["id"] ) @@ -814,18 +804,20 @@ class ToolOAuthCustomClient(Resource): @login_required @account_initialization_required def post(self, provider): - parser = reqparse.RequestParser() - parser.add_argument("client_params", type=dict, required=False, nullable=True, location="json") - parser.add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("client_params", type=dict, required=False, nullable=True, location="json") + .add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json") + ) args = parser.parse_args() - user = current_user + user, tenant_id = current_account_with_tenant() if not user.is_admin_or_owner: raise Forbidden() return BuiltinToolManageService.save_custom_oauth_client_params( - tenant_id=user.current_tenant_id, + tenant_id=tenant_id, provider=provider, client_params=args.get("client_params", {}), 
enable_oauth_custom_client=args.get("enable_oauth_custom_client", True), @@ -835,20 +827,18 @@ class ToolOAuthCustomClient(Resource): @login_required @account_initialization_required def get(self, provider): + _, current_tenant_id = current_account_with_tenant() return jsonable_encoder( - BuiltinToolManageService.get_custom_oauth_client_params( - tenant_id=current_user.current_tenant_id, provider=provider - ) + BuiltinToolManageService.get_custom_oauth_client_params(tenant_id=current_tenant_id, provider=provider) ) @setup_required @login_required @account_initialization_required def delete(self, provider): + _, current_tenant_id = current_account_with_tenant() return jsonable_encoder( - BuiltinToolManageService.delete_custom_oauth_client_params( - tenant_id=current_user.current_tenant_id, provider=provider - ) + BuiltinToolManageService.delete_custom_oauth_client_params(tenant_id=current_tenant_id, provider=provider) ) @@ -858,9 +848,10 @@ class ToolBuiltinProviderGetOauthClientSchemaApi(Resource): @login_required @account_initialization_required def get(self, provider): + _, current_tenant_id = current_account_with_tenant() return jsonable_encoder( BuiltinToolManageService.get_builtin_tool_provider_oauth_client_schema( - tenant_id=current_user.current_tenant_id, provider_name=provider + tenant_id=current_tenant_id, provider_name=provider ) ) @@ -871,7 +862,7 @@ class ToolBuiltinProviderGetCredentialInfoApi(Resource): @login_required @account_initialization_required def get(self, provider): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() return jsonable_encoder( BuiltinToolManageService.get_builtin_tool_provider_credential_info( @@ -887,25 +878,25 @@ class ToolProviderMCPApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("server_url", type=str, required=True, nullable=False, location="json") - parser.add_argument("name", type=str, required=True, nullable=False, location="json") - parser.add_argument("icon", type=str, required=True, nullable=False, location="json") - parser.add_argument("icon_type", type=str, required=True, nullable=False, location="json") - parser.add_argument("icon_background", type=str, required=False, nullable=True, location="json", default="") - parser.add_argument("server_identifier", type=str, required=True, nullable=False, location="json") - parser.add_argument("timeout", type=float, required=False, nullable=False, location="json", default=30) - parser.add_argument( - "sse_read_timeout", type=float, required=False, nullable=False, location="json", default=300 + parser = ( + reqparse.RequestParser() + .add_argument("server_url", type=str, required=True, nullable=False, location="json") + .add_argument("name", type=str, required=True, nullable=False, location="json") + .add_argument("icon", type=str, required=True, nullable=False, location="json") + .add_argument("icon_type", type=str, required=True, nullable=False, location="json") + .add_argument("icon_background", type=str, required=False, nullable=True, location="json", default="") + .add_argument("server_identifier", type=str, required=True, nullable=False, location="json") + .add_argument("timeout", type=float, required=False, nullable=False, location="json", default=30) + .add_argument("sse_read_timeout", type=float, required=False, nullable=False, location="json", default=300) + .add_argument("headers", type=dict, required=False, nullable=True, location="json", default={}) ) - 
parser.add_argument("headers", type=dict, required=False, nullable=True, location="json", default={}) args = parser.parse_args() - user = current_user + user, tenant_id = current_account_with_tenant() if not is_valid_url(args["server_url"]): raise ValueError("Server URL is not valid.") return jsonable_encoder( MCPToolManageService.create_mcp_provider( - tenant_id=user.current_tenant_id, + tenant_id=tenant_id, server_url=args["server_url"], name=args["name"], icon=args["icon"], @@ -923,25 +914,28 @@ class ToolProviderMCPApi(Resource): @login_required @account_initialization_required def put(self): - parser = reqparse.RequestParser() - parser.add_argument("server_url", type=str, required=True, nullable=False, location="json") - parser.add_argument("name", type=str, required=True, nullable=False, location="json") - parser.add_argument("icon", type=str, required=True, nullable=False, location="json") - parser.add_argument("icon_type", type=str, required=True, nullable=False, location="json") - parser.add_argument("icon_background", type=str, required=False, nullable=True, location="json") - parser.add_argument("provider_id", type=str, required=True, nullable=False, location="json") - parser.add_argument("server_identifier", type=str, required=True, nullable=False, location="json") - parser.add_argument("timeout", type=float, required=False, nullable=True, location="json") - parser.add_argument("sse_read_timeout", type=float, required=False, nullable=True, location="json") - parser.add_argument("headers", type=dict, required=False, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("server_url", type=str, required=True, nullable=False, location="json") + .add_argument("name", type=str, required=True, nullable=False, location="json") + .add_argument("icon", type=str, required=True, nullable=False, location="json") + .add_argument("icon_type", type=str, required=True, nullable=False, location="json") + .add_argument("icon_background", type=str, required=False, nullable=True, location="json") + .add_argument("provider_id", type=str, required=True, nullable=False, location="json") + .add_argument("server_identifier", type=str, required=True, nullable=False, location="json") + .add_argument("timeout", type=float, required=False, nullable=True, location="json") + .add_argument("sse_read_timeout", type=float, required=False, nullable=True, location="json") + .add_argument("headers", type=dict, required=False, nullable=True, location="json") + ) args = parser.parse_args() if not is_valid_url(args["server_url"]): if "[__HIDDEN__]" in args["server_url"]: pass else: raise ValueError("Server URL is not valid.") + _, current_tenant_id = current_account_with_tenant() MCPToolManageService.update_mcp_provider( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider_id=args["provider_id"], server_url=args["server_url"], name=args["name"], @@ -959,10 +953,12 @@ class ToolProviderMCPApi(Resource): @login_required @account_initialization_required def delete(self): - parser = reqparse.RequestParser() - parser.add_argument("provider_id", type=str, required=True, nullable=False, location="json") + parser = reqparse.RequestParser().add_argument( + "provider_id", type=str, required=True, nullable=False, location="json" + ) args = parser.parse_args() - MCPToolManageService.delete_mcp_tool(tenant_id=current_user.current_tenant_id, provider_id=args["provider_id"]) + _, current_tenant_id = current_account_with_tenant() + 
MCPToolManageService.delete_mcp_tool(tenant_id=current_tenant_id, provider_id=args["provider_id"]) return {"result": "success"} @@ -972,12 +968,14 @@ class ToolMCPAuthApi(Resource): @login_required @account_initialization_required def post(self): - parser = reqparse.RequestParser() - parser.add_argument("provider_id", type=str, required=True, nullable=False, location="json") - parser.add_argument("authorization_code", type=str, required=False, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("provider_id", type=str, required=True, nullable=False, location="json") + .add_argument("authorization_code", type=str, required=False, nullable=True, location="json") + ) args = parser.parse_args() provider_id = args["provider_id"] - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() provider = MCPToolManageService.get_mcp_provider_by_provider_id(provider_id, tenant_id) if not provider: raise ValueError("provider not found") @@ -1018,8 +1016,8 @@ class ToolMCPDetailApi(Resource): @login_required @account_initialization_required def get(self, provider_id): - user = current_user - provider = MCPToolManageService.get_mcp_provider_by_provider_id(provider_id, user.current_tenant_id) + _, tenant_id = current_account_with_tenant() + provider = MCPToolManageService.get_mcp_provider_by_provider_id(provider_id, tenant_id) return jsonable_encoder(ToolTransformService.mcp_provider_to_user_provider(provider, for_list=True)) @@ -1029,8 +1027,7 @@ class ToolMCPListAllApi(Resource): @login_required @account_initialization_required def get(self): - user = current_user - tenant_id = user.current_tenant_id + _, tenant_id = current_account_with_tenant() tools = MCPToolManageService.retrieve_mcp_tools(tenant_id=tenant_id) @@ -1043,7 +1040,7 @@ class ToolMCPUpdateApi(Resource): @login_required @account_initialization_required def get(self, provider_id): - tenant_id = current_user.current_tenant_id + _, tenant_id = current_account_with_tenant() tools = MCPToolManageService.list_mcp_tool_from_remote_server( tenant_id=tenant_id, provider_id=provider_id, @@ -1054,9 +1051,11 @@ class ToolMCPUpdateApi(Resource): @console_ns.route("/mcp/oauth/callback") class ToolMCPCallbackApi(Resource): def get(self): - parser = reqparse.RequestParser() - parser.add_argument("code", type=str, required=True, nullable=False, location="args") - parser.add_argument("state", type=str, required=True, nullable=False, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("code", type=str, required=True, nullable=False, location="args") + .add_argument("state", type=str, required=True, nullable=False, location="args") + ) args = parser.parse_args() state_key = args["state"] authorization_code = args["code"] diff --git a/api/controllers/console/workspace/workspace.py b/api/controllers/console/workspace/workspace.py index 4a0539785a..f9856df9ea 100644 --- a/api/controllers/console/workspace/workspace.py +++ b/api/controllers/console/workspace/workspace.py @@ -23,8 +23,8 @@ from controllers.console.wraps import ( ) from extensions.ext_database import db from libs.helper import TimestampField -from libs.login import current_user, login_required -from models.account import Account, Tenant, TenantStatus +from libs.login import current_account_with_tenant, login_required +from models.account import Tenant, TenantStatus from services.account_service import TenantService from services.feature_service import FeatureService from services.file_service import 
FileService @@ -70,8 +70,7 @@ class TenantListApi(Resource): @login_required @account_initialization_required def get(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, current_tenant_id = current_account_with_tenant() tenants = TenantService.get_join_tenants(current_user) tenant_dicts = [] @@ -85,7 +84,7 @@ class TenantListApi(Resource): "status": tenant.status, "created_at": tenant.created_at, "plan": features.billing.subscription.plan if features.billing.enabled else "sandbox", - "current": tenant.id == current_user.current_tenant_id if current_user.current_tenant_id else False, + "current": tenant.id == current_tenant_id if current_tenant_id else False, } tenant_dicts.append(tenant_dict) @@ -98,9 +97,11 @@ class WorkspaceListApi(Resource): @setup_required @admin_required def get(self): - parser = reqparse.RequestParser() - parser.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args") - parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args") + .add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") + ) args = parser.parse_args() stmt = select(Tenant).order_by(Tenant.created_at.desc()) @@ -130,8 +131,7 @@ class TenantApi(Resource): if request.path == "/info": logger.warning("Deprecated URL /info was used.") - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") + current_user, _ = current_account_with_tenant() tenant = current_user.current_tenant if not tenant: raise ValueError("No current tenant") @@ -155,10 +155,8 @@ class SwitchWorkspaceApi(Resource): @login_required @account_initialization_required def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") - parser = reqparse.RequestParser() - parser.add_argument("tenant_id", type=str, required=True, location="json") + current_user, _ = current_account_with_tenant() + parser = reqparse.RequestParser().add_argument("tenant_id", type=str, required=True, location="json") args = parser.parse_args() # check if tenant_id is valid, 403 if not @@ -181,16 +179,14 @@ class CustomConfigWorkspaceApi(Resource): @account_initialization_required @cloud_edition_billing_resource_check("workspace_custom") def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") - parser = reqparse.RequestParser() - parser.add_argument("remove_webapp_brand", type=bool, location="json") - parser.add_argument("replace_webapp_logo", type=str, location="json") + _, current_tenant_id = current_account_with_tenant() + parser = ( + reqparse.RequestParser() + .add_argument("remove_webapp_brand", type=bool, location="json") + .add_argument("replace_webapp_logo", type=str, location="json") + ) args = parser.parse_args() - - if not current_user.current_tenant_id: - raise ValueError("No current tenant") - tenant = db.get_or_404(Tenant, current_user.current_tenant_id) + tenant = db.get_or_404(Tenant, current_tenant_id) custom_config_dict = { "remove_webapp_brand": args["remove_webapp_brand"], @@ -212,8 +208,7 @@ class WebappLogoWorkspaceApi(Resource): @account_initialization_required @cloud_edition_billing_resource_check("workspace_custom") def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user 
account") + current_user, _ = current_account_with_tenant() # check file if "file" not in request.files: raise NoFileUploadedError() @@ -253,15 +248,13 @@ class WorkspaceInfoApi(Resource): @account_initialization_required # Change workspace name def post(self): - if not isinstance(current_user, Account): - raise ValueError("Invalid user account") - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=True, location="json") + _, current_tenant_id = current_account_with_tenant() + parser = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json") args = parser.parse_args() - if not current_user.current_tenant_id: + if not current_tenant_id: raise ValueError("No current tenant") - tenant = db.get_or_404(Tenant, current_user.current_tenant_id) + tenant = db.get_or_404(Tenant, current_tenant_id) tenant.name = args["name"] db.session.commit() diff --git a/api/controllers/console/wraps.py b/api/controllers/console/wraps.py index 9e903d9286..8572a6dc9b 100644 --- a/api/controllers/console/wraps.py +++ b/api/controllers/console/wraps.py @@ -12,8 +12,8 @@ from configs import dify_config from controllers.console.workspace.error import AccountNotInitializedError from extensions.ext_database import db from extensions.ext_redis import redis_client -from libs.login import current_user -from models.account import Account, AccountStatus +from libs.login import current_account_with_tenant +from models.account import AccountStatus from models.dataset import RateLimitLog from models.model import DifySetup from services.feature_service import FeatureService, LicenseStatus @@ -25,18 +25,12 @@ P = ParamSpec("P") R = TypeVar("R") -def _current_account() -> Account: - assert isinstance(current_user, Account) - return current_user - - def account_initialization_required(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): # check account initialization - account = _current_account() - - if account.status == AccountStatus.UNINITIALIZED: + current_user, _ = current_account_with_tenant() + if current_user.status == AccountStatus.UNINITIALIZED: raise AccountNotInitializedError() return view(*args, **kwargs) @@ -80,9 +74,8 @@ def only_edition_self_hosted(view: Callable[P, R]): def cloud_edition_billing_enabled(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): - account = _current_account() - assert account.current_tenant_id is not None - features = FeatureService.get_features(account.current_tenant_id) + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if not features.billing.enabled: abort(403, "Billing feature is not enabled.") return view(*args, **kwargs) @@ -94,10 +87,8 @@ def cloud_edition_billing_resource_check(resource: str): def interceptor(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): - account = _current_account() - assert account.current_tenant_id is not None - tenant_id = account.current_tenant_id - features = FeatureService.get_features(tenant_id) + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if features.billing.enabled: members = features.members apps = features.apps @@ -138,9 +129,8 @@ def cloud_edition_billing_knowledge_limit_check(resource: str): def interceptor(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): - account = _current_account() - assert 
account.current_tenant_id is not None - features = FeatureService.get_features(account.current_tenant_id) + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if features.billing.enabled: if resource == "add_segment": if features.billing.subscription.plan == "sandbox": @@ -163,13 +153,11 @@ def cloud_edition_billing_rate_limit_check(resource: str): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): if resource == "knowledge": - account = _current_account() - assert account.current_tenant_id is not None - tenant_id = account.current_tenant_id - knowledge_rate_limit = FeatureService.get_knowledge_rate_limit(tenant_id) + _, current_tenant_id = current_account_with_tenant() + knowledge_rate_limit = FeatureService.get_knowledge_rate_limit(current_tenant_id) if knowledge_rate_limit.enabled: current_time = int(time.time() * 1000) - key = f"rate_limit_{tenant_id}" + key = f"rate_limit_{current_tenant_id}" redis_client.zadd(key, {current_time: current_time}) @@ -180,7 +168,7 @@ def cloud_edition_billing_rate_limit_check(resource: str): if request_count > knowledge_rate_limit.limit: # add ratelimit record rate_limit_log = RateLimitLog( - tenant_id=tenant_id, + tenant_id=current_tenant_id, subscription_plan=knowledge_rate_limit.subscription_plan, operation="knowledge", ) @@ -200,17 +188,15 @@ def cloud_utm_record(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): with contextlib.suppress(Exception): - account = _current_account() - assert account.current_tenant_id is not None - tenant_id = account.current_tenant_id - features = FeatureService.get_features(tenant_id) + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if features.billing.enabled: utm_info = request.cookies.get("utm_info") if utm_info: utm_info_dict: dict = json.loads(utm_info) - OperationService.record_utm(tenant_id, utm_info_dict) + OperationService.record_utm(current_tenant_id, utm_info_dict) return view(*args, **kwargs) @@ -260,9 +246,9 @@ def email_password_login_enabled(view: Callable[P, R]): return decorated -def email_register_enabled(view): +def email_register_enabled(view: Callable[P, R]): @wraps(view) - def decorated(*args, **kwargs): + def decorated(*args: P.args, **kwargs: P.kwargs): features = FeatureService.get_system_features() if features.is_allow_register: return view(*args, **kwargs) @@ -289,9 +275,8 @@ def enable_change_email(view: Callable[P, R]): def is_allow_transfer_owner(view: Callable[P, R]): @wraps(view) def decorated(*args: P.args, **kwargs: P.kwargs): - account = _current_account() - assert account.current_tenant_id is not None - features = FeatureService.get_features(account.current_tenant_id) + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if features.is_allow_transfer_workspace: return view(*args, **kwargs) @@ -301,14 +286,31 @@ def is_allow_transfer_owner(view: Callable[P, R]): return decorated -def knowledge_pipeline_publish_enabled(view): +def knowledge_pipeline_publish_enabled(view: Callable[P, R]): @wraps(view) - def decorated(*args, **kwargs): - account = _current_account() - assert account.current_tenant_id is not None - features = FeatureService.get_features(account.current_tenant_id) + def decorated(*args: P.args, **kwargs: P.kwargs): + _, current_tenant_id = current_account_with_tenant() + features = FeatureService.get_features(current_tenant_id) if 
features.knowledge_pipeline.publish_enabled:
             return view(*args, **kwargs)
 
         abort(403)
 
     return decorated
+
+
+def edit_permission_required(f: Callable[P, R]):
+    @wraps(f)
+    def decorated_function(*args: P.args, **kwargs: P.kwargs):
+        from werkzeug.exceptions import Forbidden
+
+        from libs.login import current_user
+        from models import Account
+
+        user = current_user._get_current_object()  # type: ignore
+        if not isinstance(user, Account):
+            raise Forbidden()
+        if not user.has_edit_permission:
+            raise Forbidden()
+        return f(*args, **kwargs)
+
+    return decorated_function
diff --git a/api/controllers/files/image_preview.py b/api/controllers/files/image_preview.py
index 0efee0c377..3db82456d5 100644
--- a/api/controllers/files/image_preview.py
+++ b/api/controllers/files/image_preview.py
@@ -46,11 +46,13 @@ class FilePreviewApi(Resource):
     def get(self, file_id):
         file_id = str(file_id)
 
-        parser = reqparse.RequestParser()
-        parser.add_argument("timestamp", type=str, required=True, location="args")
-        parser.add_argument("nonce", type=str, required=True, location="args")
-        parser.add_argument("sign", type=str, required=True, location="args")
-        parser.add_argument("as_attachment", type=bool, required=False, default=False, location="args")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("timestamp", type=str, required=True, location="args")
+            .add_argument("nonce", type=str, required=True, location="args")
+            .add_argument("sign", type=str, required=True, location="args")
+            .add_argument("as_attachment", type=bool, required=False, default=False, location="args")
+        )
 
         args = parser.parse_args()
diff --git a/api/controllers/files/tool_files.py b/api/controllers/files/tool_files.py
index 42207b878c..dec5a4a1b2 100644
--- a/api/controllers/files/tool_files.py
+++ b/api/controllers/files/tool_files.py
@@ -16,12 +16,13 @@ class ToolFileApi(Resource):
     def get(self, file_id, extension):
         file_id = str(file_id)
 
-        parser = reqparse.RequestParser()
-
-        parser.add_argument("timestamp", type=str, required=True, location="args")
-        parser.add_argument("nonce", type=str, required=True, location="args")
-        parser.add_argument("sign", type=str, required=True, location="args")
-        parser.add_argument("as_attachment", type=bool, required=False, default=False, location="args")
+        parser = (
+            reqparse.RequestParser()
+            .add_argument("timestamp", type=str, required=True, location="args")
+            .add_argument("nonce", type=str, required=True, location="args")
+            .add_argument("sign", type=str, required=True, location="args")
+            .add_argument("as_attachment", type=bool, required=False, default=False, location="args")
+        )
 
         args = parser.parse_args()
 
         if not verify_tool_file_signature(
diff --git a/api/controllers/files/upload.py b/api/controllers/files/upload.py
index 206a5d1cc2..a09e24e2d9 100644
--- a/api/controllers/files/upload.py
+++ b/api/controllers/files/upload.py
@@ -18,19 +18,17 @@ from core.tools.tool_file_manager import ToolFileManager
 from fields.file_fields import build_file_model
 
 # Define parser for both documentation and validation
-upload_parser = reqparse.RequestParser()
-upload_parser.add_argument("file", location="files", type=FileStorage, required=True, help="File to upload")
-upload_parser.add_argument(
-    "timestamp", type=str, required=True, location="args", help="Unix timestamp for signature verification"
+upload_parser = (
+    reqparse.RequestParser()
+    .add_argument("file", location="files", type=FileStorage, required=True, help="File to upload")
+    .add_argument(
+        "timestamp", type=str, required=True, 
location="args", help="Unix timestamp for signature verification" + ) + .add_argument("nonce", type=str, required=True, location="args", help="Random string for signature verification") + .add_argument("sign", type=str, required=True, location="args", help="HMAC signature for request validation") + .add_argument("tenant_id", type=str, required=True, location="args", help="Tenant identifier") + .add_argument("user_id", type=str, required=False, location="args", help="User identifier") ) -upload_parser.add_argument( - "nonce", type=str, required=True, location="args", help="Random string for signature verification" -) -upload_parser.add_argument( - "sign", type=str, required=True, location="args", help="HMAC signature for request validation" -) -upload_parser.add_argument("tenant_id", type=str, required=True, location="args", help="Tenant identifier") -upload_parser.add_argument("user_id", type=str, required=False, location="args", help="User identifier") @files_ns.route("/upload/for-plugin") diff --git a/api/controllers/inner_api/mail.py b/api/controllers/inner_api/mail.py index 0b2be03e43..7e40d81706 100644 --- a/api/controllers/inner_api/mail.py +++ b/api/controllers/inner_api/mail.py @@ -5,11 +5,13 @@ from controllers.inner_api import inner_api_ns from controllers.inner_api.wraps import billing_inner_api_only, enterprise_inner_api_only from tasks.mail_inner_task import send_inner_email_task -_mail_parser = reqparse.RequestParser() -_mail_parser.add_argument("to", type=str, action="append", required=True) -_mail_parser.add_argument("subject", type=str, required=True) -_mail_parser.add_argument("body", type=str, required=True) -_mail_parser.add_argument("substitutions", type=dict, required=False) +_mail_parser = ( + reqparse.RequestParser() + .add_argument("to", type=str, action="append", required=True) + .add_argument("subject", type=str, required=True) + .add_argument("body", type=str, required=True) + .add_argument("substitutions", type=dict, required=False) +) class BaseMail(Resource): @@ -17,7 +19,7 @@ class BaseMail(Resource): def post(self): args = _mail_parser.parse_args() - send_inner_email_task.delay( + send_inner_email_task.delay( # type: ignore to=args["to"], subject=args["subject"], body=args["body"], diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index deab50076d..e4fe8d44bf 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ b/api/controllers/inner_api/plugin/plugin.py @@ -31,7 +31,7 @@ from core.plugin.entities.request import ( ) from core.tools.entities.tool_entities import ToolProviderType from libs.helper import length_prefixed_response -from models.account import Account, Tenant +from models import Account, Tenant from models.model import EndUser diff --git a/api/controllers/inner_api/plugin/wraps.py b/api/controllers/inner_api/plugin/wraps.py index 1f588bedce..2a57bb745b 100644 --- a/api/controllers/inner_api/plugin/wraps.py +++ b/api/controllers/inner_api/plugin/wraps.py @@ -72,9 +72,11 @@ def get_user_tenant(view: Callable[P, R] | None = None): @wraps(view_func) def decorated_view(*args: P.args, **kwargs: P.kwargs): # fetch json body - parser = reqparse.RequestParser() - parser.add_argument("tenant_id", type=str, required=True, location="json") - parser.add_argument("user_id", type=str, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("tenant_id", type=str, required=True, location="json") + .add_argument("user_id", type=str, required=True, location="json") + ) p = 
parser.parse_args() diff --git a/api/controllers/inner_api/workspace/workspace.py b/api/controllers/inner_api/workspace/workspace.py index 47f0240cd2..8391a15919 100644 --- a/api/controllers/inner_api/workspace/workspace.py +++ b/api/controllers/inner_api/workspace/workspace.py @@ -7,7 +7,7 @@ from controllers.inner_api import inner_api_ns from controllers.inner_api.wraps import enterprise_inner_api_only from events.tenant_event import tenant_was_created from extensions.ext_database import db -from models.account import Account +from models import Account from services.account_service import TenantService @@ -25,9 +25,11 @@ class EnterpriseWorkspace(Resource): } ) def post(self): - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=True, location="json") - parser.add_argument("owner_email", type=str, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, required=True, location="json") + .add_argument("owner_email", type=str, required=True, location="json") + ) args = parser.parse_args() account = db.session.query(Account).filter_by(email=args["owner_email"]).first() @@ -68,8 +70,7 @@ class EnterpriseWorkspaceNoOwnerEmail(Resource): } ) def post(self): - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=True, location="json") + parser = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json") args = parser.parse_args() tenant = TenantService.create_tenant(args["name"], is_from_dashboard=True) diff --git a/api/controllers/mcp/mcp.py b/api/controllers/mcp/mcp.py index a8629dca20..85b7df229f 100644 --- a/api/controllers/mcp/mcp.py +++ b/api/controllers/mcp/mcp.py @@ -33,14 +33,12 @@ def int_or_str(value): # Define parser for both documentation and validation -mcp_request_parser = reqparse.RequestParser() -mcp_request_parser.add_argument( - "jsonrpc", type=str, required=True, location="json", help="JSON-RPC version (should be '2.0')" -) -mcp_request_parser.add_argument("method", type=str, required=True, location="json", help="The method to invoke") -mcp_request_parser.add_argument("params", type=dict, required=False, location="json", help="Parameters for the method") -mcp_request_parser.add_argument( - "id", type=int_or_str, required=False, location="json", help="Request ID for tracking responses" +mcp_request_parser = ( + reqparse.RequestParser() + .add_argument("jsonrpc", type=str, required=True, location="json", help="JSON-RPC version (should be '2.0')") + .add_argument("method", type=str, required=True, location="json", help="The method to invoke") + .add_argument("params", type=dict, required=False, location="json", help="Parameters for the method") + .add_argument("id", type=int_or_str, required=False, location="json", help="Request ID for tracking responses") ) diff --git a/api/controllers/service_api/app/annotation.py b/api/controllers/service_api/app/annotation.py index ad1bdc7334..ed013b1674 100644 --- a/api/controllers/service_api/app/annotation.py +++ b/api/controllers/service_api/app/annotation.py @@ -10,24 +10,24 @@ from controllers.service_api.wraps import validate_app_token from extensions.ext_redis import redis_client from fields.annotation_fields import annotation_fields, build_annotation_model from libs.login import current_user -from models.account import Account +from models import Account from models.model import App from services.annotation_service import AppAnnotationService # Define parsers for annotation API 
-annotation_create_parser = reqparse.RequestParser() -annotation_create_parser.add_argument("question", required=True, type=str, location="json", help="Annotation question") -annotation_create_parser.add_argument("answer", required=True, type=str, location="json", help="Annotation answer") +annotation_create_parser = ( + reqparse.RequestParser() + .add_argument("question", required=True, type=str, location="json", help="Annotation question") + .add_argument("answer", required=True, type=str, location="json", help="Annotation answer") +) -annotation_reply_action_parser = reqparse.RequestParser() -annotation_reply_action_parser.add_argument( - "score_threshold", required=True, type=float, location="json", help="Score threshold for annotation matching" -) -annotation_reply_action_parser.add_argument( - "embedding_provider_name", required=True, type=str, location="json", help="Embedding provider name" -) -annotation_reply_action_parser.add_argument( - "embedding_model_name", required=True, type=str, location="json", help="Embedding model name" +annotation_reply_action_parser = ( + reqparse.RequestParser() + .add_argument( + "score_threshold", required=True, type=float, location="json", help="Score threshold for annotation matching" + ) + .add_argument("embedding_provider_name", required=True, type=str, location="json", help="Embedding provider name") + .add_argument("embedding_model_name", required=True, type=str, location="json", help="Embedding model name") ) diff --git a/api/controllers/service_api/app/audio.py b/api/controllers/service_api/app/audio.py index 33035123d7..c069a7ddfb 100644 --- a/api/controllers/service_api/app/audio.py +++ b/api/controllers/service_api/app/audio.py @@ -85,11 +85,13 @@ class AudioApi(Resource): # Define parser for text-to-audio API -text_to_audio_parser = reqparse.RequestParser() -text_to_audio_parser.add_argument("message_id", type=str, required=False, location="json", help="Message ID") -text_to_audio_parser.add_argument("voice", type=str, location="json", help="Voice to use for TTS") -text_to_audio_parser.add_argument("text", type=str, location="json", help="Text to convert to audio") -text_to_audio_parser.add_argument("streaming", type=bool, location="json", help="Enable streaming response") +text_to_audio_parser = ( + reqparse.RequestParser() + .add_argument("message_id", type=str, required=False, location="json", help="Message ID") + .add_argument("voice", type=str, location="json", help="Voice to use for TTS") + .add_argument("text", type=str, location="json", help="Text to convert to audio") + .add_argument("streaming", type=bool, location="json", help="Enable streaming response") +) @service_api_ns.route("/text-to-audio") diff --git a/api/controllers/service_api/app/completion.py b/api/controllers/service_api/app/completion.py index 22428ee0ab..915e7e9416 100644 --- a/api/controllers/service_api/app/completion.py +++ b/api/controllers/service_api/app/completion.py @@ -37,40 +37,34 @@ logger = logging.getLogger(__name__) # Define parser for completion API -completion_parser = reqparse.RequestParser() -completion_parser.add_argument( - "inputs", type=dict, required=True, location="json", help="Input parameters for completion" -) -completion_parser.add_argument("query", type=str, location="json", default="", help="The query string") -completion_parser.add_argument("files", type=list, required=False, location="json", help="List of file attachments") -completion_parser.add_argument( - "response_mode", type=str, choices=["blocking", "streaming"], 
location="json", help="Response mode" -) -completion_parser.add_argument( - "retriever_from", type=str, required=False, default="dev", location="json", help="Retriever source" +completion_parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, location="json", help="Input parameters for completion") + .add_argument("query", type=str, location="json", default="", help="The query string") + .add_argument("files", type=list, required=False, location="json", help="List of file attachments") + .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json", help="Response mode") + .add_argument("retriever_from", type=str, required=False, default="dev", location="json", help="Retriever source") ) # Define parser for chat API -chat_parser = reqparse.RequestParser() -chat_parser.add_argument("inputs", type=dict, required=True, location="json", help="Input parameters for chat") -chat_parser.add_argument("query", type=str, required=True, location="json", help="The chat query") -chat_parser.add_argument("files", type=list, required=False, location="json", help="List of file attachments") -chat_parser.add_argument( - "response_mode", type=str, choices=["blocking", "streaming"], location="json", help="Response mode" +chat_parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, location="json", help="Input parameters for chat") + .add_argument("query", type=str, required=True, location="json", help="The chat query") + .add_argument("files", type=list, required=False, location="json", help="List of file attachments") + .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json", help="Response mode") + .add_argument("conversation_id", type=uuid_value, location="json", help="Existing conversation ID") + .add_argument("retriever_from", type=str, required=False, default="dev", location="json", help="Retriever source") + .add_argument( + "auto_generate_name", + type=bool, + required=False, + default=True, + location="json", + help="Auto generate conversation name", + ) + .add_argument("workflow_id", type=str, required=False, location="json", help="Workflow ID for advanced chat") ) -chat_parser.add_argument("conversation_id", type=uuid_value, location="json", help="Existing conversation ID") -chat_parser.add_argument( - "retriever_from", type=str, required=False, default="dev", location="json", help="Retriever source" -) -chat_parser.add_argument( - "auto_generate_name", - type=bool, - required=False, - default=True, - location="json", - help="Auto generate conversation name", -) -chat_parser.add_argument("workflow_id", type=str, required=False, location="json", help="Workflow ID for advanced chat") @service_api_ns.route("/completion-messages") diff --git a/api/controllers/service_api/app/conversation.py b/api/controllers/service_api/app/conversation.py index 711dd5704c..c4e23dd2e7 100644 --- a/api/controllers/service_api/app/conversation.py +++ b/api/controllers/service_api/app/conversation.py @@ -24,48 +24,63 @@ from models.model import App, AppMode, EndUser from services.conversation_service import ConversationService # Define parsers for conversation APIs -conversation_list_parser = reqparse.RequestParser() -conversation_list_parser.add_argument( - "last_id", type=uuid_value, location="args", help="Last conversation ID for pagination" -) -conversation_list_parser.add_argument( - "limit", - type=int_range(1, 100), - required=False, - default=20, - location="args", - help="Number 
of conversations to return", -) -conversation_list_parser.add_argument( - "sort_by", - type=str, - choices=["created_at", "-created_at", "updated_at", "-updated_at"], - required=False, - default="-updated_at", - location="args", - help="Sort order for conversations", +conversation_list_parser = ( + reqparse.RequestParser() + .add_argument("last_id", type=uuid_value, location="args", help="Last conversation ID for pagination") + .add_argument( + "limit", + type=int_range(1, 100), + required=False, + default=20, + location="args", + help="Number of conversations to return", + ) + .add_argument( + "sort_by", + type=str, + choices=["created_at", "-created_at", "updated_at", "-updated_at"], + required=False, + default="-updated_at", + location="args", + help="Sort order for conversations", + ) ) -conversation_rename_parser = reqparse.RequestParser() -conversation_rename_parser.add_argument("name", type=str, required=False, location="json", help="New conversation name") -conversation_rename_parser.add_argument( - "auto_generate", type=bool, required=False, default=False, location="json", help="Auto-generate conversation name" +conversation_rename_parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, required=False, location="json", help="New conversation name") + .add_argument( + "auto_generate", + type=bool, + required=False, + default=False, + location="json", + help="Auto-generate conversation name", + ) ) -conversation_variables_parser = reqparse.RequestParser() -conversation_variables_parser.add_argument( - "last_id", type=uuid_value, location="args", help="Last variable ID for pagination" -) -conversation_variables_parser.add_argument( - "limit", type=int_range(1, 100), required=False, default=20, location="args", help="Number of variables to return" +conversation_variables_parser = ( + reqparse.RequestParser() + .add_argument("last_id", type=uuid_value, location="args", help="Last variable ID for pagination") + .add_argument( + "limit", + type=int_range(1, 100), + required=False, + default=20, + location="args", + help="Number of variables to return", + ) ) -conversation_variable_update_parser = reqparse.RequestParser() -# using lambda is for passing the already-typed value without modification -# if no lambda, it will be converted to string -# the string cannot be converted using json.loads -conversation_variable_update_parser.add_argument( - "value", required=True, location="json", type=lambda x: x, help="New value for the conversation variable" +conversation_variable_update_parser = reqparse.RequestParser().add_argument( + # using lambda is for passing the already-typed value without modification + # if no lambda, it will be converted to string + # the string cannot be converted using json.loads + "value", + required=True, + location="json", + type=lambda x: x, + help="New value for the conversation variable", ) diff --git a/api/controllers/service_api/app/file_preview.py b/api/controllers/service_api/app/file_preview.py index 63b46f49f2..b8e91f0657 100644 --- a/api/controllers/service_api/app/file_preview.py +++ b/api/controllers/service_api/app/file_preview.py @@ -18,8 +18,7 @@ logger = logging.getLogger(__name__) # Define parser for file preview API -file_preview_parser = reqparse.RequestParser() -file_preview_parser.add_argument( +file_preview_parser = reqparse.RequestParser().add_argument( "as_attachment", type=bool, required=False, default=False, location="args", help="Download as attachment" ) diff --git a/api/controllers/service_api/app/message.py 
b/api/controllers/service_api/app/message.py index fc506ef723..b8e5ed28e4 100644 --- a/api/controllers/service_api/app/message.py +++ b/api/controllers/service_api/app/message.py @@ -26,25 +26,37 @@ logger = logging.getLogger(__name__) # Define parsers for message APIs -message_list_parser = reqparse.RequestParser() -message_list_parser.add_argument( - "conversation_id", required=True, type=uuid_value, location="args", help="Conversation ID" -) -message_list_parser.add_argument("first_id", type=uuid_value, location="args", help="First message ID for pagination") -message_list_parser.add_argument( - "limit", type=int_range(1, 100), required=False, default=20, location="args", help="Number of messages to return" +message_list_parser = ( + reqparse.RequestParser() + .add_argument("conversation_id", required=True, type=uuid_value, location="args", help="Conversation ID") + .add_argument("first_id", type=uuid_value, location="args", help="First message ID for pagination") + .add_argument( + "limit", + type=int_range(1, 100), + required=False, + default=20, + location="args", + help="Number of messages to return", + ) ) -message_feedback_parser = reqparse.RequestParser() -message_feedback_parser.add_argument( - "rating", type=str, choices=["like", "dislike", None], location="json", help="Feedback rating" +message_feedback_parser = ( + reqparse.RequestParser() + .add_argument("rating", type=str, choices=["like", "dislike", None], location="json", help="Feedback rating") + .add_argument("content", type=str, location="json", help="Feedback content") ) -message_feedback_parser.add_argument("content", type=str, location="json", help="Feedback content") -feedback_list_parser = reqparse.RequestParser() -feedback_list_parser.add_argument("page", type=int, default=1, location="args", help="Page number") -feedback_list_parser.add_argument( - "limit", type=int_range(1, 101), required=False, default=20, location="args", help="Number of feedbacks per page" +feedback_list_parser = ( + reqparse.RequestParser() + .add_argument("page", type=int, default=1, location="args", help="Page number") + .add_argument( + "limit", + type=int_range(1, 101), + required=False, + default=20, + location="args", + help="Number of feedbacks per page", + ) ) diff --git a/api/controllers/service_api/app/workflow.py b/api/controllers/service_api/app/workflow.py index e912563bc6..af5eae463d 100644 --- a/api/controllers/service_api/app/workflow.py +++ b/api/controllers/service_api/app/workflow.py @@ -42,32 +42,36 @@ from services.workflow_app_service import WorkflowAppService logger = logging.getLogger(__name__) # Define parsers for workflow APIs -workflow_run_parser = reqparse.RequestParser() -workflow_run_parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") -workflow_run_parser.add_argument("files", type=list, required=False, location="json") -workflow_run_parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") +workflow_run_parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, nullable=False, location="json") + .add_argument("files", type=list, required=False, location="json") + .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") +) -workflow_log_parser = reqparse.RequestParser() -workflow_log_parser.add_argument("keyword", type=str, location="args") -workflow_log_parser.add_argument("status", type=str, choices=["succeeded", "failed", "stopped"], location="args") 
-workflow_log_parser.add_argument("created_at__before", type=str, location="args") -workflow_log_parser.add_argument("created_at__after", type=str, location="args") -workflow_log_parser.add_argument( - "created_by_end_user_session_id", - type=str, - location="args", - required=False, - default=None, +workflow_log_parser = ( + reqparse.RequestParser() + .add_argument("keyword", type=str, location="args") + .add_argument("status", type=str, choices=["succeeded", "failed", "stopped"], location="args") + .add_argument("created_at__before", type=str, location="args") + .add_argument("created_at__after", type=str, location="args") + .add_argument( + "created_by_end_user_session_id", + type=str, + location="args", + required=False, + default=None, + ) + .add_argument( + "created_by_account", + type=str, + location="args", + required=False, + default=None, + ) + .add_argument("page", type=int_range(1, 99999), default=1, location="args") + .add_argument("limit", type=int_range(1, 100), default=20, location="args") ) -workflow_log_parser.add_argument( - "created_by_account", - type=str, - location="args", - required=False, - default=None, -) -workflow_log_parser.add_argument("page", type=int_range(1, 99999), default=1, location="args") -workflow_log_parser.add_argument("limit", type=int_range(1, 100), default=20, location="args") workflow_run_fields = { "id": fields.String, diff --git a/api/controllers/service_api/dataset/dataset.py b/api/controllers/service_api/dataset/dataset.py index 92bbb76f0f..9d5566919b 100644 --- a/api/controllers/service_api/dataset/dataset.py +++ b/api/controllers/service_api/dataset/dataset.py @@ -33,119 +33,118 @@ def _validate_name(name): # Define parsers for dataset operations -dataset_create_parser = reqparse.RequestParser() -dataset_create_parser.add_argument( - "name", - nullable=False, - required=True, - help="type is required. Name must be between 1 to 40 characters.", - type=_validate_name, -) -dataset_create_parser.add_argument( - "description", - type=validate_description_length, - nullable=True, - required=False, - default="", -) -dataset_create_parser.add_argument( - "indexing_technique", - type=str, - location="json", - choices=Dataset.INDEXING_TECHNIQUE_LIST, - help="Invalid indexing technique.", -) -dataset_create_parser.add_argument( - "permission", - type=str, - location="json", - choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM), - help="Invalid permission.", - required=False, - nullable=False, -) -dataset_create_parser.add_argument( - "external_knowledge_api_id", - type=str, - nullable=True, - required=False, - default="_validate_name", -) -dataset_create_parser.add_argument( - "provider", - type=str, - nullable=True, - required=False, - default="vendor", -) -dataset_create_parser.add_argument( - "external_knowledge_id", - type=str, - nullable=True, - required=False, -) -dataset_create_parser.add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json") -dataset_create_parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json") -dataset_create_parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json") - -dataset_update_parser = reqparse.RequestParser() -dataset_update_parser.add_argument( - "name", - nullable=False, - help="type is required. 
Name must be between 1 to 40 characters.", - type=_validate_name, -) -dataset_update_parser.add_argument( - "description", location="json", store_missing=False, type=validate_description_length -) -dataset_update_parser.add_argument( - "indexing_technique", - type=str, - location="json", - choices=Dataset.INDEXING_TECHNIQUE_LIST, - nullable=True, - help="Invalid indexing technique.", -) -dataset_update_parser.add_argument( - "permission", - type=str, - location="json", - choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM), - help="Invalid permission.", -) -dataset_update_parser.add_argument("embedding_model", type=str, location="json", help="Invalid embedding model.") -dataset_update_parser.add_argument( - "embedding_model_provider", type=str, location="json", help="Invalid embedding model provider." -) -dataset_update_parser.add_argument("retrieval_model", type=dict, location="json", help="Invalid retrieval model.") -dataset_update_parser.add_argument("partial_member_list", type=list, location="json", help="Invalid parent user list.") -dataset_update_parser.add_argument( - "external_retrieval_model", - type=dict, - required=False, - nullable=True, - location="json", - help="Invalid external retrieval model.", -) -dataset_update_parser.add_argument( - "external_knowledge_id", - type=str, - required=False, - nullable=True, - location="json", - help="Invalid external knowledge id.", -) -dataset_update_parser.add_argument( - "external_knowledge_api_id", - type=str, - required=False, - nullable=True, - location="json", - help="Invalid external knowledge api id.", +dataset_create_parser = ( + reqparse.RequestParser() + .add_argument( + "name", + nullable=False, + required=True, + help="type is required. Name must be between 1 to 40 characters.", + type=_validate_name, + ) + .add_argument( + "description", + type=validate_description_length, + nullable=True, + required=False, + default="", + ) + .add_argument( + "indexing_technique", + type=str, + location="json", + choices=Dataset.INDEXING_TECHNIQUE_LIST, + help="Invalid indexing technique.", + ) + .add_argument( + "permission", + type=str, + location="json", + choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM), + help="Invalid permission.", + required=False, + nullable=False, + ) + .add_argument( + "external_knowledge_api_id", + type=str, + nullable=True, + required=False, + default="_validate_name", + ) + .add_argument( + "provider", + type=str, + nullable=True, + required=False, + default="vendor", + ) + .add_argument( + "external_knowledge_id", + type=str, + nullable=True, + required=False, + ) + .add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json") + .add_argument("embedding_model", type=str, required=False, nullable=True, location="json") + .add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json") ) -tag_create_parser = reqparse.RequestParser() -tag_create_parser.add_argument( +dataset_update_parser = ( + reqparse.RequestParser() + .add_argument( + "name", + nullable=False, + help="type is required. 
Name must be between 1 to 40 characters.", + type=_validate_name, + ) + .add_argument("description", location="json", store_missing=False, type=validate_description_length) + .add_argument( + "indexing_technique", + type=str, + location="json", + choices=Dataset.INDEXING_TECHNIQUE_LIST, + nullable=True, + help="Invalid indexing technique.", + ) + .add_argument( + "permission", + type=str, + location="json", + choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM), + help="Invalid permission.", + ) + .add_argument("embedding_model", type=str, location="json", help="Invalid embedding model.") + .add_argument("embedding_model_provider", type=str, location="json", help="Invalid embedding model provider.") + .add_argument("retrieval_model", type=dict, location="json", help="Invalid retrieval model.") + .add_argument("partial_member_list", type=list, location="json", help="Invalid parent user list.") + .add_argument( + "external_retrieval_model", + type=dict, + required=False, + nullable=True, + location="json", + help="Invalid external retrieval model.", + ) + .add_argument( + "external_knowledge_id", + type=str, + required=False, + nullable=True, + location="json", + help="Invalid external knowledge id.", + ) + .add_argument( + "external_knowledge_api_id", + type=str, + required=False, + nullable=True, + location="json", + help="Invalid external knowledge api id.", + ) +) + +tag_create_parser = reqparse.RequestParser().add_argument( "name", nullable=False, required=True, @@ -155,32 +154,37 @@ tag_create_parser.add_argument( else (_ for _ in ()).throw(ValueError("Name must be between 1 to 50 characters.")), ) -tag_update_parser = reqparse.RequestParser() -tag_update_parser.add_argument( - "name", - nullable=False, - required=True, - help="Name must be between 1 to 50 characters.", - type=lambda x: x - if x and 1 <= len(x) <= 50 - else (_ for _ in ()).throw(ValueError("Name must be between 1 to 50 characters.")), -) -tag_update_parser.add_argument("tag_id", nullable=False, required=True, help="Id of a tag.", type=str) - -tag_delete_parser = reqparse.RequestParser() -tag_delete_parser.add_argument("tag_id", nullable=False, required=True, help="Id of a tag.", type=str) - -tag_binding_parser = reqparse.RequestParser() -tag_binding_parser.add_argument( - "tag_ids", type=list, nullable=False, required=True, location="json", help="Tag IDs is required." -) -tag_binding_parser.add_argument( - "target_id", type=str, nullable=False, required=True, location="json", help="Target Dataset ID is required." 
+tag_update_parser = ( + reqparse.RequestParser() + .add_argument( + "name", + nullable=False, + required=True, + help="Name must be between 1 to 50 characters.", + type=lambda x: x + if x and 1 <= len(x) <= 50 + else (_ for _ in ()).throw(ValueError("Name must be between 1 to 50 characters.")), + ) + .add_argument("tag_id", nullable=False, required=True, help="Id of a tag.", type=str) ) -tag_unbinding_parser = reqparse.RequestParser() -tag_unbinding_parser.add_argument("tag_id", type=str, nullable=False, required=True, help="Tag ID is required.") -tag_unbinding_parser.add_argument("target_id", type=str, nullable=False, required=True, help="Target ID is required.") +tag_delete_parser = reqparse.RequestParser().add_argument( + "tag_id", nullable=False, required=True, help="Id of a tag.", type=str +) + +tag_binding_parser = ( + reqparse.RequestParser() + .add_argument("tag_ids", type=list, nullable=False, required=True, location="json", help="Tag IDs is required.") + .add_argument( + "target_id", type=str, nullable=False, required=True, location="json", help="Target Dataset ID is required." + ) +) + +tag_unbinding_parser = ( + reqparse.RequestParser() + .add_argument("tag_id", type=str, nullable=False, required=True, help="Tag ID is required.") + .add_argument("target_id", type=str, nullable=False, required=True, help="Target ID is required.") +) @service_api_ns.route("/datasets") diff --git a/api/controllers/service_api/dataset/document.py b/api/controllers/service_api/dataset/document.py index 961a338bc5..893cd7c923 100644 --- a/api/controllers/service_api/dataset/document.py +++ b/api/controllers/service_api/dataset/document.py @@ -35,37 +35,31 @@ from services.entities.knowledge_entities.knowledge_entities import KnowledgeCon from services.file_service import FileService # Define parsers for document operations -document_text_create_parser = reqparse.RequestParser() -document_text_create_parser.add_argument("name", type=str, required=True, nullable=False, location="json") -document_text_create_parser.add_argument("text", type=str, required=True, nullable=False, location="json") -document_text_create_parser.add_argument("process_rule", type=dict, required=False, nullable=True, location="json") -document_text_create_parser.add_argument("original_document_id", type=str, required=False, location="json") -document_text_create_parser.add_argument( - "doc_form", type=str, default="text_model", required=False, nullable=False, location="json" -) -document_text_create_parser.add_argument( - "doc_language", type=str, default="English", required=False, nullable=False, location="json" -) -document_text_create_parser.add_argument( - "indexing_technique", type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False, location="json" -) -document_text_create_parser.add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json") -document_text_create_parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json") -document_text_create_parser.add_argument( - "embedding_model_provider", type=str, required=False, nullable=True, location="json" +document_text_create_parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, required=True, nullable=False, location="json") + .add_argument("text", type=str, required=True, nullable=False, location="json") + .add_argument("process_rule", type=dict, required=False, nullable=True, location="json") + .add_argument("original_document_id", type=str, required=False, location="json") + 
.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json") + .add_argument("doc_language", type=str, default="English", required=False, nullable=False, location="json") + .add_argument( + "indexing_technique", type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False, location="json" + ) + .add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json") + .add_argument("embedding_model", type=str, required=False, nullable=True, location="json") + .add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json") ) -document_text_update_parser = reqparse.RequestParser() -document_text_update_parser.add_argument("name", type=str, required=False, nullable=True, location="json") -document_text_update_parser.add_argument("text", type=str, required=False, nullable=True, location="json") -document_text_update_parser.add_argument("process_rule", type=dict, required=False, nullable=True, location="json") -document_text_update_parser.add_argument( - "doc_form", type=str, default="text_model", required=False, nullable=False, location="json" +document_text_update_parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, required=False, nullable=True, location="json") + .add_argument("text", type=str, required=False, nullable=True, location="json") + .add_argument("process_rule", type=dict, required=False, nullable=True, location="json") + .add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json") + .add_argument("doc_language", type=str, default="English", required=False, nullable=False, location="json") + .add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json") ) -document_text_update_parser.add_argument( - "doc_language", type=str, default="English", required=False, nullable=False, location="json" -) -document_text_update_parser.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json") @service_api_ns.route( diff --git a/api/controllers/service_api/dataset/metadata.py b/api/controllers/service_api/dataset/metadata.py index 51420fdd5f..f646f1f4fa 100644 --- a/api/controllers/service_api/dataset/metadata.py +++ b/api/controllers/service_api/dataset/metadata.py @@ -15,21 +15,17 @@ from services.entities.knowledge_entities.knowledge_entities import ( from services.metadata_service import MetadataService # Define parsers for metadata APIs -metadata_create_parser = reqparse.RequestParser() -metadata_create_parser.add_argument( - "type", type=str, required=True, nullable=False, location="json", help="Metadata type" -) -metadata_create_parser.add_argument( - "name", type=str, required=True, nullable=False, location="json", help="Metadata name" +metadata_create_parser = ( + reqparse.RequestParser() + .add_argument("type", type=str, required=True, nullable=False, location="json", help="Metadata type") + .add_argument("name", type=str, required=True, nullable=False, location="json", help="Metadata name") ) -metadata_update_parser = reqparse.RequestParser() -metadata_update_parser.add_argument( +metadata_update_parser = reqparse.RequestParser().add_argument( "name", type=str, required=True, nullable=False, location="json", help="New metadata name" ) -document_metadata_parser = reqparse.RequestParser() -document_metadata_parser.add_argument( +document_metadata_parser = reqparse.RequestParser().add_argument( "operation_data", type=list, required=True, nullable=False, 
location="json", help="Metadata operation data" ) diff --git a/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py index 13ef8abc2d..c177e9180a 100644 --- a/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py +++ b/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py @@ -17,7 +17,7 @@ from core.app.apps.pipeline.pipeline_generator import PipelineGenerator from core.app.entities.app_invoke_entities import InvokeFrom from libs import helper from libs.login import current_user -from models.account import Account +from models import Account from models.dataset import Pipeline from models.engine import db from services.errors.file import FileTooLargeError, UnsupportedFileTypeError @@ -91,11 +91,13 @@ class DatasourceNodeRunApi(DatasetApiResource): def post(self, tenant_id: str, dataset_id: str, node_id: str): """Resource for getting datasource plugins.""" # Get query parameter to determine published or draft - parser: RequestParser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") - parser.add_argument("datasource_type", type=str, required=True, location="json") - parser.add_argument("credential_id", type=str, required=False, location="json") - parser.add_argument("is_published", type=bool, required=True, location="json") + parser: RequestParser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, nullable=False, location="json") + .add_argument("datasource_type", type=str, required=True, location="json") + .add_argument("credential_id", type=str, required=False, location="json") + .add_argument("is_published", type=bool, required=True, location="json") + ) args: ParseResult = parser.parse_args() datasource_node_run_api_entity = DatasourceNodeRunApiEntity.model_validate(args) @@ -147,19 +149,21 @@ class PipelineRunApi(DatasetApiResource): ) def post(self, tenant_id: str, dataset_id: str): """Resource for running a rag pipeline.""" - parser: RequestParser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") - parser.add_argument("datasource_type", type=str, required=True, location="json") - parser.add_argument("datasource_info_list", type=list, required=True, location="json") - parser.add_argument("start_node_id", type=str, required=True, location="json") - parser.add_argument("is_published", type=bool, required=True, default=True, location="json") - parser.add_argument( - "response_mode", - type=str, - required=True, - choices=["streaming", "blocking"], - default="blocking", - location="json", + parser: RequestParser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, nullable=False, location="json") + .add_argument("datasource_type", type=str, required=True, location="json") + .add_argument("datasource_info_list", type=list, required=True, location="json") + .add_argument("start_node_id", type=str, required=True, location="json") + .add_argument("is_published", type=bool, required=True, default=True, location="json") + .add_argument( + "response_mode", + type=str, + required=True, + choices=["streaming", "blocking"], + default="blocking", + location="json", + ) ) args: ParseResult = parser.parse_args() diff --git a/api/controllers/service_api/dataset/segment.py b/api/controllers/service_api/dataset/segment.py index d674c7467d..81abd19fed 100644 --- 
a/api/controllers/service_api/dataset/segment.py +++ b/api/controllers/service_api/dataset/segment.py @@ -1,5 +1,4 @@ from flask import request -from flask_login import current_user from flask_restx import marshal, reqparse from werkzeug.exceptions import NotFound @@ -16,6 +15,7 @@ from core.model_manager import ModelManager from core.model_runtime.entities.model_entities import ModelType from extensions.ext_database import db from fields.segment_fields import child_chunk_fields, segment_fields +from libs.login import current_account_with_tenant from models.dataset import Dataset from services.dataset_service import DatasetService, DocumentService, SegmentService from services.entities.knowledge_entities.knowledge_entities import SegmentUpdateArgs @@ -24,26 +24,34 @@ from services.errors.chunk import ChildChunkDeleteIndexError as ChildChunkDelete from services.errors.chunk import ChildChunkIndexingError as ChildChunkIndexingServiceError # Define parsers for segment operations -segment_create_parser = reqparse.RequestParser() -segment_create_parser.add_argument("segments", type=list, required=False, nullable=True, location="json") +segment_create_parser = reqparse.RequestParser().add_argument( + "segments", type=list, required=False, nullable=True, location="json" +) -segment_list_parser = reqparse.RequestParser() -segment_list_parser.add_argument("status", type=str, action="append", default=[], location="args") -segment_list_parser.add_argument("keyword", type=str, default=None, location="args") +segment_list_parser = ( + reqparse.RequestParser() + .add_argument("status", type=str, action="append", default=[], location="args") + .add_argument("keyword", type=str, default=None, location="args") +) -segment_update_parser = reqparse.RequestParser() -segment_update_parser.add_argument("segment", type=dict, required=False, nullable=True, location="json") +segment_update_parser = reqparse.RequestParser().add_argument( + "segment", type=dict, required=False, nullable=True, location="json" +) -child_chunk_create_parser = reqparse.RequestParser() -child_chunk_create_parser.add_argument("content", type=str, required=True, nullable=False, location="json") +child_chunk_create_parser = reqparse.RequestParser().add_argument( + "content", type=str, required=True, nullable=False, location="json" +) -child_chunk_list_parser = reqparse.RequestParser() -child_chunk_list_parser.add_argument("limit", type=int, default=20, location="args") -child_chunk_list_parser.add_argument("keyword", type=str, default=None, location="args") -child_chunk_list_parser.add_argument("page", type=int, default=1, location="args") +child_chunk_list_parser = ( + reqparse.RequestParser() + .add_argument("limit", type=int, default=20, location="args") + .add_argument("keyword", type=str, default=None, location="args") + .add_argument("page", type=int, default=1, location="args") +) -child_chunk_update_parser = reqparse.RequestParser() -child_chunk_update_parser.add_argument("content", type=str, required=True, nullable=False, location="json") +child_chunk_update_parser = reqparse.RequestParser().add_argument( + "content", type=str, required=True, nullable=False, location="json" +) @service_api_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments") @@ -66,6 +74,7 @@ class SegmentApi(DatasetApiResource): @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id: str, dataset_id: str, document_id: str): """Create single segment.""" + _, current_tenant_id =
current_account_with_tenant() # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() @@ -84,7 +93,7 @@ class SegmentApi(DatasetApiResource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -117,6 +126,7 @@ class SegmentApi(DatasetApiResource): } ) def get(self, tenant_id: str, dataset_id: str, document_id: str): """Get segments.""" + _, current_tenant_id = current_account_with_tenant() # check dataset page = request.args.get("page", default=1, type=int) @@ -133,7 +143,7 @@ class SegmentApi(DatasetApiResource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -149,7 +159,7 @@ class SegmentApi(DatasetApiResource): segments, total = SegmentService.get_segments( document_id=document_id, - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, status_list=args["status"], keyword=args["keyword"], page=page, @@ -184,6 +194,7 @@ class DatasetSegmentApi(DatasetApiResource): ) @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def delete(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): + _, current_tenant_id = current_account_with_tenant() # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: @@ -195,7 +206,7 @@ class DatasetSegmentApi(DatasetApiResource): if not document: raise NotFound("Document not found.") # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") SegmentService.delete_segment(segment, document, dataset) @@ -217,6 +228,7 @@ class DatasetSegmentApi(DatasetApiResource): @cloud_edition_billing_resource_check("vector_space", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): + _, current_tenant_id = current_account_with_tenant() # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: @@ -232,7 +244,7 @@ class DatasetSegmentApi(DatasetApiResource): try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -244,7 +256,7 @@ class DatasetSegmentApi(DatasetApiResource): except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -266,6 +278,7 @@ class DatasetSegmentApi(DatasetApiResource): } ) def get(self, tenant_id: str, dataset_id:
str, document_id: str, segment_id: str): + _, current_tenant_id = current_account_with_tenant() # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: @@ -277,7 +290,7 @@ class DatasetSegmentApi(DatasetApiResource): if not document: raise NotFound("Document not found.") # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -307,6 +320,7 @@ class ChildChunkApi(DatasetApiResource): @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): """Create child chunk.""" + _, current_tenant_id = current_account_with_tenant() # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() @@ -319,7 +333,7 @@ raise NotFound("Document not found.") # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -328,7 +342,7 @@ try: model_manager = ModelManager() model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, provider=dataset.embedding_model_provider, model_type=ModelType.TEXT_EMBEDDING, model=dataset.embedding_model, @@ -364,6 +378,7 @@ class ChildChunkApi(DatasetApiResource): } ) def get(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): """Get child chunks.""" + _, current_tenant_id = current_account_with_tenant() # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() @@ -376,7 +391,7 @@ raise NotFound("Document not found.") # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -423,6 +438,7 @@ class DatasetChildChunkApi(DatasetApiResource): @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def delete(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str, child_chunk_id: str): """Delete child chunk.""" + _, current_tenant_id = current_account_with_tenant() # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() @@ -435,7 +451,7 @@ raise NotFound("Document not found.") # check segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -444,9 +460,7 @@ raise NotFound("Document not found.") # check child
chunk - child_chunk = SegmentService.get_child_chunk_by_id( - child_chunk_id=child_chunk_id, tenant_id=current_user.current_tenant_id - ) + child_chunk = SegmentService.get_child_chunk_by_id(child_chunk_id=child_chunk_id, tenant_id=current_tenant_id) if not child_chunk: raise NotFound("Child chunk not found.") @@ -483,6 +497,7 @@ class DatasetChildChunkApi(DatasetApiResource): @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def patch(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str, child_chunk_id: str): """Update child chunk.""" + _, current_tenant_id = current_account_with_tenant() # check dataset dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() @@ -495,7 +510,7 @@ class DatasetChildChunkApi(DatasetApiResource): if not document: raise NotFound("Document not found.") # get segment - segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) + segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -504,9 +519,7 @@ class DatasetChildChunkApi(DatasetApiResource): raise NotFound("Segment not found.") # get child chunk - child_chunk = SegmentService.get_child_chunk_by_id( - child_chunk_id=child_chunk_id, tenant_id=current_user.current_tenant_id - ) + child_chunk = SegmentService.get_child_chunk_by_id(child_chunk_id=child_chunk_id, tenant_id=current_tenant_id) if not child_chunk: raise NotFound("Child chunk not found.") diff --git a/api/controllers/service_api/wraps.py b/api/controllers/service_api/wraps.py index 2c9be4e887..638ab528f3 100644 --- a/api/controllers/service_api/wraps.py +++ b/api/controllers/service_api/wraps.py @@ -17,7 +17,7 @@ from extensions.ext_database import db from extensions.ext_redis import redis_client from libs.datetime_utils import naive_utc_now from libs.login import current_user -from models.account import Account, Tenant, TenantAccountJoin, TenantStatus +from models import Account, Tenant, TenantAccountJoin, TenantStatus from models.dataset import Dataset, RateLimitLog from models.model import ApiToken, App, DefaultEndUserSessionID, EndUser from services.feature_service import FeatureService diff --git a/api/controllers/web/app.py b/api/controllers/web/app.py index 2bc068ec75..60193f5f15 100644 --- a/api/controllers/web/app.py +++ b/api/controllers/web/app.py @@ -4,12 +4,14 @@ from flask import request from flask_restx import Resource, marshal_with, reqparse from werkzeug.exceptions import Unauthorized +from constants import HEADER_NAME_APP_CODE from controllers.common import fields from controllers.web import web_ns from controllers.web.error import AppUnavailableError from controllers.web.wraps import WebApiResource from core.app.app_config.common.parameters_mapping import get_parameters_from_feature_dict from libs.passport import PassportService +from libs.token import extract_webapp_passport from models.model import App, AppMode from services.app_service import AppService from services.enterprise.enterprise_service import EnterpriseService @@ -94,9 +96,11 @@ class AppAccessMode(Resource): } ) def get(self): - parser = reqparse.RequestParser() - parser.add_argument("appId", type=str, required=False, location="args") - parser.add_argument("appCode", type=str, required=False, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("appId", type=str,
required=False, location="args") + .add_argument("appCode", type=str, required=False, location="args") + ) args = parser.parse_args() features = FeatureService.get_system_features() @@ -131,18 +135,19 @@ class AppWebAuthPermission(Resource): ) def get(self): user_id = "visitor" + app_code = request.headers.get(HEADER_NAME_APP_CODE) + app_id = request.args.get("appId") + if not app_id or not app_code: + raise ValueError("appId and appCode must be provided") + + require_permission_check = WebAppAuthService.is_app_require_permission_check(app_id=app_id) + if not require_permission_check: + return {"result": True} + try: - auth_header = request.headers.get("Authorization") - if auth_header is None: - raise Unauthorized("Authorization header is missing.") - if " " not in auth_header: - raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.") - - auth_scheme, tk = auth_header.split(None, 1) - auth_scheme = auth_scheme.lower() - if auth_scheme != "bearer": - raise Unauthorized("Authorization scheme must be 'Bearer'") - + tk = extract_webapp_passport(app_code, request) + if not tk: + raise Unauthorized("Access token is missing.") decoded = PassportService().verify(tk) user_id = decoded.get("user_id", "visitor") except Unauthorized: @@ -155,14 +160,7 @@ class AppWebAuthPermission(Resource): if not features.webapp_auth.enabled: return {"result": True} - parser = reqparse.RequestParser() - parser.add_argument("appId", type=str, required=True, location="args") - args = parser.parse_args() - - app_id = args["appId"] - app_code = AppService.get_app_code_by_id(app_id) - res = True if WebAppAuthService.is_app_require_permission_check(app_id=app_id): - res = EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp(str(user_id), app_code) + res = EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp(str(user_id), app_id) return {"result": res} diff --git a/api/controllers/web/audio.py b/api/controllers/web/audio.py index c1c46891b6..3103851088 100644 --- a/api/controllers/web/audio.py +++ b/api/controllers/web/audio.py @@ -108,11 +108,13 @@ class TextApi(WebApiResource): def post(self, app_model: App, end_user): """Convert text to audio""" try: - parser = reqparse.RequestParser() - parser.add_argument("message_id", type=str, required=False, location="json") - parser.add_argument("voice", type=str, location="json") - parser.add_argument("text", type=str, location="json") - parser.add_argument("streaming", type=bool, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("message_id", type=str, required=False, location="json") + .add_argument("voice", type=str, location="json") + .add_argument("text", type=str, location="json") + .add_argument("streaming", type=bool, location="json") + ) args = parser.parse_args() message_id = args.get("message_id", None) diff --git a/api/controllers/web/completion.py b/api/controllers/web/completion.py index 67ae970388..5e45beffc0 100644 --- a/api/controllers/web/completion.py +++ b/api/controllers/web/completion.py @@ -67,12 +67,14 @@ class CompletionApi(WebApiResource): if app_model.mode != "completion": raise NotCompletionAppError() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, location="json") - parser.add_argument("query", type=str, location="json", default="") - parser.add_argument("files", type=list, required=False, location="json") - parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") - parser.add_argument("retriever_from",
type=str, required=False, default="web_app", location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, location="json") + .add_argument("query", type=str, location="json", default="") + .add_argument("files", type=list, required=False, location="json") + .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") + .add_argument("retriever_from", type=str, required=False, default="web_app", location="json") + ) args = parser.parse_args() @@ -166,14 +168,16 @@ class ChatApi(WebApiResource): if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, location="json") - parser.add_argument("query", type=str, required=True, location="json") - parser.add_argument("files", type=list, required=False, location="json") - parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") - parser.add_argument("conversation_id", type=uuid_value, location="json") - parser.add_argument("parent_message_id", type=uuid_value, required=False, location="json") - parser.add_argument("retriever_from", type=str, required=False, default="web_app", location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, location="json") + .add_argument("query", type=str, required=True, location="json") + .add_argument("files", type=list, required=False, location="json") + .add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") + .add_argument("conversation_id", type=uuid_value, location="json") + .add_argument("parent_message_id", type=uuid_value, required=False, location="json") + .add_argument("retriever_from", type=str, required=False, default="web_app", location="json") + ) args = parser.parse_args() diff --git a/api/controllers/web/conversation.py b/api/controllers/web/conversation.py index 03dd986aed..86e19423e5 100644 --- a/api/controllers/web/conversation.py +++ b/api/controllers/web/conversation.py @@ -60,17 +60,19 @@ class ConversationListApi(WebApiResource): if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() - parser = reqparse.RequestParser() - parser.add_argument("last_id", type=uuid_value, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") - parser.add_argument("pinned", type=str, choices=["true", "false", None], location="args") - parser.add_argument( - "sort_by", - type=str, - choices=["created_at", "-created_at", "updated_at", "-updated_at"], - required=False, - default="-updated_at", - location="args", + parser = ( + reqparse.RequestParser() + .add_argument("last_id", type=uuid_value, location="args") + .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + .add_argument("pinned", type=str, choices=["true", "false", None], location="args") + .add_argument( + "sort_by", + type=str, + choices=["created_at", "-created_at", "updated_at", "-updated_at"], + required=False, + default="-updated_at", + location="args", + ) ) args = parser.parse_args() @@ -161,9 +163,11 @@ class ConversationRenameApi(WebApiResource): conversation_id = str(c_id) - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=False, location="json") - parser.add_argument("auto_generate", type=bool, required=False, default=False, 
location="json") + parser = ( + reqparse.RequestParser() + .add_argument("name", type=str, required=False, location="json") + .add_argument("auto_generate", type=bool, required=False, default=False, location="json") + ) args = parser.parse_args() try: diff --git a/api/controllers/web/forgot_password.py b/api/controllers/web/forgot_password.py index c743d0f52b..b9e391e049 100644 --- a/api/controllers/web/forgot_password.py +++ b/api/controllers/web/forgot_password.py @@ -20,7 +20,7 @@ from controllers.web import web_ns from extensions.ext_database import db from libs.helper import email, extract_remote_ip from libs.password import hash_password, valid_password -from models.account import Account +from models import Account from services.account_service import AccountService @@ -40,9 +40,11 @@ class ForgotPasswordSendEmailApi(Resource): } ) def post(self): - parser = reqparse.RequestParser() - parser.add_argument("email", type=email, required=True, location="json") - parser.add_argument("language", type=str, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=email, required=True, location="json") + .add_argument("language", type=str, required=False, location="json") + ) args = parser.parse_args() ip_address = extract_remote_ip(request) @@ -76,10 +78,12 @@ class ForgotPasswordCheckApi(Resource): responses={200: "Token is valid", 400: "Bad request - invalid token format", 401: "Invalid or expired token"} ) def post(self): - parser = reqparse.RequestParser() - parser.add_argument("email", type=str, required=True, location="json") - parser.add_argument("code", type=str, required=True, location="json") - parser.add_argument("token", type=str, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=str, required=True, location="json") + .add_argument("code", type=str, required=True, location="json") + .add_argument("token", type=str, required=True, nullable=False, location="json") + ) args = parser.parse_args() user_email = args["email"] @@ -127,10 +131,12 @@ class ForgotPasswordResetApi(Resource): } ) def post(self): - parser = reqparse.RequestParser() - parser.add_argument("token", type=str, required=True, nullable=False, location="json") - parser.add_argument("new_password", type=valid_password, required=True, nullable=False, location="json") - parser.add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("token", type=str, required=True, nullable=False, location="json") + .add_argument("new_password", type=valid_password, required=True, nullable=False, location="json") + .add_argument("password_confirm", type=valid_password, required=True, nullable=False, location="json") + ) args = parser.parse_args() # Validate passwords match diff --git a/api/controllers/web/login.py b/api/controllers/web/login.py index a489101cc9..f213fd8c90 100644 --- a/api/controllers/web/login.py +++ b/api/controllers/web/login.py @@ -1,7 +1,9 @@ +from flask import make_response, request from flask_restx import Resource, reqparse from jwt import InvalidTokenError import services +from configs import dify_config from controllers.console.auth.error import ( AuthenticationFailedError, EmailCodeError, @@ -10,9 +12,16 @@ from controllers.console.auth.error import ( from controllers.console.error import AccountBannedError from controllers.console.wraps import only_edition_enterprise, setup_required from 
controllers.web import web_ns +from controllers.web.wraps import decode_jwt_token from libs.helper import email +from libs.passport import PassportService from libs.password import valid_password +from libs.token import ( + clear_access_token_from_cookie, + extract_access_token, +) from services.account_service import AccountService +from services.app_service import AppService from services.webapp_auth_service import WebAppAuthService @@ -35,9 +44,11 @@ class LoginApi(Resource): ) def post(self): """Authenticate user and login.""" - parser = reqparse.RequestParser() - parser.add_argument("email", type=email, required=True, location="json") - parser.add_argument("password", type=valid_password, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=email, required=True, location="json") + .add_argument("password", type=valid_password, required=True, location="json") + ) args = parser.parse_args() try: @@ -50,17 +61,75 @@ class LoginApi(Resource): raise AuthenticationFailedError() token = WebAppAuthService.login(account=account) - return {"result": "success", "data": {"access_token": token}} + response = make_response({"result": "success", "data": {"access_token": token}}) + # set_access_token_to_cookie(request, response, token, samesite="None", httponly=False) + return response -# class LogoutApi(Resource): -# @setup_required -# def get(self): -# account = cast(Account, flask_login.current_user) -# if isinstance(account, flask_login.AnonymousUserMixin): -# return {"result": "success"} -# flask_login.logout_user() -# return {"result": "success"} +# This API helps the frontend check whether the user is authenticated. +# TODO: remove in the future; the frontend should redirect to the login page by catching a 401 status. +@web_ns.route("/login/status") +class LoginStatusApi(Resource): + @setup_required + @web_ns.doc("web_app_login_status") + @web_ns.doc(description="Check login status") + @web_ns.doc( + responses={ + 200: "Login status", + 401: "Login status", + } + ) + def get(self): + app_code = request.args.get("app_code") + token = extract_access_token(request) + if not app_code: + return { + "logged_in": bool(token), + "app_logged_in": False, + } + app_id = AppService.get_app_id_by_code(app_code) + is_public = not dify_config.ENTERPRISE_ENABLED or not WebAppAuthService.is_app_require_permission_check( + app_id=app_id + ) + user_logged_in = False + + if is_public: + user_logged_in = True + else: + try: + PassportService().verify(token=token) + user_logged_in = True + except Exception: + user_logged_in = False + + try: + _ = decode_jwt_token(app_code=app_code) + app_logged_in = True + except Exception: + app_logged_in = False + + return { + "logged_in": user_logged_in, + "app_logged_in": app_logged_in, + } + + +@web_ns.route("/logout") +class LogoutApi(Resource): + @setup_required + @web_ns.doc("web_app_logout") + @web_ns.doc(description="Logout user from web application") + @web_ns.doc( + responses={ + 200: "Logout successful", + } + ) + def post(self): + response = make_response({"result": "success"}) + # Enterprise SSO sets SameSite to None in HTTPS deployments, + # so we need to log out by calling this API. + clear_access_token_from_cookie(response, samesite="None") + return response @web_ns.route("/email-code-login") @@ -77,9 +146,11 @@ class EmailCodeLoginSendEmailApi(Resource): } ) def post(self): - parser = reqparse.RequestParser() - parser.add_argument("email", type=email, required=True, location="json") - parser.add_argument("language", type=str,
required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=email, required=True, location="json") + .add_argument("language", type=str, required=False, location="json") + ) args = parser.parse_args() if args["language"] is not None and args["language"] == "zh-Hans": @@ -92,7 +163,6 @@ class EmailCodeLoginSendEmailApi(Resource): raise AuthenticationFailedError() else: token = WebAppAuthService.send_email_code_login_email(account=account, language=language) - return {"result": "success", "data": token} @@ -111,10 +181,12 @@ class EmailCodeLoginApi(Resource): } ) def post(self): - parser = reqparse.RequestParser() - parser.add_argument("email", type=str, required=True, location="json") - parser.add_argument("code", type=str, required=True, location="json") - parser.add_argument("token", type=str, required=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("email", type=str, required=True, location="json") + .add_argument("code", type=str, required=True, location="json") + .add_argument("token", type=str, required=True, location="json") + ) args = parser.parse_args() user_email = args["email"] @@ -136,4 +208,6 @@ class EmailCodeLoginApi(Resource): token = WebAppAuthService.login(account=account) AccountService.reset_login_error_rate_limit(args["email"]) - return {"result": "success", "data": {"access_token": token}} + response = make_response({"result": "success", "data": {"access_token": token}}) + # set_access_token_to_cookie(request, response, token, samesite="None", httponly=False) + return response diff --git a/api/controllers/web/message.py b/api/controllers/web/message.py index a52cccac13..9f9aa4838c 100644 --- a/api/controllers/web/message.py +++ b/api/controllers/web/message.py @@ -93,10 +93,12 @@ class MessageListApi(WebApiResource): if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() - parser = reqparse.RequestParser() - parser.add_argument("conversation_id", required=True, type=uuid_value, location="args") - parser.add_argument("first_id", type=uuid_value, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("conversation_id", required=True, type=uuid_value, location="args") + .add_argument("first_id", type=uuid_value, location="args") + .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + ) args = parser.parse_args() try: @@ -143,9 +145,11 @@ class MessageFeedbackApi(WebApiResource): def post(self, app_model, end_user, message_id): message_id = str(message_id) - parser = reqparse.RequestParser() - parser.add_argument("rating", type=str, choices=["like", "dislike", None], location="json") - parser.add_argument("content", type=str, location="json", default=None) + parser = ( + reqparse.RequestParser() + .add_argument("rating", type=str, choices=["like", "dislike", None], location="json") + .add_argument("content", type=str, location="json", default=None) + ) args = parser.parse_args() try: @@ -193,8 +197,7 @@ class MessageMoreLikeThisApi(WebApiResource): message_id = str(message_id) - parser = reqparse.RequestParser() - parser.add_argument( + parser = reqparse.RequestParser().add_argument( "response_mode", type=str, required=True, choices=["blocking", "streaming"], location="args" ) args = parser.parse_args() diff --git a/api/controllers/web/passport.py b/api/controllers/web/passport.py index 
7190f06426..776b743e92 100644 --- a/api/controllers/web/passport.py +++ b/api/controllers/web/passport.py @@ -1,17 +1,20 @@ import uuid from datetime import UTC, datetime, timedelta -from flask import request +from flask import make_response, request from flask_restx import Resource from sqlalchemy import func, select from werkzeug.exceptions import NotFound, Unauthorized from configs import dify_config +from constants import HEADER_NAME_APP_CODE from controllers.web import web_ns from controllers.web.error import WebAppAuthRequiredError from extensions.ext_database import db from libs.passport import PassportService +from libs.token import extract_access_token from models.model import App, EndUser, Site +from services.app_service import AppService from services.enterprise.enterprise_service import EnterpriseService from services.feature_service import FeatureService from services.webapp_auth_service import WebAppAuthService, WebAppAuthType @@ -32,15 +35,15 @@ class PassportResource(Resource): ) def get(self): system_features = FeatureService.get_system_features() - app_code = request.headers.get("X-App-Code") + app_code = request.headers.get(HEADER_NAME_APP_CODE) user_id = request.args.get("user_id") - web_app_access_token = request.args.get("web_app_access_token") + access_token = extract_access_token(request) if app_code is None: raise Unauthorized("X-App-Code header is missing.") - + app_id = AppService.get_app_id_by_code(app_code) # exchange token for enterprise logined web user - enterprise_user_decoded = decode_enterprise_webapp_user_id(web_app_access_token) + enterprise_user_decoded = decode_enterprise_webapp_user_id(access_token) if enterprise_user_decoded: # a web user has already logged in, exchange a token for this app without redirecting to the login page return exchange_token_for_existing_web_user( @@ -48,7 +51,7 @@ class PassportResource(Resource): ) if system_features.webapp_auth.enabled: - app_settings = EnterpriseService.WebAppAuth.get_app_access_mode_by_code(app_code=app_code) + app_settings = EnterpriseService.WebAppAuth.get_app_access_mode_by_id(app_id=app_id) if not app_settings or not app_settings.access_mode == "public": raise WebAppAuthRequiredError() @@ -99,9 +102,12 @@ class PassportResource(Resource): tk = PassportService().issue(payload) - return { - "access_token": tk, - } + response = make_response( + { + "access_token": tk, + } + ) + return response def decode_enterprise_webapp_user_id(jwt_token: str | None): @@ -189,9 +195,12 @@ def exchange_token_for_existing_web_user(app_code: str, enterprise_user_decoded: "exp": exp, } token: str = PassportService().issue(payload) - return { - "access_token": token, - } + resp = make_response( + { + "access_token": token, + } + ) + return resp def _exchange_for_public_app_token(app_model, site, token_decoded): @@ -224,9 +233,12 @@ def _exchange_for_public_app_token(app_model, site, token_decoded): tk = PassportService().issue(payload) - return { - "access_token": tk, - } + resp = make_response( + { + "access_token": tk, + } + ) + return resp def generate_session_id(): diff --git a/api/controllers/web/remote_files.py b/api/controllers/web/remote_files.py index 0983e30b9d..dac4b3da94 100644 --- a/api/controllers/web/remote_files.py +++ b/api/controllers/web/remote_files.py @@ -97,8 +97,7 @@ class RemoteFileUploadApi(WebApiResource): FileTooLargeError: File exceeds size limit UnsupportedFileTypeError: File type not supported """ - parser = reqparse.RequestParser() - parser.add_argument("url", type=str, required=True, 
help="URL is required") + parser = reqparse.RequestParser().add_argument("url", type=str, required=True, help="URL is required") args = parser.parse_args() url = args["url"] diff --git a/api/controllers/web/saved_message.py b/api/controllers/web/saved_message.py index 96f09c8d3c..865f3610a7 100644 --- a/api/controllers/web/saved_message.py +++ b/api/controllers/web/saved_message.py @@ -63,9 +63,11 @@ class SavedMessageListApi(WebApiResource): if app_model.mode != "completion": raise NotCompletionAppError() - parser = reqparse.RequestParser() - parser.add_argument("last_id", type=uuid_value, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + parser = ( + reqparse.RequestParser() + .add_argument("last_id", type=uuid_value, location="args") + .add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") + ) args = parser.parse_args() return SavedMessageService.pagination_by_last_id(app_model, end_user, args["last_id"], args["limit"]) @@ -92,8 +94,7 @@ class SavedMessageListApi(WebApiResource): if app_model.mode != "completion": raise NotCompletionAppError() - parser = reqparse.RequestParser() - parser.add_argument("message_id", type=uuid_value, required=True, location="json") + parser = reqparse.RequestParser().add_argument("message_id", type=uuid_value, required=True, location="json") args = parser.parse_args() try: diff --git a/api/controllers/web/workflow.py b/api/controllers/web/workflow.py index 9a980148d9..3cbb07a296 100644 --- a/api/controllers/web/workflow.py +++ b/api/controllers/web/workflow.py @@ -58,9 +58,11 @@ class WorkflowRunApi(WebApiResource): if app_mode != AppMode.WORKFLOW: raise NotWorkflowAppError() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") - parser.add_argument("files", type=list, required=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("inputs", type=dict, required=True, nullable=False, location="json") + .add_argument("files", type=list, required=False, location="json") + ) args = parser.parse_args() try: diff --git a/api/controllers/web/wraps.py b/api/controllers/web/wraps.py index ba03c4eae4..9efd9f25d1 100644 --- a/api/controllers/web/wraps.py +++ b/api/controllers/web/wraps.py @@ -9,10 +9,13 @@ from sqlalchemy import select from sqlalchemy.orm import Session from werkzeug.exceptions import BadRequest, NotFound, Unauthorized +from constants import HEADER_NAME_APP_CODE from controllers.web.error import WebAppAuthAccessDeniedError, WebAppAuthRequiredError from extensions.ext_database import db from libs.passport import PassportService +from libs.token import extract_webapp_passport from models.model import App, EndUser, Site +from services.app_service import AppService from services.enterprise.enterprise_service import EnterpriseService, WebAppSettings from services.feature_service import FeatureService from services.webapp_auth_service import WebAppAuthService @@ -35,22 +38,14 @@ def validate_jwt_token(view: Callable[Concatenate[App, EndUser, P], R] | None = return decorator -def decode_jwt_token(): +def decode_jwt_token(app_code: str | None = None): system_features = FeatureService.get_system_features() - app_code = str(request.headers.get("X-App-Code")) + if not app_code: + app_code = str(request.headers.get(HEADER_NAME_APP_CODE)) try: - auth_header = request.headers.get("Authorization") - if auth_header is None: - raise Unauthorized("Authorization 
header is missing.") - - if " " not in auth_header: - raise Unauthorized("Invalid Authorization header format. Expected 'Bearer ' format.") - - auth_scheme, tk = auth_header.split(None, 1) - auth_scheme = auth_scheme.lower() - - if auth_scheme != "bearer": - raise Unauthorized("Invalid Authorization header format. Expected 'Bearer ' format.") + tk = extract_webapp_passport(app_code, request) + if not tk: + raise Unauthorized("App token is missing.") decoded = PassportService().verify(tk) app_code = decoded.get("app_code") app_id = decoded.get("app_id") @@ -72,7 +67,8 @@ def decode_jwt_token(): app_web_auth_enabled = False webapp_settings = None if system_features.webapp_auth.enabled: - webapp_settings = EnterpriseService.WebAppAuth.get_app_access_mode_by_code(app_code=app_code) + app_id = AppService.get_app_id_by_code(app_code) + webapp_settings = EnterpriseService.WebAppAuth.get_app_access_mode_by_id(app_id) if not webapp_settings: raise NotFound("Web app settings not found.") app_web_auth_enabled = webapp_settings.access_mode != "public" @@ -87,8 +83,9 @@ def decode_jwt_token(): if system_features.webapp_auth.enabled: if not app_code: raise Unauthorized("Please re-login to access the web app.") + app_id = AppService.get_app_id_by_code(app_code) app_web_auth_enabled = ( - EnterpriseService.WebAppAuth.get_app_access_mode_by_code(app_code=str(app_code)).access_mode != "public" + EnterpriseService.WebAppAuth.get_app_access_mode_by_id(app_id=app_id).access_mode != "public" ) if app_web_auth_enabled: raise WebAppAuthRequiredError() @@ -129,7 +126,8 @@ def _validate_user_accessibility( raise WebAppAuthRequiredError("Web app settings not found.") if WebAppAuthService.is_app_require_permission_check(access_mode=webapp_settings.access_mode): - if not EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp(user_id, app_code=app_code): + app_id = AppService.get_app_id_by_code(app_code) + if not EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp(user_id, app_id): raise WebAppAuthAccessDeniedError() auth_type = decoded.get("auth_type") diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py index b6234491c5..feb0d3358c 100644 --- a/api/core/app/apps/advanced_chat/app_generator.py +++ b/api/core/app/apps/advanced_chat/app_generator.py @@ -447,6 +447,8 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): "message_id": message.id, "context": context, "variable_loader": variable_loader, + "workflow_execution_repository": workflow_execution_repository, + "workflow_node_execution_repository": workflow_node_execution_repository, }, ) @@ -466,8 +468,6 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): conversation=conversation, message=message, user=user, - workflow_execution_repository=workflow_execution_repository, - workflow_node_execution_repository=workflow_node_execution_repository, stream=stream, draft_var_saver_factory=self._get_draft_var_saver_factory(invoke_from, account=user), ) @@ -483,6 +483,8 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): message_id: str, context: contextvars.Context, variable_loader: VariableLoader, + workflow_execution_repository: WorkflowExecutionRepository, + workflow_node_execution_repository: WorkflowNodeExecutionRepository, ): """ Generate worker in a new thread. 
@@ -538,6 +540,8 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): workflow=workflow, system_user_id=system_user_id, app=app, + workflow_execution_repository=workflow_execution_repository, + workflow_node_execution_repository=workflow_node_execution_repository, ) try: @@ -570,8 +574,6 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): conversation: Conversation, message: Message, user: Union[Account, EndUser], - workflow_execution_repository: WorkflowExecutionRepository, - workflow_node_execution_repository: WorkflowNodeExecutionRepository, draft_var_saver_factory: DraftVariableSaverFactory, stream: bool = False, ) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]: @@ -584,7 +586,6 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): :param message: message :param user: account or end user :param stream: is stream - :param workflow_node_execution_repository: optional repository for workflow node execution :return: """ # init generate task pipeline @@ -596,8 +597,6 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): message=message, user=user, dialogue_count=self._dialogue_count, - workflow_execution_repository=workflow_execution_repository, - workflow_node_execution_repository=workflow_node_execution_repository, stream=stream, draft_var_saver_factory=draft_var_saver_factory, ) diff --git a/api/core/app/apps/advanced_chat/app_runner.py b/api/core/app/apps/advanced_chat/app_runner.py index 919b135ec9..587c663482 100644 --- a/api/core/app/apps/advanced_chat/app_runner.py +++ b/api/core/app/apps/advanced_chat/app_runner.py @@ -23,8 +23,12 @@ from core.app.features.annotation_reply.annotation_reply import AnnotationReplyF from core.moderation.base import ModerationError from core.moderation.input_moderation import InputModeration from core.variables.variables import VariableUnion -from core.workflow.entities import GraphRuntimeState, VariablePool +from core.workflow.enums import WorkflowType from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel +from core.workflow.graph_engine.layers.persistence import PersistenceWorkflowInfo, WorkflowPersistenceLayer +from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository +from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.variable_loader import VariableLoader from core.workflow.workflow_entry import WorkflowEntry @@ -55,6 +59,8 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): workflow: Workflow, system_user_id: str, app: App, + workflow_execution_repository: WorkflowExecutionRepository, + workflow_node_execution_repository: WorkflowNodeExecutionRepository, ): super().__init__( queue_manager=queue_manager, @@ -68,11 +74,24 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): self._workflow = workflow self.system_user_id = system_user_id self._app = app + self._workflow_execution_repository = workflow_execution_repository + self._workflow_node_execution_repository = workflow_node_execution_repository def run(self): app_config = self.application_generate_entity.app_config app_config = cast(AdvancedChatAppConfig, app_config) + system_inputs = SystemVariable( + query=self.application_generate_entity.query, + files=self.application_generate_entity.files, + conversation_id=self.conversation.id, + 
user_id=self.system_user_id, + dialogue_count=self._dialogue_count, + app_id=app_config.app_id, + workflow_id=app_config.workflow_id, + workflow_execution_id=self.application_generate_entity.workflow_run_id, + ) + with Session(db.engine, expire_on_commit=False) as session: app_record = session.scalar(select(App).where(App.id == app_config.app_id)) @@ -89,7 +108,6 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): else: inputs = self.application_generate_entity.inputs query = self.application_generate_entity.query - files = self.application_generate_entity.files # moderation if self.handle_input_moderation( @@ -114,17 +132,6 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): conversation_variables = self._initialize_conversation_variables() # Create a variable pool. - system_inputs = SystemVariable( - query=query, - files=files, - conversation_id=self.conversation.id, - user_id=self.system_user_id, - dialogue_count=self._dialogue_count, - app_id=app_config.app_id, - workflow_id=app_config.workflow_id, - workflow_execution_id=self.application_generate_entity.workflow_run_id, - ) - # init variable pool variable_pool = VariablePool( system_variables=system_inputs, @@ -172,6 +179,23 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): command_channel=command_channel, ) + self._queue_manager.graph_runtime_state = graph_runtime_state + + persistence_layer = WorkflowPersistenceLayer( + application_generate_entity=self.application_generate_entity, + workflow_info=PersistenceWorkflowInfo( + workflow_id=self._workflow.id, + workflow_type=WorkflowType(self._workflow.type), + version=self._workflow.version, + graph_data=self._workflow.graph_dict, + ), + workflow_execution_repository=self._workflow_execution_repository, + workflow_node_execution_repository=self._workflow_node_execution_repository, + trace_manager=self.application_generate_entity.trace_manager, + ) + + workflow_entry.graph_engine.layer(persistence_layer) + generator = workflow_entry.run() for event in generator: diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py index e021b0aca7..8c0102d9bd 100644 --- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py +++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py @@ -11,6 +11,7 @@ from sqlalchemy.orm import Session from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom +from core.app.apps.common.graph_runtime_state_support import GraphRuntimeStateSupport from core.app.apps.common.workflow_response_converter import WorkflowResponseConverter from core.app.entities.app_invoke_entities import ( AdvancedChatAppGenerateEntity, @@ -60,25 +61,21 @@ from core.app.task_pipeline.message_cycle_manager import MessageCycleManager from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk from core.model_runtime.entities.llm_entities import LLMUsage from core.ops.ops_trace_manager import TraceQueueManager -from core.workflow.entities import GraphRuntimeState -from core.workflow.enums import WorkflowExecutionStatus, WorkflowType +from core.workflow.enums import WorkflowExecutionStatus from core.workflow.nodes import NodeType from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory -from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository -from core.workflow.repositories.workflow_node_execution_repository 
import WorkflowNodeExecutionRepository +from core.workflow.runtime import GraphRuntimeState from core.workflow.system_variable import SystemVariable -from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager from extensions.ext_database import db from libs.datetime_utils import naive_utc_now -from models import Conversation, EndUser, Message, MessageFile -from models.account import Account +from models import Account, Conversation, EndUser, Message, MessageFile from models.enums import CreatorUserRole from models.workflow import Workflow logger = logging.getLogger(__name__) -class AdvancedChatAppGenerateTaskPipeline: +class AdvancedChatAppGenerateTaskPipeline(GraphRuntimeStateSupport): """ AdvancedChatAppGenerateTaskPipeline is a class that generate stream output and state management for Application. """ @@ -93,8 +90,6 @@ class AdvancedChatAppGenerateTaskPipeline: user: Union[Account, EndUser], stream: bool, dialogue_count: int, - workflow_execution_repository: WorkflowExecutionRepository, - workflow_node_execution_repository: WorkflowNodeExecutionRepository, draft_var_saver_factory: DraftVariableSaverFactory, ): self._base_task_pipeline = BasedGenerateTaskPipeline( @@ -114,31 +109,20 @@ class AdvancedChatAppGenerateTaskPipeline: else: raise NotImplementedError(f"User type not supported: {type(user)}") - self._workflow_cycle_manager = WorkflowCycleManager( - application_generate_entity=application_generate_entity, - workflow_system_variables=SystemVariable( - query=message.query, - files=application_generate_entity.files, - conversation_id=conversation.id, - user_id=user_session_id, - dialogue_count=dialogue_count, - app_id=application_generate_entity.app_config.app_id, - workflow_id=workflow.id, - workflow_execution_id=application_generate_entity.workflow_run_id, - ), - workflow_info=CycleManagerWorkflowInfo( - workflow_id=workflow.id, - workflow_type=WorkflowType(workflow.type), - version=workflow.version, - graph_data=workflow.graph_dict, - ), - workflow_execution_repository=workflow_execution_repository, - workflow_node_execution_repository=workflow_node_execution_repository, + self._workflow_system_variables = SystemVariable( + query=message.query, + files=application_generate_entity.files, + conversation_id=conversation.id, + user_id=user_session_id, + dialogue_count=dialogue_count, + app_id=application_generate_entity.app_config.app_id, + workflow_id=workflow.id, + workflow_execution_id=application_generate_entity.workflow_run_id, ) - self._workflow_response_converter = WorkflowResponseConverter( application_generate_entity=application_generate_entity, user=user, + system_variables=self._workflow_system_variables, ) self._task_state = WorkflowTaskState() @@ -157,6 +141,8 @@ class AdvancedChatAppGenerateTaskPipeline: self._recorded_files: list[Mapping[str, Any]] = [] self._workflow_run_id: str = "" self._draft_var_saver_factory = draft_var_saver_factory + self._graph_runtime_state: GraphRuntimeState | None = None + self._seed_graph_runtime_state_from_queue_manager() def process(self) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]: """ @@ -289,12 +275,6 @@ class AdvancedChatAppGenerateTaskPipeline: if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - def _ensure_graph_runtime_initialized(self, graph_runtime_state: GraphRuntimeState | None) -> GraphRuntimeState: - """Fluent validation for graph runtime state.""" - if not graph_runtime_state: - raise ValueError("graph runtime state 
not initialized.") - return graph_runtime_state - def _handle_ping_event(self, event: QueuePingEvent, **kwargs) -> Generator[PingStreamResponse, None, None]: """Handle ping events.""" yield self._base_task_pipeline.ping_stream_response() @@ -305,21 +285,28 @@ class AdvancedChatAppGenerateTaskPipeline: err = self._base_task_pipeline.handle_error(event=event, session=session, message_id=self._message_id) yield self._base_task_pipeline.error_to_stream_response(err) - def _handle_workflow_started_event(self, *args, **kwargs) -> Generator[StreamResponse, None, None]: + def _handle_workflow_started_event( + self, + event: QueueWorkflowStartedEvent, + **kwargs, + ) -> Generator[StreamResponse, None, None]: """Handle workflow started events.""" - with self._database_session() as session: - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_start() - self._workflow_run_id = workflow_execution.id_ + runtime_state = self._resolve_graph_runtime_state() + run_id = self._extract_workflow_run_id(runtime_state) + self._workflow_run_id = run_id + with self._database_session() as session: message = self._get_message(session=session) if not message: raise ValueError(f"Message not found: {self._message_id}") - message.workflow_run_id = workflow_execution.id_ - workflow_start_resp = self._workflow_response_converter.workflow_start_to_stream_response( - task_id=self._application_generate_entity.task_id, - workflow_execution=workflow_execution, - ) + message.workflow_run_id = run_id + + workflow_start_resp = self._workflow_response_converter.workflow_start_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_run_id=run_id, + workflow_id=self._workflow_id, + ) yield workflow_start_resp @@ -327,13 +314,9 @@ class AdvancedChatAppGenerateTaskPipeline: """Handle node retry events.""" self._ensure_workflow_initialized() - workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_retried( - workflow_execution_id=self._workflow_run_id, event=event - ) node_retry_resp = self._workflow_response_converter.workflow_node_retry_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, ) if node_retry_resp: @@ -345,14 +328,9 @@ class AdvancedChatAppGenerateTaskPipeline: """Handle node started events.""" self._ensure_workflow_initialized() - workflow_node_execution = self._workflow_cycle_manager.handle_node_execution_start( - workflow_execution_id=self._workflow_run_id, event=event - ) - node_start_resp = self._workflow_response_converter.workflow_node_start_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, ) if node_start_resp: @@ -368,14 +346,12 @@ class AdvancedChatAppGenerateTaskPipeline: self._workflow_response_converter.fetch_files_from_node_outputs(event.outputs or {}) ) - workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_success(event=event) node_finish_resp = self._workflow_response_converter.workflow_node_finish_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, ) - self._save_output_for_event(event, workflow_node_execution.id) + self._save_output_for_event(event, event.node_execution_id) if node_finish_resp: yield node_finish_resp @@ -386,16 +362,13 @@ class AdvancedChatAppGenerateTaskPipeline: **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle 
various node failure events.""" - workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_failed(event=event) - node_finish_resp = self._workflow_response_converter.workflow_node_finish_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, ) if isinstance(event, QueueNodeExceptionEvent): - self._save_output_for_event(event, workflow_node_execution.id) + self._save_output_for_event(event, event.node_execution_id) if node_finish_resp: yield node_finish_resp @@ -505,29 +478,19 @@ class AdvancedChatAppGenerateTaskPipeline: self, event: QueueWorkflowSucceededEvent, *, - graph_runtime_state: GraphRuntimeState | None = None, trace_manager: TraceQueueManager | None = None, **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle workflow succeeded events.""" + _ = trace_manager self._ensure_workflow_initialized() - validated_state = self._ensure_graph_runtime_initialized(graph_runtime_state) - - with self._database_session() as session: - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_success( - workflow_run_id=self._workflow_run_id, - total_tokens=validated_state.total_tokens, - total_steps=validated_state.node_run_steps, - outputs=event.outputs, - conversation_id=self._conversation_id, - trace_manager=trace_manager, - external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), - ) - workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_execution=workflow_execution, - ) + validated_state = self._ensure_graph_runtime_initialized() + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_id=self._workflow_id, + status=WorkflowExecutionStatus.SUCCEEDED, + graph_runtime_state=validated_state, + ) yield workflow_finish_resp self._base_task_pipeline.queue_manager.publish(QueueAdvancedChatMessageEndEvent(), PublishFrom.TASK_PIPELINE) @@ -536,30 +499,20 @@ class AdvancedChatAppGenerateTaskPipeline: self, event: QueueWorkflowPartialSuccessEvent, *, - graph_runtime_state: GraphRuntimeState | None = None, trace_manager: TraceQueueManager | None = None, **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle workflow partial success events.""" + _ = trace_manager self._ensure_workflow_initialized() - validated_state = self._ensure_graph_runtime_initialized(graph_runtime_state) - - with self._database_session() as session: - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_partial_success( - workflow_run_id=self._workflow_run_id, - total_tokens=validated_state.total_tokens, - total_steps=validated_state.node_run_steps, - outputs=event.outputs, - exceptions_count=event.exceptions_count, - conversation_id=self._conversation_id, - trace_manager=trace_manager, - external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), - ) - workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_execution=workflow_execution, - ) + validated_state = self._ensure_graph_runtime_initialized() + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_id=self._workflow_id, + 
status=WorkflowExecutionStatus.PARTIAL_SUCCEEDED, + graph_runtime_state=validated_state, + exceptions_count=event.exceptions_count, + ) yield workflow_finish_resp self._base_task_pipeline.queue_manager.publish(QueueAdvancedChatMessageEndEvent(), PublishFrom.TASK_PIPELINE) @@ -568,32 +521,25 @@ class AdvancedChatAppGenerateTaskPipeline: self, event: QueueWorkflowFailedEvent, *, - graph_runtime_state: GraphRuntimeState | None = None, trace_manager: TraceQueueManager | None = None, **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle workflow failed events.""" + _ = trace_manager self._ensure_workflow_initialized() - validated_state = self._ensure_graph_runtime_initialized(graph_runtime_state) + validated_state = self._ensure_graph_runtime_initialized() + + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_id=self._workflow_id, + status=WorkflowExecutionStatus.FAILED, + graph_runtime_state=validated_state, + error=event.error, + exceptions_count=event.exceptions_count, + ) with self._database_session() as session: - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_failed( - workflow_run_id=self._workflow_run_id, - total_tokens=validated_state.total_tokens, - total_steps=validated_state.node_run_steps, - status=WorkflowExecutionStatus.FAILED, - error_message=event.error, - conversation_id=self._conversation_id, - trace_manager=trace_manager, - exceptions_count=event.exceptions_count, - external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), - ) - workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_execution=workflow_execution, - ) - err_event = QueueErrorEvent(error=ValueError(f"Run failed: {workflow_execution.error_message}")) + err_event = QueueErrorEvent(error=ValueError(f"Run failed: {event.error}")) err = self._base_task_pipeline.handle_error(event=err_event, session=session, message_id=self._message_id) yield workflow_finish_resp @@ -608,25 +554,23 @@ class AdvancedChatAppGenerateTaskPipeline: **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle stop events.""" - if self._workflow_run_id and graph_runtime_state: + _ = trace_manager + resolved_state = None + if self._workflow_run_id: + resolved_state = self._resolve_graph_runtime_state(graph_runtime_state) + + if self._workflow_run_id and resolved_state: + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_id=self._workflow_id, + status=WorkflowExecutionStatus.STOPPED, + graph_runtime_state=resolved_state, + error=event.get_stop_reason(), + ) + with self._database_session() as session: - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_failed( - workflow_run_id=self._workflow_run_id, - total_tokens=graph_runtime_state.total_tokens, - total_steps=graph_runtime_state.node_run_steps, - status=WorkflowExecutionStatus.STOPPED, - error_message=event.get_stop_reason(), - conversation_id=self._conversation_id, - trace_manager=trace_manager, - external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), - ) - workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - 
workflow_execution=workflow_execution, - ) # Save message - self._save_message(session=session, graph_runtime_state=graph_runtime_state) + self._save_message(session=session, graph_runtime_state=resolved_state) yield workflow_finish_resp elif event.stopped_by in ( @@ -648,7 +592,7 @@ class AdvancedChatAppGenerateTaskPipeline: **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle advanced chat message end events.""" - self._ensure_graph_runtime_initialized(graph_runtime_state) + resolved_state = self._ensure_graph_runtime_initialized(graph_runtime_state) output_moderation_answer = self._base_task_pipeline.handle_output_moderation_when_task_finished( self._task_state.answer @@ -662,7 +606,7 @@ class AdvancedChatAppGenerateTaskPipeline: # Save message with self._database_session() as session: - self._save_message(session=session, graph_runtime_state=graph_runtime_state) + self._save_message(session=session, graph_runtime_state=resolved_state) yield self._message_end_to_stream_response() @@ -671,10 +615,6 @@ class AdvancedChatAppGenerateTaskPipeline: ) -> Generator[StreamResponse, None, None]: """Handle retriever resources events.""" self._message_cycle_manager.handle_retriever_resources(event) - - with self._database_session() as session: - message = self._get_message(session=session) - message.message_metadata = self._task_state.metadata.model_dump_json() return yield # Make this a generator @@ -683,10 +623,6 @@ class AdvancedChatAppGenerateTaskPipeline: ) -> Generator[StreamResponse, None, None]: """Handle annotation reply events.""" self._message_cycle_manager.handle_annotation_reply(event) - - with self._database_session() as session: - message = self._get_message(session=session) - message.message_metadata = self._task_state.metadata.model_dump_json() return yield # Make this a generator @@ -740,7 +676,6 @@ class AdvancedChatAppGenerateTaskPipeline: self, event: Any, *, - graph_runtime_state: GraphRuntimeState | None = None, tts_publisher: AppGeneratorTTSPublisher | None = None, trace_manager: TraceQueueManager | None = None, queue_message: Union[WorkflowQueueMessage, MessageQueueMessage] | None = None, @@ -753,7 +688,6 @@ class AdvancedChatAppGenerateTaskPipeline: if handler := handlers.get(event_type): yield from handler( event, - graph_runtime_state=graph_runtime_state, tts_publisher=tts_publisher, trace_manager=trace_manager, queue_message=queue_message, @@ -770,7 +704,6 @@ class AdvancedChatAppGenerateTaskPipeline: ): yield from self._handle_node_failed_events( event, - graph_runtime_state=graph_runtime_state, tts_publisher=tts_publisher, trace_manager=trace_manager, queue_message=queue_message, @@ -789,15 +722,12 @@ class AdvancedChatAppGenerateTaskPipeline: Process stream response using elegant Fluent Python patterns. Maintains exact same functionality as original 57-if-statement version. 
""" - # Initialize graph runtime state - graph_runtime_state: GraphRuntimeState | None = None - for queue_message in self._base_task_pipeline.queue_manager.listen(): event = queue_message.event match event: case QueueWorkflowStartedEvent(): - graph_runtime_state = event.graph_runtime_state + self._resolve_graph_runtime_state() yield from self._handle_workflow_started_event(event) case QueueErrorEvent(): @@ -805,15 +735,11 @@ class AdvancedChatAppGenerateTaskPipeline: break case QueueWorkflowFailedEvent(): - yield from self._handle_workflow_failed_event( - event, graph_runtime_state=graph_runtime_state, trace_manager=trace_manager - ) + yield from self._handle_workflow_failed_event(event, trace_manager=trace_manager) break case QueueStopEvent(): - yield from self._handle_stop_event( - event, graph_runtime_state=graph_runtime_state, trace_manager=trace_manager - ) + yield from self._handle_stop_event(event, graph_runtime_state=None, trace_manager=trace_manager) break # Handle all other events through elegant dispatch @@ -821,7 +747,6 @@ class AdvancedChatAppGenerateTaskPipeline: if responses := list( self._dispatch_event( event, - graph_runtime_state=graph_runtime_state, tts_publisher=tts_publisher, trace_manager=trace_manager, queue_message=queue_message, @@ -879,6 +804,12 @@ class AdvancedChatAppGenerateTaskPipeline: else: self._task_state.metadata.usage = LLMUsage.empty_usage() + def _seed_graph_runtime_state_from_queue_manager(self) -> None: + """Bootstrap the cached runtime state from the queue manager when present.""" + candidate = self._base_task_pipeline.queue_manager.graph_runtime_state + if candidate is not None: + self._graph_runtime_state = candidate + def _message_end_to_stream_response(self) -> MessageEndStreamResponse: """ Message end to stream response. 
diff --git a/api/core/app/apps/base_app_queue_manager.py b/api/core/app/apps/base_app_queue_manager.py index 4b246a53d3..698eee9894 100644 --- a/api/core/app/apps/base_app_queue_manager.py +++ b/api/core/app/apps/base_app_queue_manager.py @@ -1,10 +1,12 @@ import logging import queue +import threading import time from abc import abstractmethod from enum import IntEnum, auto from typing import Any +from cachetools import TTLCache, cachedmethod from redis.exceptions import RedisError from sqlalchemy.orm import DeclarativeMeta @@ -18,6 +20,7 @@ from core.app.entities.queue_entities import ( QueueStopEvent, WorkflowQueueMessage, ) +from core.workflow.runtime import GraphRuntimeState from extensions.ext_redis import redis_client logger = logging.getLogger(__name__) @@ -45,6 +48,9 @@ class AppQueueManager: q: queue.Queue[WorkflowQueueMessage | MessageQueueMessage | None] = queue.Queue() self._q = q + self._graph_runtime_state: GraphRuntimeState | None = None + self._stopped_cache: TTLCache[tuple, bool] = TTLCache(maxsize=1, ttl=1) + self._cache_lock = threading.Lock() def listen(self): """ @@ -105,6 +111,16 @@ class AppQueueManager: """ self.publish(QueueErrorEvent(error=e), pub_from) + @property + def graph_runtime_state(self) -> GraphRuntimeState | None: + """Retrieve the attached graph runtime state, if available.""" + return self._graph_runtime_state + + @graph_runtime_state.setter + def graph_runtime_state(self, graph_runtime_state: GraphRuntimeState | None) -> None: + """Attach the live graph runtime state reference for downstream consumers.""" + self._graph_runtime_state = graph_runtime_state + def publish(self, event: AppQueueEvent, pub_from: PublishFrom): """ Publish event to queue @@ -157,6 +173,7 @@ class AppQueueManager: stopped_cache_key = cls._generate_stopped_cache_key(task_id) redis_client.setex(stopped_cache_key, 600, 1) + @cachedmethod(lambda self: self._stopped_cache, lock=lambda self: self._cache_lock) def _is_stopped(self) -> bool: """ Check if task is stopped diff --git a/api/core/app/apps/chat/app_generator.py b/api/core/app/apps/chat/app_generator.py index 8bd956b314..c1251d2feb 100644 --- a/api/core/app/apps/chat/app_generator.py +++ b/api/core/app/apps/chat/app_generator.py @@ -23,7 +23,7 @@ from core.model_runtime.errors.invoke import InvokeAuthorizationError from core.ops.ops_trace_manager import TraceQueueManager from extensions.ext_database import db from factories import file_factory -from models.account import Account +from models import Account from models.model import App, EndUser from services.conversation_service import ConversationService diff --git a/api/core/app/apps/common/graph_runtime_state_support.py b/api/core/app/apps/common/graph_runtime_state_support.py new file mode 100644 index 0000000000..0b03149665 --- /dev/null +++ b/api/core/app/apps/common/graph_runtime_state_support.py @@ -0,0 +1,55 @@ +"""Shared helpers for managing GraphRuntimeState across task pipelines.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from core.workflow.runtime import GraphRuntimeState + +if TYPE_CHECKING: + from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline + + +class GraphRuntimeStateSupport: + """ + Mixin that centralises common GraphRuntimeState access patterns used by task pipelines. + + Subclasses are expected to provide: + * `_base_task_pipeline` – exposing the queue manager with an optional cached runtime state. + * `_graph_runtime_state` attribute used as the local cache for the runtime state. 
+ """ + + _base_task_pipeline: BasedGenerateTaskPipeline + _graph_runtime_state: GraphRuntimeState | None = None + + def _ensure_graph_runtime_initialized( + self, + graph_runtime_state: GraphRuntimeState | None = None, + ) -> GraphRuntimeState: + """Validate and return the active graph runtime state.""" + return self._resolve_graph_runtime_state(graph_runtime_state) + + def _extract_workflow_run_id(self, graph_runtime_state: GraphRuntimeState) -> str: + system_variables = graph_runtime_state.variable_pool.system_variables + if not system_variables or not system_variables.workflow_execution_id: + raise ValueError("workflow_execution_id missing from runtime state") + return str(system_variables.workflow_execution_id) + + def _resolve_graph_runtime_state( + self, + graph_runtime_state: GraphRuntimeState | None = None, + ) -> GraphRuntimeState: + """Return the cached runtime state or bootstrap it from the queue manager.""" + if graph_runtime_state is not None: + self._graph_runtime_state = graph_runtime_state + return graph_runtime_state + + if self._graph_runtime_state is None: + candidate = self._base_task_pipeline.queue_manager.graph_runtime_state + if candidate is not None: + self._graph_runtime_state = candidate + + if self._graph_runtime_state is None: + raise ValueError("graph runtime state not initialized.") + + return self._graph_runtime_state diff --git a/api/core/app/apps/common/workflow_response_converter.py b/api/core/app/apps/common/workflow_response_converter.py index 7c7a4fd6ac..2c9ce5b56d 100644 --- a/api/core/app/apps/common/workflow_response_converter.py +++ b/api/core/app/apps/common/workflow_response_converter.py @@ -1,9 +1,8 @@ import time from collections.abc import Mapping, Sequence -from datetime import UTC, datetime -from typing import Any, Union - -from sqlalchemy.orm import Session +from dataclasses import dataclass +from datetime import datetime +from typing import Any, NewType, Union from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity from core.app.entities.queue_entities import ( @@ -39,16 +38,36 @@ from core.plugin.impl.datasource import PluginDatasourceManager from core.tools.entities.tool_entities import ToolProviderType from core.tools.tool_manager import ToolManager from core.variables.segments import ArrayFileSegment, FileSegment, Segment -from core.workflow.entities import WorkflowExecution, WorkflowNodeExecution -from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus +from core.workflow.enums import ( + NodeType, + SystemVariableKey, + WorkflowExecutionStatus, + WorkflowNodeExecutionMetadataKey, + WorkflowNodeExecutionStatus, +) +from core.workflow.runtime import GraphRuntimeState +from core.workflow.system_variable import SystemVariable +from core.workflow.workflow_entry import WorkflowEntry from core.workflow.workflow_type_encoder import WorkflowRuntimeTypeConverter from libs.datetime_utils import naive_utc_now -from models import ( - Account, - EndUser, -) +from models import Account, EndUser from services.variable_truncator import VariableTruncator +NodeExecutionId = NewType("NodeExecutionId", str) + + +@dataclass(slots=True) +class _NodeSnapshot: + """In-memory cache for node metadata between start and completion events.""" + + title: str + index: int + start_at: datetime + iteration_id: str = "" + """Empty string means the node is not executing inside an iteration.""" + loop_id: str = "" + """Empty string means the node is not executing inside a loop.""" + class 
WorkflowResponseConverter: def __init__( @@ -56,37 +75,151 @@ class WorkflowResponseConverter: *, application_generate_entity: Union[AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity], user: Union[Account, EndUser], + system_variables: SystemVariable, ): self._application_generate_entity = application_generate_entity self._user = user + self._system_variables = system_variables + self._workflow_inputs = self._prepare_workflow_inputs() self._truncator = VariableTruncator.default() + self._node_snapshots: dict[NodeExecutionId, _NodeSnapshot] = {} + self._workflow_execution_id: str | None = None + self._workflow_started_at: datetime | None = None + + # ------------------------------------------------------------------ + # Workflow lifecycle helpers + # ------------------------------------------------------------------ + def _prepare_workflow_inputs(self) -> Mapping[str, Any]: + inputs = dict(self._application_generate_entity.inputs) + for field_name, value in self._system_variables.to_dict().items(): + # TODO(@future-refactor): store system variables separately from user inputs so we don't + # need to flatten `sys.*` entries into the input payload just for rerun/export tooling. + if field_name == SystemVariableKey.CONVERSATION_ID: + # Conversation IDs are session-scoped; omitting them keeps workflow inputs + # reusable without pinning new runs to a prior conversation. + continue + inputs[f"sys.{field_name}"] = value + handled = WorkflowEntry.handle_special_values(inputs) + return dict(handled or {}) + + def _ensure_workflow_run_id(self, workflow_run_id: str | None = None) -> str: + """Return the memoized workflow run id, optionally seeding it during start events.""" + if workflow_run_id is not None: + self._workflow_execution_id = workflow_run_id + if not self._workflow_execution_id: + raise ValueError("workflow_run_id missing before streaming workflow events") + return self._workflow_execution_id + + # ------------------------------------------------------------------ + # Node snapshot helpers + # ------------------------------------------------------------------ + def _store_snapshot(self, event: QueueNodeStartedEvent) -> _NodeSnapshot: + snapshot = _NodeSnapshot( + title=event.node_title, + index=event.node_run_index, + start_at=event.start_at, + iteration_id=event.in_iteration_id or "", + loop_id=event.in_loop_id or "", + ) + node_execution_id = NodeExecutionId(event.node_execution_id) + self._node_snapshots[node_execution_id] = snapshot + return snapshot + + def _get_snapshot(self, node_execution_id: str) -> _NodeSnapshot | None: + return self._node_snapshots.get(NodeExecutionId(node_execution_id)) + + def _pop_snapshot(self, node_execution_id: str) -> _NodeSnapshot | None: + return self._node_snapshots.pop(NodeExecutionId(node_execution_id), None) + + @staticmethod + def _merge_metadata( + base_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None, + snapshot: _NodeSnapshot | None, + ) -> Mapping[WorkflowNodeExecutionMetadataKey, Any] | None: + if not base_metadata and not snapshot: + return base_metadata + + merged: dict[WorkflowNodeExecutionMetadataKey, Any] = {} + if base_metadata: + merged.update(base_metadata) + + if snapshot: + if snapshot.iteration_id: + merged[WorkflowNodeExecutionMetadataKey.ITERATION_ID] = snapshot.iteration_id + if snapshot.loop_id: + merged[WorkflowNodeExecutionMetadataKey.LOOP_ID] = snapshot.loop_id + + return merged or None + + def _truncate_mapping( + self, + mapping: Mapping[str, Any] | None, + ) -> tuple[Mapping[str, Any] | None, bool]: 
+ if mapping is None: + return None, False + if not mapping: + return {}, False + + normalized = WorkflowEntry.handle_special_values(dict(mapping)) + if normalized is None: + return None, False + + truncated, is_truncated = self._truncator.truncate_variable_mapping(dict(normalized)) + return truncated, is_truncated + + @staticmethod + def _encode_outputs(outputs: Mapping[str, Any] | None) -> Mapping[str, Any] | None: + if outputs is None: + return None + converter = WorkflowRuntimeTypeConverter() + return converter.to_json_encodable(outputs) def workflow_start_to_stream_response( self, *, task_id: str, - workflow_execution: WorkflowExecution, + workflow_run_id: str, + workflow_id: str, ) -> WorkflowStartStreamResponse: + run_id = self._ensure_workflow_run_id(workflow_run_id) + started_at = naive_utc_now() + self._workflow_started_at = started_at + return WorkflowStartStreamResponse( task_id=task_id, - workflow_run_id=workflow_execution.id_, + workflow_run_id=run_id, data=WorkflowStartStreamResponse.Data( - id=workflow_execution.id_, - workflow_id=workflow_execution.workflow_id, - inputs=workflow_execution.inputs, - created_at=int(workflow_execution.started_at.timestamp()), + id=run_id, + workflow_id=workflow_id, + inputs=self._workflow_inputs, + created_at=int(started_at.timestamp()), ), ) def workflow_finish_to_stream_response( self, *, - session: Session, task_id: str, - workflow_execution: WorkflowExecution, + workflow_id: str, + status: WorkflowExecutionStatus, + graph_runtime_state: GraphRuntimeState, + error: str | None = None, + exceptions_count: int = 0, ) -> WorkflowFinishStreamResponse: - created_by = None + run_id = self._ensure_workflow_run_id() + started_at = self._workflow_started_at + if started_at is None: + raise ValueError( + "workflow_finish_to_stream_response called before workflow_start_to_stream_response", + ) + finished_at = naive_utc_now() + elapsed_time = (finished_at - started_at).total_seconds() + + outputs_mapping = graph_runtime_state.outputs or {} + encoded_outputs = WorkflowRuntimeTypeConverter().to_json_encodable(outputs_mapping) + + created_by: Mapping[str, object] | None user = self._user if isinstance(user, Account): created_by = { @@ -94,38 +227,29 @@ class WorkflowResponseConverter: "name": user.name, "email": user.email, } - elif isinstance(user, EndUser): + else: created_by = { "id": user.id, "user": user.session_id, } - else: - raise NotImplementedError(f"User type not supported: {type(user)}") - - # Handle the case where finished_at is None by using current time as default - finished_at_timestamp = ( - int(workflow_execution.finished_at.timestamp()) - if workflow_execution.finished_at - else int(datetime.now(UTC).timestamp()) - ) return WorkflowFinishStreamResponse( task_id=task_id, - workflow_run_id=workflow_execution.id_, + workflow_run_id=run_id, data=WorkflowFinishStreamResponse.Data( - id=workflow_execution.id_, - workflow_id=workflow_execution.workflow_id, - status=workflow_execution.status, - outputs=WorkflowRuntimeTypeConverter().to_json_encodable(workflow_execution.outputs), - error=workflow_execution.error_message, - elapsed_time=workflow_execution.elapsed_time, - total_tokens=workflow_execution.total_tokens, - total_steps=workflow_execution.total_steps, + id=run_id, + workflow_id=workflow_id, + status=status.value, + outputs=encoded_outputs, + error=error, + elapsed_time=elapsed_time, + total_tokens=graph_runtime_state.total_tokens, + total_steps=graph_runtime_state.node_run_steps, created_by=created_by, - 
created_at=int(workflow_execution.started_at.timestamp()), - finished_at=finished_at_timestamp, - files=self.fetch_files_from_node_outputs(workflow_execution.outputs), - exceptions_count=workflow_execution.exceptions_count, + created_at=int(started_at.timestamp()), + finished_at=int(finished_at.timestamp()), + files=self.fetch_files_from_node_outputs(outputs_mapping), + exceptions_count=exceptions_count, ), ) @@ -134,38 +258,28 @@ class WorkflowResponseConverter: *, event: QueueNodeStartedEvent, task_id: str, - workflow_node_execution: WorkflowNodeExecution, ) -> NodeStartStreamResponse | None: - if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}: - return None - if not workflow_node_execution.workflow_execution_id: + if event.node_type in {NodeType.ITERATION, NodeType.LOOP}: return None + run_id = self._ensure_workflow_run_id() + snapshot = self._store_snapshot(event) response = NodeStartStreamResponse( task_id=task_id, - workflow_run_id=workflow_node_execution.workflow_execution_id, + workflow_run_id=run_id, data=NodeStartStreamResponse.Data( - id=workflow_node_execution.id, - node_id=workflow_node_execution.node_id, - node_type=workflow_node_execution.node_type, - title=workflow_node_execution.title, - index=workflow_node_execution.index, - predecessor_node_id=workflow_node_execution.predecessor_node_id, - inputs=workflow_node_execution.get_response_inputs(), - inputs_truncated=workflow_node_execution.inputs_truncated, - created_at=int(workflow_node_execution.created_at.timestamp()), - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, - parent_parallel_id=event.parent_parallel_id, - parent_parallel_start_node_id=event.parent_parallel_start_node_id, + id=event.node_execution_id, + node_id=event.node_id, + node_type=event.node_type, + title=snapshot.title, + index=snapshot.index, + created_at=int(snapshot.start_at.timestamp()), iteration_id=event.in_iteration_id, loop_id=event.in_loop_id, - parallel_run_id=event.parallel_mode_run_id, agent_strategy=event.agent_strategy, ), ) - # extras logic if event.node_type == NodeType.TOOL: response.data.extras["icon"] = ToolManager.get_tool_icon( tenant_id=self._application_generate_entity.app_config.tenant_id, @@ -189,41 +303,54 @@ class WorkflowResponseConverter: *, event: QueueNodeSucceededEvent | QueueNodeFailedEvent | QueueNodeExceptionEvent, task_id: str, - workflow_node_execution: WorkflowNodeExecution, ) -> NodeFinishStreamResponse | None: - if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}: - return None - if not workflow_node_execution.workflow_execution_id: - return None - if not workflow_node_execution.finished_at: + if event.node_type in {NodeType.ITERATION, NodeType.LOOP}: return None + run_id = self._ensure_workflow_run_id() + snapshot = self._pop_snapshot(event.node_execution_id) - json_converter = WorkflowRuntimeTypeConverter() + start_at = snapshot.start_at if snapshot else event.start_at + finished_at = naive_utc_now() + elapsed_time = (finished_at - start_at).total_seconds() + + inputs, inputs_truncated = self._truncate_mapping(event.inputs) + process_data, process_data_truncated = self._truncate_mapping(event.process_data) + encoded_outputs = self._encode_outputs(event.outputs) + outputs, outputs_truncated = self._truncate_mapping(encoded_outputs) + metadata = self._merge_metadata(event.execution_metadata, snapshot) + + if isinstance(event, QueueNodeSucceededEvent): + status = WorkflowNodeExecutionStatus.SUCCEEDED.value + error_message = 
event.error + elif isinstance(event, QueueNodeFailedEvent): + status = WorkflowNodeExecutionStatus.FAILED.value + error_message = event.error + else: + status = WorkflowNodeExecutionStatus.EXCEPTION.value + error_message = event.error return NodeFinishStreamResponse( task_id=task_id, - workflow_run_id=workflow_node_execution.workflow_execution_id, + workflow_run_id=run_id, data=NodeFinishStreamResponse.Data( - id=workflow_node_execution.id, - node_id=workflow_node_execution.node_id, - node_type=workflow_node_execution.node_type, - index=workflow_node_execution.index, - title=workflow_node_execution.title, - predecessor_node_id=workflow_node_execution.predecessor_node_id, - inputs=workflow_node_execution.get_response_inputs(), - inputs_truncated=workflow_node_execution.inputs_truncated, - process_data=workflow_node_execution.get_response_process_data(), - process_data_truncated=workflow_node_execution.process_data_truncated, - outputs=json_converter.to_json_encodable(workflow_node_execution.get_response_outputs()), - outputs_truncated=workflow_node_execution.outputs_truncated, - status=workflow_node_execution.status, - error=workflow_node_execution.error, - elapsed_time=workflow_node_execution.elapsed_time, - execution_metadata=workflow_node_execution.metadata, - created_at=int(workflow_node_execution.created_at.timestamp()), - finished_at=int(workflow_node_execution.finished_at.timestamp()), - files=self.fetch_files_from_node_outputs(workflow_node_execution.outputs or {}), - parallel_id=event.parallel_id, + id=event.node_execution_id, + node_id=event.node_id, + node_type=event.node_type, + index=snapshot.index if snapshot else 0, + title=snapshot.title if snapshot else "", + inputs=inputs, + inputs_truncated=inputs_truncated, + process_data=process_data, + process_data_truncated=process_data_truncated, + outputs=outputs, + outputs_truncated=outputs_truncated, + status=status, + error=error_message, + elapsed_time=elapsed_time, + execution_metadata=metadata, + created_at=int(start_at.timestamp()), + finished_at=int(finished_at.timestamp()), + files=self.fetch_files_from_node_outputs(event.outputs or {}), iteration_id=event.in_iteration_id, loop_id=event.in_loop_id, ), @@ -234,44 +361,45 @@ class WorkflowResponseConverter: *, event: QueueNodeRetryEvent, task_id: str, - workflow_node_execution: WorkflowNodeExecution, - ) -> Union[NodeRetryStreamResponse, NodeFinishStreamResponse] | None: - if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}: - return None - if not workflow_node_execution.workflow_execution_id: - return None - if not workflow_node_execution.finished_at: + ) -> NodeRetryStreamResponse | None: + if event.node_type in {NodeType.ITERATION, NodeType.LOOP}: return None + run_id = self._ensure_workflow_run_id() - json_converter = WorkflowRuntimeTypeConverter() + snapshot = self._get_snapshot(event.node_execution_id) + if snapshot is None: + raise AssertionError("node retry event arrived without a stored snapshot") + finished_at = naive_utc_now() + elapsed_time = (finished_at - event.start_at).total_seconds() + + inputs, inputs_truncated = self._truncate_mapping(event.inputs) + process_data, process_data_truncated = self._truncate_mapping(event.process_data) + encoded_outputs = self._encode_outputs(event.outputs) + outputs, outputs_truncated = self._truncate_mapping(encoded_outputs) + metadata = self._merge_metadata(event.execution_metadata, snapshot) return NodeRetryStreamResponse( task_id=task_id, - 
workflow_run_id=workflow_node_execution.workflow_execution_id, + workflow_run_id=run_id, data=NodeRetryStreamResponse.Data( - id=workflow_node_execution.id, - node_id=workflow_node_execution.node_id, - node_type=workflow_node_execution.node_type, - index=workflow_node_execution.index, - title=workflow_node_execution.title, - predecessor_node_id=workflow_node_execution.predecessor_node_id, - inputs=workflow_node_execution.get_response_inputs(), - inputs_truncated=workflow_node_execution.inputs_truncated, - process_data=workflow_node_execution.get_response_process_data(), - process_data_truncated=workflow_node_execution.process_data_truncated, - outputs=json_converter.to_json_encodable(workflow_node_execution.get_response_outputs()), - outputs_truncated=workflow_node_execution.outputs_truncated, - status=workflow_node_execution.status, - error=workflow_node_execution.error, - elapsed_time=workflow_node_execution.elapsed_time, - execution_metadata=workflow_node_execution.metadata, - created_at=int(workflow_node_execution.created_at.timestamp()), - finished_at=int(workflow_node_execution.finished_at.timestamp()), - files=self.fetch_files_from_node_outputs(workflow_node_execution.outputs or {}), - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, - parent_parallel_id=event.parent_parallel_id, - parent_parallel_start_node_id=event.parent_parallel_start_node_id, + id=event.node_execution_id, + node_id=event.node_id, + node_type=event.node_type, + index=snapshot.index, + title=snapshot.title, + inputs=inputs, + inputs_truncated=inputs_truncated, + process_data=process_data, + process_data_truncated=process_data_truncated, + outputs=outputs, + outputs_truncated=outputs_truncated, + status=WorkflowNodeExecutionStatus.RETRY.value, + error=event.error, + elapsed_time=elapsed_time, + execution_metadata=metadata, + created_at=int(snapshot.start_at.timestamp()), + finished_at=int(finished_at.timestamp()), + files=self.fetch_files_from_node_outputs(event.outputs or {}), iteration_id=event.in_iteration_id, loop_id=event.in_loop_id, retry_index=event.retry_index, @@ -379,8 +507,6 @@ class WorkflowResponseConverter: inputs=new_inputs, inputs_truncated=truncated, metadata=event.metadata or {}, - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, ), ) @@ -405,9 +531,6 @@ class WorkflowResponseConverter: pre_loop_output={}, created_at=int(time.time()), extras={}, - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, - parallel_mode_run_id=event.parallel_mode_run_id, ), ) @@ -446,8 +569,6 @@ class WorkflowResponseConverter: execution_metadata=event.metadata, finished_at=int(time.time()), steps=event.steps, - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, ), ) diff --git a/api/core/app/apps/completion/generate_response_converter.py b/api/core/app/apps/completion/generate_response_converter.py index d7e9ebdf24..a4f574642d 100644 --- a/api/core/app/apps/completion/generate_response_converter.py +++ b/api/core/app/apps/completion/generate_response_converter.py @@ -112,7 +112,7 @@ class CompletionAppGenerateResponseConverter(AppGenerateResponseConverter): metadata = {} sub_stream_response_dict["metadata"] = cls._get_simple_metadata(metadata) response_chunk.update(sub_stream_response_dict) - if isinstance(sub_stream_response, ErrorStreamResponse): + elif isinstance(sub_stream_response, ErrorStreamResponse): data = cls._error_to_stream_response(sub_stream_response.err) 
response_chunk.update(data) else: diff --git a/api/core/app/apps/message_based_app_generator.py b/api/core/app/apps/message_based_app_generator.py index 170c6a274b..7a51b8f3a5 100644 --- a/api/core/app/apps/message_based_app_generator.py +++ b/api/core/app/apps/message_based_app_generator.py @@ -207,6 +207,7 @@ class MessageBasedAppGenerator(BaseAppGenerator): from_source=from_source, from_end_user_id=end_user_id, from_account_id=account_id, + app_mode=app_config.app_mode, ) db.session.add(message) diff --git a/api/core/app/apps/pipeline/pipeline_generator.py b/api/core/app/apps/pipeline/pipeline_generator.py index bd077c4cb8..1fb076b685 100644 --- a/api/core/app/apps/pipeline/pipeline_generator.py +++ b/api/core/app/apps/pipeline/pipeline_generator.py @@ -352,6 +352,8 @@ class PipelineGenerator(BaseAppGenerator): "application_generate_entity": application_generate_entity, "workflow_thread_pool_id": workflow_thread_pool_id, "variable_loader": variable_loader, + "workflow_execution_repository": workflow_execution_repository, + "workflow_node_execution_repository": workflow_node_execution_repository, }, ) @@ -367,8 +369,6 @@ class PipelineGenerator(BaseAppGenerator): workflow=workflow, queue_manager=queue_manager, user=user, - workflow_execution_repository=workflow_execution_repository, - workflow_node_execution_repository=workflow_node_execution_repository, stream=streaming, draft_var_saver_factory=draft_var_saver_factory, ) @@ -573,6 +573,8 @@ class PipelineGenerator(BaseAppGenerator): queue_manager: AppQueueManager, context: contextvars.Context, variable_loader: VariableLoader, + workflow_execution_repository: WorkflowExecutionRepository, + workflow_node_execution_repository: WorkflowNodeExecutionRepository, workflow_thread_pool_id: str | None = None, ) -> None: """ @@ -620,6 +622,8 @@ class PipelineGenerator(BaseAppGenerator): variable_loader=variable_loader, workflow=workflow, system_user_id=system_user_id, + workflow_execution_repository=workflow_execution_repository, + workflow_node_execution_repository=workflow_node_execution_repository, ) runner.run() @@ -648,8 +652,6 @@ class PipelineGenerator(BaseAppGenerator): workflow: Workflow, queue_manager: AppQueueManager, user: Union[Account, EndUser], - workflow_execution_repository: WorkflowExecutionRepository, - workflow_node_execution_repository: WorkflowNodeExecutionRepository, draft_var_saver_factory: DraftVariableSaverFactory, stream: bool = False, ) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]: @@ -660,7 +662,6 @@ class PipelineGenerator(BaseAppGenerator): :param queue_manager: queue manager :param user: account or end user :param stream: is stream - :param workflow_node_execution_repository: optional repository for workflow node execution :return: """ # init generate task pipeline @@ -670,8 +671,6 @@ class PipelineGenerator(BaseAppGenerator): queue_manager=queue_manager, user=user, stream=stream, - workflow_node_execution_repository=workflow_node_execution_repository, - workflow_execution_repository=workflow_execution_repository, draft_var_saver_factory=draft_var_saver_factory, ) diff --git a/api/core/app/apps/pipeline/pipeline_runner.py b/api/core/app/apps/pipeline/pipeline_runner.py index a8a7dde2b4..4be9e01fbf 100644 --- a/api/core/app/apps/pipeline/pipeline_runner.py +++ b/api/core/app/apps/pipeline/pipeline_runner.py @@ -11,11 +11,14 @@ from core.app.entities.app_invoke_entities import ( ) from core.variables.variables import RAGPipelineVariable, RAGPipelineVariableInput from 
core.workflow.entities.graph_init_params import GraphInitParams -from core.workflow.entities.graph_runtime_state import GraphRuntimeState -from core.workflow.entities.variable_pool import VariablePool +from core.workflow.enums import WorkflowType from core.workflow.graph import Graph +from core.workflow.graph_engine.layers.persistence import PersistenceWorkflowInfo, WorkflowPersistenceLayer from core.workflow.graph_events import GraphEngineEvent, GraphRunFailedEvent from core.workflow.nodes.node_factory import DifyNodeFactory +from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository +from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.variable_loader import VariableLoader from core.workflow.workflow_entry import WorkflowEntry @@ -40,6 +43,8 @@ class PipelineRunner(WorkflowBasedAppRunner): variable_loader: VariableLoader, workflow: Workflow, system_user_id: str, + workflow_execution_repository: WorkflowExecutionRepository, + workflow_node_execution_repository: WorkflowNodeExecutionRepository, workflow_thread_pool_id: str | None = None, ) -> None: """ @@ -56,6 +61,8 @@ class PipelineRunner(WorkflowBasedAppRunner): self.workflow_thread_pool_id = workflow_thread_pool_id self._workflow = workflow self._sys_user_id = system_user_id + self._workflow_execution_repository = workflow_execution_repository + self._workflow_node_execution_repository = workflow_node_execution_repository def _get_app_id(self) -> str: return self.application_generate_entity.app_config.app_id @@ -163,6 +170,23 @@ class PipelineRunner(WorkflowBasedAppRunner): variable_pool=variable_pool, ) + self._queue_manager.graph_runtime_state = graph_runtime_state + + persistence_layer = WorkflowPersistenceLayer( + application_generate_entity=self.application_generate_entity, + workflow_info=PersistenceWorkflowInfo( + workflow_id=workflow.id, + workflow_type=WorkflowType(workflow.type), + version=workflow.version, + graph_data=workflow.graph_dict, + ), + workflow_execution_repository=self._workflow_execution_repository, + workflow_node_execution_repository=self._workflow_node_execution_repository, + trace_manager=self.application_generate_entity.trace_manager, + ) + + workflow_entry.graph_engine.layer(persistence_layer) + generator = workflow_entry.run() for event in generator: diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py index 45d047434b..f22ef5431e 100644 --- a/api/core/app/apps/workflow/app_generator.py +++ b/api/core/app/apps/workflow/app_generator.py @@ -231,6 +231,8 @@ class WorkflowAppGenerator(BaseAppGenerator): "queue_manager": queue_manager, "context": context, "variable_loader": variable_loader, + "workflow_execution_repository": workflow_execution_repository, + "workflow_node_execution_repository": workflow_node_execution_repository, }, ) @@ -244,8 +246,6 @@ class WorkflowAppGenerator(BaseAppGenerator): workflow=workflow, queue_manager=queue_manager, user=user, - workflow_execution_repository=workflow_execution_repository, - workflow_node_execution_repository=workflow_node_execution_repository, draft_var_saver_factory=draft_var_saver_factory, stream=streaming, ) @@ -424,6 +424,8 @@ class WorkflowAppGenerator(BaseAppGenerator): queue_manager: AppQueueManager, context: contextvars.Context, variable_loader: VariableLoader, + 
workflow_execution_repository: WorkflowExecutionRepository, + workflow_node_execution_repository: WorkflowNodeExecutionRepository, ) -> None: """ Generate worker in a new thread. @@ -465,6 +467,8 @@ class WorkflowAppGenerator(BaseAppGenerator): variable_loader=variable_loader, workflow=workflow, system_user_id=system_user_id, + workflow_execution_repository=workflow_execution_repository, + workflow_node_execution_repository=workflow_node_execution_repository, ) try: @@ -493,8 +497,6 @@ class WorkflowAppGenerator(BaseAppGenerator): workflow: Workflow, queue_manager: AppQueueManager, user: Union[Account, EndUser], - workflow_execution_repository: WorkflowExecutionRepository, - workflow_node_execution_repository: WorkflowNodeExecutionRepository, draft_var_saver_factory: DraftVariableSaverFactory, stream: bool = False, ) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]: @@ -514,8 +516,6 @@ class WorkflowAppGenerator(BaseAppGenerator): workflow=workflow, queue_manager=queue_manager, user=user, - workflow_execution_repository=workflow_execution_repository, - workflow_node_execution_repository=workflow_node_execution_repository, draft_var_saver_factory=draft_var_saver_factory, stream=stream, ) diff --git a/api/core/app/apps/workflow/app_runner.py b/api/core/app/apps/workflow/app_runner.py index 943ae8ab4e..3c9bf176b5 100644 --- a/api/core/app/apps/workflow/app_runner.py +++ b/api/core/app/apps/workflow/app_runner.py @@ -5,12 +5,13 @@ from typing import cast from core.app.apps.base_app_queue_manager import AppQueueManager from core.app.apps.workflow.app_config_manager import WorkflowAppConfig from core.app.apps.workflow_app_runner import WorkflowBasedAppRunner -from core.app.entities.app_invoke_entities import ( - InvokeFrom, - WorkflowAppGenerateEntity, -) -from core.workflow.entities import GraphRuntimeState, VariablePool +from core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity +from core.workflow.enums import WorkflowType from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel +from core.workflow.graph_engine.layers.persistence import PersistenceWorkflowInfo, WorkflowPersistenceLayer +from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository +from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.variable_loader import VariableLoader from core.workflow.workflow_entry import WorkflowEntry @@ -34,6 +35,8 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): variable_loader: VariableLoader, workflow: Workflow, system_user_id: str, + workflow_execution_repository: WorkflowExecutionRepository, + workflow_node_execution_repository: WorkflowNodeExecutionRepository, ): super().__init__( queue_manager=queue_manager, @@ -43,6 +46,8 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): self.application_generate_entity = application_generate_entity self._workflow = workflow self._sys_user_id = system_user_id + self._workflow_execution_repository = workflow_execution_repository + self._workflow_node_execution_repository = workflow_node_execution_repository def run(self): """ @@ -51,6 +56,14 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): app_config = self.application_generate_entity.app_config app_config = cast(WorkflowAppConfig, app_config) + system_inputs = SystemVariable( + 
files=self.application_generate_entity.files, + user_id=self._sys_user_id, + app_id=app_config.app_id, + workflow_id=app_config.workflow_id, + workflow_execution_id=self.application_generate_entity.workflow_execution_id, + ) + # if only single iteration or single loop run is requested if self.application_generate_entity.single_iteration_run or self.application_generate_entity.single_loop_run: graph, variable_pool, graph_runtime_state = self._prepare_single_node_execution( @@ -60,18 +73,9 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): ) else: inputs = self.application_generate_entity.inputs - files = self.application_generate_entity.files # Create a variable pool. - system_inputs = SystemVariable( - files=files, - user_id=self._sys_user_id, - app_id=app_config.app_id, - workflow_id=app_config.workflow_id, - workflow_execution_id=self.application_generate_entity.workflow_execution_id, - ) - variable_pool = VariablePool( system_variables=system_inputs, user_inputs=inputs, @@ -96,6 +100,8 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): channel_key = f"workflow:{task_id}:commands" command_channel = RedisChannel(redis_client, channel_key) + self._queue_manager.graph_runtime_state = graph_runtime_state + workflow_entry = WorkflowEntry( tenant_id=self._workflow.tenant_id, app_id=self._workflow.app_id, @@ -115,6 +121,21 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): command_channel=command_channel, ) + persistence_layer = WorkflowPersistenceLayer( + application_generate_entity=self.application_generate_entity, + workflow_info=PersistenceWorkflowInfo( + workflow_id=self._workflow.id, + workflow_type=WorkflowType(self._workflow.type), + version=self._workflow.version, + graph_data=self._workflow.graph_dict, + ), + workflow_execution_repository=self._workflow_execution_repository, + workflow_node_execution_repository=self._workflow_node_execution_repository, + trace_manager=self.application_generate_entity.trace_manager, + ) + + workflow_entry.graph_engine.layer(persistence_layer) + generator = workflow_entry.run() for event in generator: diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py index 56b0d91141..08e2fce48c 100644 --- a/api/core/app/apps/workflow/generate_task_pipeline.py +++ b/api/core/app/apps/workflow/generate_task_pipeline.py @@ -8,11 +8,9 @@ from sqlalchemy.orm import Session from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME from core.app.apps.base_app_queue_manager import AppQueueManager +from core.app.apps.common.graph_runtime_state_support import GraphRuntimeStateSupport from core.app.apps.common.workflow_response_converter import WorkflowResponseConverter -from core.app.entities.app_invoke_entities import ( - InvokeFrom, - WorkflowAppGenerateEntity, -) +from core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity from core.app.entities.queue_entities import ( AppQueueEvent, MessageQueueMessage, @@ -53,27 +51,20 @@ from core.app.entities.task_entities import ( from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk from core.ops.ops_trace_manager import TraceQueueManager -from core.workflow.entities import GraphRuntimeState, WorkflowExecution -from core.workflow.enums import WorkflowExecutionStatus, WorkflowType +from core.workflow.enums import WorkflowExecutionStatus from core.workflow.repositories.draft_variable_repository import 
DraftVariableSaverFactory -from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository -from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository +from core.workflow.runtime import GraphRuntimeState from core.workflow.system_variable import SystemVariable -from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager from extensions.ext_database import db -from models.account import Account +from models import Account from models.enums import CreatorUserRole from models.model import EndUser -from models.workflow import ( - Workflow, - WorkflowAppLog, - WorkflowAppLogCreatedFrom, -) +from models.workflow import Workflow, WorkflowAppLog, WorkflowAppLogCreatedFrom logger = logging.getLogger(__name__) -class WorkflowAppGenerateTaskPipeline: +class WorkflowAppGenerateTaskPipeline(GraphRuntimeStateSupport): """ WorkflowAppGenerateTaskPipeline is a class that generate stream output and state management for Application. """ @@ -85,8 +76,6 @@ class WorkflowAppGenerateTaskPipeline: queue_manager: AppQueueManager, user: Union[Account, EndUser], stream: bool, - workflow_execution_repository: WorkflowExecutionRepository, - workflow_node_execution_repository: WorkflowNodeExecutionRepository, draft_var_saver_factory: DraftVariableSaverFactory, ): self._base_task_pipeline = BasedGenerateTaskPipeline( @@ -99,42 +88,30 @@ class WorkflowAppGenerateTaskPipeline: self._user_id = user.id user_session_id = user.session_id self._created_by_role = CreatorUserRole.END_USER - elif isinstance(user, Account): + else: self._user_id = user.id user_session_id = user.id self._created_by_role = CreatorUserRole.ACCOUNT - else: - raise ValueError(f"Invalid user type: {type(user)}") - - self._workflow_cycle_manager = WorkflowCycleManager( - application_generate_entity=application_generate_entity, - workflow_system_variables=SystemVariable( - files=application_generate_entity.files, - user_id=user_session_id, - app_id=application_generate_entity.app_config.app_id, - workflow_id=workflow.id, - workflow_execution_id=application_generate_entity.workflow_execution_id, - ), - workflow_info=CycleManagerWorkflowInfo( - workflow_id=workflow.id, - workflow_type=WorkflowType(workflow.type), - version=workflow.version, - graph_data=workflow.graph_dict, - ), - workflow_execution_repository=workflow_execution_repository, - workflow_node_execution_repository=workflow_node_execution_repository, - ) - - self._workflow_response_converter = WorkflowResponseConverter( - application_generate_entity=application_generate_entity, - user=user, - ) self._application_generate_entity = application_generate_entity self._workflow_features_dict = workflow.features_dict - self._workflow_run_id = "" + self._workflow_execution_id = "" self._invoke_from = queue_manager.invoke_from self._draft_var_saver_factory = draft_var_saver_factory + self._workflow = workflow + self._workflow_system_variables = SystemVariable( + files=application_generate_entity.files, + user_id=user_session_id, + app_id=application_generate_entity.app_config.app_id, + workflow_id=workflow.id, + workflow_execution_id=application_generate_entity.workflow_execution_id, + ) + self._workflow_response_converter = WorkflowResponseConverter( + application_generate_entity=application_generate_entity, + user=user, + system_variables=self._workflow_system_variables, + ) + self._graph_runtime_state: GraphRuntimeState | None = self._base_task_pipeline.queue_manager.graph_runtime_state 
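# Reviewer sketch (illustration only, not part of the patch): with the cycle
# manager removed, the pipeline now reads its GraphRuntimeState straight off
# the queue manager at construction time, so the app runner must attach the
# state before the pipeline is built. Hypothetical wiring, assuming the
# runner-side variables introduced elsewhere in this diff:
#
#     queue_manager.graph_runtime_state = graph_runtime_state  # set in app_runner
#     pipeline = WorkflowAppGenerateTaskPipeline(
#         application_generate_entity=entity,
#         workflow=workflow,
#         queue_manager=queue_manager,
#         user=user,
#         stream=True,
#         draft_var_saver_factory=factory,
#     )
#     # pipeline._graph_runtime_state is seeded without any repository access;
#     # persistence is handled by WorkflowPersistenceLayer on the graph engine.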
def process(self) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]: """ @@ -261,15 +238,9 @@ class WorkflowAppGenerateTaskPipeline: def _ensure_workflow_initialized(self): """Fluent validation for workflow state.""" - if not self._workflow_run_id: + if not self._workflow_execution_id: raise ValueError("workflow run not initialized.") - def _ensure_graph_runtime_initialized(self, graph_runtime_state: GraphRuntimeState | None) -> GraphRuntimeState: - """Fluent validation for graph runtime state.""" - if not graph_runtime_state: - raise ValueError("graph runtime state not initialized.") - return graph_runtime_state - def _handle_ping_event(self, event: QueuePingEvent, **kwargs) -> Generator[PingStreamResponse, None, None]: """Handle ping events.""" yield self._base_task_pipeline.ping_stream_response() @@ -283,12 +254,14 @@ class WorkflowAppGenerateTaskPipeline: self, event: QueueWorkflowStartedEvent, **kwargs ) -> Generator[StreamResponse, None, None]: """Handle workflow started events.""" - # init workflow run - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_start() - self._workflow_run_id = workflow_execution.id_ + runtime_state = self._resolve_graph_runtime_state() + + run_id = self._extract_workflow_run_id(runtime_state) + self._workflow_execution_id = run_id start_resp = self._workflow_response_converter.workflow_start_to_stream_response( task_id=self._application_generate_entity.task_id, - workflow_execution=workflow_execution, + workflow_run_id=run_id, + workflow_id=self._workflow.id, ) yield start_resp @@ -296,14 +269,9 @@ class WorkflowAppGenerateTaskPipeline: """Handle node retry events.""" self._ensure_workflow_initialized() - workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_retried( - workflow_execution_id=self._workflow_run_id, - event=event, - ) response = self._workflow_response_converter.workflow_node_retry_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, ) if response: @@ -315,13 +283,9 @@ class WorkflowAppGenerateTaskPipeline: """Handle node started events.""" self._ensure_workflow_initialized() - workflow_node_execution = self._workflow_cycle_manager.handle_node_execution_start( - workflow_execution_id=self._workflow_run_id, event=event - ) node_start_response = self._workflow_response_converter.workflow_node_start_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, ) if node_start_response: @@ -331,14 +295,12 @@ class WorkflowAppGenerateTaskPipeline: self, event: QueueNodeSucceededEvent, **kwargs ) -> Generator[StreamResponse, None, None]: """Handle node succeeded events.""" - workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_success(event=event) node_success_response = self._workflow_response_converter.workflow_node_finish_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, ) - self._save_output_for_event(event, workflow_node_execution.id) + self._save_output_for_event(event, event.node_execution_id) if node_success_response: yield node_success_response @@ -349,17 +311,13 @@ class WorkflowAppGenerateTaskPipeline: **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle various node failure events.""" - workflow_node_execution = 
self._workflow_cycle_manager.handle_workflow_node_execution_failed( - event=event, - ) node_failed_response = self._workflow_response_converter.workflow_node_finish_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, ) if isinstance(event, QueueNodeExceptionEvent): - self._save_output_for_event(event, workflow_node_execution.id) + self._save_output_for_event(event, event.node_execution_id) if node_failed_response: yield node_failed_response @@ -372,7 +330,7 @@ class WorkflowAppGenerateTaskPipeline: iter_start_resp = self._workflow_response_converter.workflow_iteration_start_to_stream_response( task_id=self._application_generate_entity.task_id, - workflow_execution_id=self._workflow_run_id, + workflow_execution_id=self._workflow_execution_id, event=event, ) yield iter_start_resp @@ -385,7 +343,7 @@ class WorkflowAppGenerateTaskPipeline: iter_next_resp = self._workflow_response_converter.workflow_iteration_next_to_stream_response( task_id=self._application_generate_entity.task_id, - workflow_execution_id=self._workflow_run_id, + workflow_execution_id=self._workflow_execution_id, event=event, ) yield iter_next_resp @@ -398,7 +356,7 @@ class WorkflowAppGenerateTaskPipeline: iter_finish_resp = self._workflow_response_converter.workflow_iteration_completed_to_stream_response( task_id=self._application_generate_entity.task_id, - workflow_execution_id=self._workflow_run_id, + workflow_execution_id=self._workflow_execution_id, event=event, ) yield iter_finish_resp @@ -409,7 +367,7 @@ class WorkflowAppGenerateTaskPipeline: loop_start_resp = self._workflow_response_converter.workflow_loop_start_to_stream_response( task_id=self._application_generate_entity.task_id, - workflow_execution_id=self._workflow_run_id, + workflow_execution_id=self._workflow_execution_id, event=event, ) yield loop_start_resp @@ -420,7 +378,7 @@ class WorkflowAppGenerateTaskPipeline: loop_next_resp = self._workflow_response_converter.workflow_loop_next_to_stream_response( task_id=self._application_generate_entity.task_id, - workflow_execution_id=self._workflow_run_id, + workflow_execution_id=self._workflow_execution_id, event=event, ) yield loop_next_resp @@ -433,7 +391,7 @@ class WorkflowAppGenerateTaskPipeline: loop_finish_resp = self._workflow_response_converter.workflow_loop_completed_to_stream_response( task_id=self._application_generate_entity.task_id, - workflow_execution_id=self._workflow_run_id, + workflow_execution_id=self._workflow_execution_id, event=event, ) yield loop_finish_resp @@ -442,33 +400,22 @@ class WorkflowAppGenerateTaskPipeline: self, event: QueueWorkflowSucceededEvent, *, - graph_runtime_state: GraphRuntimeState | None = None, trace_manager: TraceQueueManager | None = None, **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle workflow succeeded events.""" + _ = trace_manager self._ensure_workflow_initialized() - validated_state = self._ensure_graph_runtime_initialized(graph_runtime_state) + validated_state = self._ensure_graph_runtime_initialized() + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_id=self._workflow.id, + status=WorkflowExecutionStatus.SUCCEEDED, + graph_runtime_state=validated_state, + ) with self._database_session() as session: - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_success( - workflow_run_id=self._workflow_run_id, - 
total_tokens=validated_state.total_tokens, - total_steps=validated_state.node_run_steps, - outputs=event.outputs, - conversation_id=None, - trace_manager=trace_manager, - external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), - ) - - # save workflow app log - self._save_workflow_app_log(session=session, workflow_execution=workflow_execution) - - workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_execution=workflow_execution, - ) + self._save_workflow_app_log(session=session, workflow_run_id=self._workflow_execution_id) yield workflow_finish_resp @@ -476,34 +423,23 @@ class WorkflowAppGenerateTaskPipeline: self, event: QueueWorkflowPartialSuccessEvent, *, - graph_runtime_state: GraphRuntimeState | None = None, trace_manager: TraceQueueManager | None = None, **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle workflow partial success events.""" + _ = trace_manager self._ensure_workflow_initialized() - validated_state = self._ensure_graph_runtime_initialized(graph_runtime_state) + validated_state = self._ensure_graph_runtime_initialized() + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_id=self._workflow.id, + status=WorkflowExecutionStatus.PARTIAL_SUCCEEDED, + graph_runtime_state=validated_state, + exceptions_count=event.exceptions_count, + ) with self._database_session() as session: - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_partial_success( - workflow_run_id=self._workflow_run_id, - total_tokens=validated_state.total_tokens, - total_steps=validated_state.node_run_steps, - outputs=event.outputs, - exceptions_count=event.exceptions_count, - conversation_id=None, - trace_manager=trace_manager, - external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), - ) - - # save workflow app log - self._save_workflow_app_log(session=session, workflow_execution=workflow_execution) - - workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_execution=workflow_execution, - ) + self._save_workflow_app_log(session=session, workflow_run_id=self._workflow_execution_id) yield workflow_finish_resp @@ -511,37 +447,33 @@ class WorkflowAppGenerateTaskPipeline: self, event: Union[QueueWorkflowFailedEvent, QueueStopEvent], *, - graph_runtime_state: GraphRuntimeState | None = None, trace_manager: TraceQueueManager | None = None, **kwargs, ) -> Generator[StreamResponse, None, None]: """Handle workflow failed and stop events.""" + _ = trace_manager self._ensure_workflow_initialized() - validated_state = self._ensure_graph_runtime_initialized(graph_runtime_state) + validated_state = self._ensure_graph_runtime_initialized() + + if isinstance(event, QueueWorkflowFailedEvent): + status = WorkflowExecutionStatus.FAILED + error = event.error + exceptions_count = event.exceptions_count + else: + status = WorkflowExecutionStatus.STOPPED + error = event.get_stop_reason() + exceptions_count = 0 + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_id=self._workflow.id, + status=status, + graph_runtime_state=validated_state, + error=error, + exceptions_count=exceptions_count, + ) with 
self._database_session() as session: - workflow_execution = self._workflow_cycle_manager.handle_workflow_run_failed( - workflow_run_id=self._workflow_run_id, - total_tokens=validated_state.total_tokens, - total_steps=validated_state.node_run_steps, - status=WorkflowExecutionStatus.FAILED - if isinstance(event, QueueWorkflowFailedEvent) - else WorkflowExecutionStatus.STOPPED, - error_message=event.error if isinstance(event, QueueWorkflowFailedEvent) else event.get_stop_reason(), - conversation_id=None, - trace_manager=trace_manager, - exceptions_count=event.exceptions_count if isinstance(event, QueueWorkflowFailedEvent) else 0, - external_trace_id=self._application_generate_entity.extras.get("external_trace_id"), - ) - - # save workflow app log - self._save_workflow_app_log(session=session, workflow_execution=workflow_execution) - - workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_execution=workflow_execution, - ) + self._save_workflow_app_log(session=session, workflow_run_id=self._workflow_execution_id) yield workflow_finish_resp @@ -601,7 +533,6 @@ class WorkflowAppGenerateTaskPipeline: self, event: AppQueueEvent, *, - graph_runtime_state: GraphRuntimeState | None = None, tts_publisher: AppGeneratorTTSPublisher | None = None, trace_manager: TraceQueueManager | None = None, queue_message: Union[WorkflowQueueMessage, MessageQueueMessage] | None = None, @@ -614,7 +545,6 @@ class WorkflowAppGenerateTaskPipeline: if handler := handlers.get(event_type): yield from handler( event, - graph_runtime_state=graph_runtime_state, tts_publisher=tts_publisher, trace_manager=trace_manager, queue_message=queue_message, @@ -631,7 +561,6 @@ class WorkflowAppGenerateTaskPipeline: ): yield from self._handle_node_failed_events( event, - graph_runtime_state=graph_runtime_state, tts_publisher=tts_publisher, trace_manager=trace_manager, queue_message=queue_message, @@ -642,7 +571,6 @@ class WorkflowAppGenerateTaskPipeline: if isinstance(event, (QueueWorkflowFailedEvent, QueueStopEvent)): yield from self._handle_workflow_failed_and_stop_events( event, - graph_runtime_state=graph_runtime_state, tts_publisher=tts_publisher, trace_manager=trace_manager, queue_message=queue_message, @@ -661,15 +589,12 @@ class WorkflowAppGenerateTaskPipeline: Process stream response using elegant Fluent Python patterns. Maintains exact same functionality as original 44-if-statement version. 
""" - # Initialize graph runtime state - graph_runtime_state = None - for queue_message in self._base_task_pipeline.queue_manager.listen(): event = queue_message.event match event: case QueueWorkflowStartedEvent(): - graph_runtime_state = event.graph_runtime_state + self._resolve_graph_runtime_state() yield from self._handle_workflow_started_event(event) case QueueTextChunkEvent(): @@ -681,12 +606,19 @@ class WorkflowAppGenerateTaskPipeline: yield from self._handle_error_event(event) break + case QueueWorkflowFailedEvent(): + yield from self._handle_workflow_failed_and_stop_events(event) + break + + case QueueStopEvent(): + yield from self._handle_workflow_failed_and_stop_events(event) + break + # Handle all other events through elegant dispatch case _: if responses := list( self._dispatch_event( event, - graph_runtime_state=graph_runtime_state, tts_publisher=tts_publisher, trace_manager=trace_manager, queue_message=queue_message, @@ -697,7 +629,7 @@ class WorkflowAppGenerateTaskPipeline: if tts_publisher: tts_publisher.publish(None) - def _save_workflow_app_log(self, *, session: Session, workflow_execution: WorkflowExecution): + def _save_workflow_app_log(self, *, session: Session, workflow_run_id: str | None): invoke_from = self._application_generate_entity.invoke_from if invoke_from == InvokeFrom.SERVICE_API: created_from = WorkflowAppLogCreatedFrom.SERVICE_API @@ -709,11 +641,14 @@ class WorkflowAppGenerateTaskPipeline: # not save log for debugging return + if not workflow_run_id: + return + workflow_app_log = WorkflowAppLog() workflow_app_log.tenant_id = self._application_generate_entity.app_config.tenant_id workflow_app_log.app_id = self._application_generate_entity.app_config.app_id - workflow_app_log.workflow_id = workflow_execution.workflow_id - workflow_app_log.workflow_run_id = workflow_execution.id_ + workflow_app_log.workflow_id = self._workflow.id + workflow_app_log.workflow_run_id = workflow_run_id workflow_app_log.created_from = created_from.value workflow_app_log.created_by_role = self._created_by_role workflow_app_log.created_by = self._user_id diff --git a/api/core/app/apps/workflow_app_runner.py b/api/core/app/apps/workflow_app_runner.py index 68eb455d26..5e2bd17f8c 100644 --- a/api/core/app/apps/workflow_app_runner.py +++ b/api/core/app/apps/workflow_app_runner.py @@ -25,7 +25,7 @@ from core.app.entities.queue_entities import ( QueueWorkflowStartedEvent, QueueWorkflowSucceededEvent, ) -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.graph import Graph from core.workflow.graph_events import ( GraphEngineEvent, @@ -54,6 +54,7 @@ from core.workflow.graph_events.graph import GraphRunAbortedEvent from core.workflow.nodes import NodeType from core.workflow.nodes.node_factory import DifyNodeFactory from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader, load_into_variable_pool from core.workflow.workflow_entry import WorkflowEntry @@ -346,9 +347,7 @@ class WorkflowBasedAppRunner: :param event: event """ if isinstance(event, GraphRunStartedEvent): - self._publish_event( - QueueWorkflowStartedEvent(graph_runtime_state=workflow_entry.graph_engine.graph_runtime_state) - ) + self._publish_event(QueueWorkflowStartedEvent()) elif isinstance(event, 
GraphRunSucceededEvent): self._publish_event(QueueWorkflowSucceededEvent(outputs=event.outputs)) elif isinstance(event, GraphRunPartialSucceededEvent): @@ -372,7 +371,6 @@ class WorkflowBasedAppRunner: node_title=event.node_title, node_type=event.node_type, start_at=event.start_at, - predecessor_node_id=event.predecessor_node_id, in_iteration_id=event.in_iteration_id, in_loop_id=event.in_loop_id, inputs=inputs, @@ -393,7 +391,6 @@ class WorkflowBasedAppRunner: node_title=event.node_title, node_type=event.node_type, start_at=event.start_at, - predecessor_node_id=event.predecessor_node_id, in_iteration_id=event.in_iteration_id, in_loop_id=event.in_loop_id, agent_strategy=event.agent_strategy, @@ -494,7 +491,6 @@ class WorkflowBasedAppRunner: start_at=event.start_at, node_run_index=workflow_entry.graph_engine.graph_runtime_state.node_run_steps, inputs=event.inputs, - predecessor_node_id=event.predecessor_node_id, metadata=event.metadata, ) ) @@ -536,7 +532,6 @@ class WorkflowBasedAppRunner: start_at=event.start_at, node_run_index=workflow_entry.graph_engine.graph_runtime_state.node_run_steps, inputs=event.inputs, - predecessor_node_id=event.predecessor_node_id, metadata=event.metadata, ) ) diff --git a/api/core/app/entities/queue_entities.py b/api/core/app/entities/queue_entities.py index 76d22d8ac3..77d6bf03b4 100644 --- a/api/core/app/entities/queue_entities.py +++ b/api/core/app/entities/queue_entities.py @@ -3,11 +3,11 @@ from datetime import datetime from enum import StrEnum, auto from typing import Any -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk from core.rag.entities.citation_metadata import RetrievalSourceMetadata -from core.workflow.entities import AgentNodeStrategyInit, GraphRuntimeState +from core.workflow.entities import AgentNodeStrategyInit from core.workflow.enums import WorkflowNodeExecutionMetadataKey from core.workflow.nodes import NodeType @@ -54,6 +54,7 @@ class AppQueueEvent(BaseModel): """ event: QueueEvent + model_config = ConfigDict(arbitrary_types_allowed=True) class QueueLLMChunkEvent(AppQueueEvent): @@ -80,7 +81,6 @@ class QueueIterationStartEvent(AppQueueEvent): node_run_index: int inputs: Mapping[str, object] = Field(default_factory=dict) - predecessor_node_id: str | None = None metadata: Mapping[str, object] = Field(default_factory=dict) @@ -132,19 +132,10 @@ class QueueLoopStartEvent(AppQueueEvent): node_id: str node_type: NodeType node_title: str - parallel_id: str | None = None - """parallel id if node is in parallel""" - parallel_start_node_id: str | None = None - """parallel start node id if node is in parallel""" - parent_parallel_id: str | None = None - """parent parallel id if node is in parallel""" - parent_parallel_start_node_id: str | None = None - """parent parallel start node id if node is in parallel""" start_at: datetime node_run_index: int inputs: Mapping[str, object] = Field(default_factory=dict) - predecessor_node_id: str | None = None metadata: Mapping[str, object] = Field(default_factory=dict) @@ -160,16 +151,6 @@ class QueueLoopNextEvent(AppQueueEvent): node_id: str node_type: NodeType node_title: str - parallel_id: str | None = None - """parallel id if node is in parallel""" - parallel_start_node_id: str | None = None - """parallel start node id if node is in parallel""" - parent_parallel_id: str | None = None - """parent parallel id if node is in parallel""" - parent_parallel_start_node_id: str | None = None - 
"""parent parallel start node id if node is in parallel""" - parallel_mode_run_id: str | None = None - """iteration run in parallel mode run id""" node_run_index: int output: Any = None # output for the current loop @@ -185,14 +166,6 @@ class QueueLoopCompletedEvent(AppQueueEvent): node_id: str node_type: NodeType node_title: str - parallel_id: str | None = None - """parallel id if node is in parallel""" - parallel_start_node_id: str | None = None - """parallel start node id if node is in parallel""" - parent_parallel_id: str | None = None - """parent parallel id if node is in parallel""" - parent_parallel_start_node_id: str | None = None - """parent parallel start node id if node is in parallel""" start_at: datetime node_run_index: int @@ -285,12 +258,9 @@ class QueueAdvancedChatMessageEndEvent(AppQueueEvent): class QueueWorkflowStartedEvent(AppQueueEvent): - """ - QueueWorkflowStartedEvent entity - """ + """QueueWorkflowStartedEvent entity.""" event: QueueEvent = QueueEvent.WORKFLOW_STARTED - graph_runtime_state: GraphRuntimeState class QueueWorkflowSucceededEvent(AppQueueEvent): @@ -334,15 +304,9 @@ class QueueNodeStartedEvent(AppQueueEvent): node_title: str node_type: NodeType node_run_index: int = 1 # FIXME(-LAN-): may not used - predecessor_node_id: str | None = None - parallel_id: str | None = None - parallel_start_node_id: str | None = None - parent_parallel_id: str | None = None - parent_parallel_start_node_id: str | None = None in_iteration_id: str | None = None in_loop_id: str | None = None start_at: datetime - parallel_mode_run_id: str | None = None agent_strategy: AgentNodeStrategyInit | None = None # FIXME(-LAN-): only for ToolNode, need to refactor @@ -360,14 +324,6 @@ class QueueNodeSucceededEvent(AppQueueEvent): node_execution_id: str node_id: str node_type: NodeType - parallel_id: str | None = None - """parallel id if node is in parallel""" - parallel_start_node_id: str | None = None - """parallel start node id if node is in parallel""" - parent_parallel_id: str | None = None - """parent parallel id if node is in parallel""" - parent_parallel_start_node_id: str | None = None - """parent parallel start node id if node is in parallel""" in_iteration_id: str | None = None """iteration id if node is in iteration""" in_loop_id: str | None = None @@ -423,14 +379,6 @@ class QueueNodeExceptionEvent(AppQueueEvent): node_execution_id: str node_id: str node_type: NodeType - parallel_id: str | None = None - """parallel id if node is in parallel""" - parallel_start_node_id: str | None = None - """parallel start node id if node is in parallel""" - parent_parallel_id: str | None = None - """parent parallel id if node is in parallel""" - parent_parallel_start_node_id: str | None = None - """parent parallel start node id if node is in parallel""" in_iteration_id: str | None = None """iteration id if node is in iteration""" in_loop_id: str | None = None @@ -455,7 +403,6 @@ class QueueNodeFailedEvent(AppQueueEvent): node_execution_id: str node_id: str node_type: NodeType - parallel_id: str | None = None in_iteration_id: str | None = None """iteration id if node is in iteration""" in_loop_id: str | None = None diff --git a/api/core/app/entities/task_entities.py b/api/core/app/entities/task_entities.py index 31dc1eea89..72a92add04 100644 --- a/api/core/app/entities/task_entities.py +++ b/api/core/app/entities/task_entities.py @@ -257,13 +257,8 @@ class NodeStartStreamResponse(StreamResponse): inputs_truncated: bool = False created_at: int extras: dict[str, object] = 
Field(default_factory=dict) - parallel_id: str | None = None - parallel_start_node_id: str | None = None - parent_parallel_id: str | None = None - parent_parallel_start_node_id: str | None = None iteration_id: str | None = None loop_id: str | None = None - parallel_run_id: str | None = None agent_strategy: AgentNodeStrategyInit | None = None event: StreamEvent = StreamEvent.NODE_STARTED @@ -285,10 +280,6 @@ class NodeStartStreamResponse(StreamResponse): "inputs": None, "created_at": self.data.created_at, "extras": {}, - "parallel_id": self.data.parallel_id, - "parallel_start_node_id": self.data.parallel_start_node_id, - "parent_parallel_id": self.data.parent_parallel_id, - "parent_parallel_start_node_id": self.data.parent_parallel_start_node_id, "iteration_id": self.data.iteration_id, "loop_id": self.data.loop_id, }, @@ -324,10 +315,6 @@ class NodeFinishStreamResponse(StreamResponse): created_at: int finished_at: int files: Sequence[Mapping[str, Any]] | None = [] - parallel_id: str | None = None - parallel_start_node_id: str | None = None - parent_parallel_id: str | None = None - parent_parallel_start_node_id: str | None = None iteration_id: str | None = None loop_id: str | None = None @@ -357,10 +344,6 @@ class NodeFinishStreamResponse(StreamResponse): "created_at": self.data.created_at, "finished_at": self.data.finished_at, "files": [], - "parallel_id": self.data.parallel_id, - "parallel_start_node_id": self.data.parallel_start_node_id, - "parent_parallel_id": self.data.parent_parallel_id, - "parent_parallel_start_node_id": self.data.parent_parallel_start_node_id, "iteration_id": self.data.iteration_id, "loop_id": self.data.loop_id, }, @@ -396,10 +379,6 @@ class NodeRetryStreamResponse(StreamResponse): created_at: int finished_at: int files: Sequence[Mapping[str, Any]] | None = [] - parallel_id: str | None = None - parallel_start_node_id: str | None = None - parent_parallel_id: str | None = None - parent_parallel_start_node_id: str | None = None iteration_id: str | None = None loop_id: str | None = None retry_index: int = 0 @@ -430,10 +409,6 @@ class NodeRetryStreamResponse(StreamResponse): "created_at": self.data.created_at, "finished_at": self.data.finished_at, "files": [], - "parallel_id": self.data.parallel_id, - "parallel_start_node_id": self.data.parallel_start_node_id, - "parent_parallel_id": self.data.parent_parallel_id, - "parent_parallel_start_node_id": self.data.parent_parallel_start_node_id, "iteration_id": self.data.iteration_id, "loop_id": self.data.loop_id, "retry_index": self.data.retry_index, @@ -541,8 +516,6 @@ class LoopNodeStartStreamResponse(StreamResponse): metadata: Mapping = {} inputs: Mapping = {} inputs_truncated: bool = False - parallel_id: str | None = None - parallel_start_node_id: str | None = None event: StreamEvent = StreamEvent.LOOP_STARTED workflow_run_id: str @@ -567,9 +540,6 @@ class LoopNodeNextStreamResponse(StreamResponse): created_at: int pre_loop_output: Any = None extras: Mapping[str, object] = Field(default_factory=dict) - parallel_id: str | None = None - parallel_start_node_id: str | None = None - parallel_mode_run_id: str | None = None event: StreamEvent = StreamEvent.LOOP_NEXT workflow_run_id: str @@ -603,8 +573,6 @@ class LoopNodeCompletedStreamResponse(StreamResponse): execution_metadata: Mapping[str, object] = Field(default_factory=dict) finished_at: int steps: int - parallel_id: str | None = None - parallel_start_node_id: str | None = None event: StreamEvent = StreamEvent.LOOP_COMPLETED workflow_run_id: str diff --git 
a/api/core/datasource/datasource_manager.py b/api/core/datasource/datasource_manager.py index 47d297e194..002415a7db 100644 --- a/api/core/datasource/datasource_manager.py +++ b/api/core/datasource/datasource_manager.py @@ -1,11 +1,9 @@ import logging from threading import Lock -from typing import Union import contexts from core.datasource.__base.datasource_plugin import DatasourcePlugin from core.datasource.__base.datasource_provider import DatasourcePluginProviderController -from core.datasource.entities.common_entities import I18nObject from core.datasource.entities.datasource_entities import DatasourceProviderType from core.datasource.errors import DatasourceProviderNotFoundError from core.datasource.local_file.local_file_provider import LocalFileDatasourcePluginProviderController @@ -18,11 +16,6 @@ logger = logging.getLogger(__name__) class DatasourceManager: - _builtin_provider_lock = Lock() - _hardcoded_providers: dict[str, DatasourcePluginProviderController] = {} - _builtin_providers_loaded = False - _builtin_tools_labels: dict[str, Union[I18nObject, None]] = {} - @classmethod def get_datasource_plugin_provider( cls, provider_id: str, tenant_id: str, datasource_type: DatasourceProviderType diff --git a/api/core/entities/provider_configuration.py b/api/core/entities/provider_configuration.py index 29b8f8f610..c4be429219 100644 --- a/api/core/entities/provider_configuration.py +++ b/api/core/entities/provider_configuration.py @@ -472,6 +472,9 @@ class ProviderConfiguration(BaseModel): provider_model_credentials_cache.delete() self.switch_preferred_provider_type(provider_type=ProviderType.CUSTOM, session=session) + else: + # some historical data may have a provider record but not be set as valid + provider_record.is_valid = True session.commit() except Exception: @@ -1145,6 +1148,15 @@ class ProviderConfiguration(BaseModel): raise ValueError("Can't add same credential") provider_model_record.credential_id = credential_record.id provider_model_record.updated_at = naive_utc_now() + + # clear cache + provider_model_credentials_cache = ProviderCredentialsCache( + tenant_id=self.tenant_id, + identity_id=provider_model_record.id, + cache_type=ProviderCredentialsCacheType.MODEL, + ) + provider_model_credentials_cache.delete() + session.add(provider_model_record) session.commit() @@ -1178,6 +1190,14 @@ class ProviderConfiguration(BaseModel): session.add(provider_model_record) session.commit() + # clear cache + provider_model_credentials_cache = ProviderCredentialsCache( + tenant_id=self.tenant_id, + identity_id=provider_model_record.id, + cache_type=ProviderCredentialsCacheType.MODEL, + ) + provider_model_credentials_cache.delete() + def delete_custom_model(self, model_type: ModelType, model: str): """ Delete custom model. 
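
Note on the api/core/entities/provider_configuration.py hunks above: both credential-switch paths now drop the per-model entry in ProviderCredentialsCache after updating `credential_id`, so the next read refetches the new credential instead of serving a stale cached one. A minimal sketch of the pattern, assuming the import locations of `ProviderCredentialsCache` and `naive_utc_now` (neither is shown in this patch):

    from core.helper.model_provider_cache import ProviderCredentialsCache, ProviderCredentialsCacheType
    from libs.datetime_utils import naive_utc_now  # assumed helper location

    def switch_model_credential(session, provider_model_record, credential_record, tenant_id: str) -> None:
        # Re-point the record at the new credential, then invalidate the cache;
        # without the delete, readers keep seeing the old credential until the
        # cache entry expires on its own.
        provider_model_record.credential_id = credential_record.id
        provider_model_record.updated_at = naive_utc_now()
        ProviderCredentialsCache(
            tenant_id=tenant_id,
            identity_id=provider_model_record.id,
            cache_type=ProviderCredentialsCacheType.MODEL,
        ).delete()
        session.add(provider_model_record)
        session.commit()
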
diff --git a/api/core/llm_generator/llm_generator.py b/api/core/llm_generator/llm_generator.py index e64ac25ab1..bd893b17f1 100644 --- a/api/core/llm_generator/llm_generator.py +++ b/api/core/llm_generator/llm_generator.py @@ -100,7 +100,7 @@ class LLMGenerator: return name @classmethod - def generate_suggested_questions_after_answer(cls, tenant_id: str, histories: str): + def generate_suggested_questions_after_answer(cls, tenant_id: str, histories: str) -> Sequence[str]: output_parser = SuggestedQuestionsAfterAnswerOutputParser() format_instructions = output_parser.get_format_instructions() @@ -119,6 +119,8 @@ class LLMGenerator: prompt_messages = [UserPromptMessage(content=prompt)] + questions: Sequence[str] = [] + try: response: LLMResult = model_instance.invoke_llm( prompt_messages=list(prompt_messages), diff --git a/api/core/llm_generator/output_parser/suggested_questions_after_answer.py b/api/core/llm_generator/output_parser/suggested_questions_after_answer.py index e78859cc1a..eec771181f 100644 --- a/api/core/llm_generator/output_parser/suggested_questions_after_answer.py +++ b/api/core/llm_generator/output_parser/suggested_questions_after_answer.py @@ -1,17 +1,26 @@ import json +import logging import re +from collections.abc import Sequence from core.llm_generator.prompts import SUGGESTED_QUESTIONS_AFTER_ANSWER_INSTRUCTION_PROMPT +logger = logging.getLogger(__name__) + class SuggestedQuestionsAfterAnswerOutputParser: def get_format_instructions(self) -> str: return SUGGESTED_QUESTIONS_AFTER_ANSWER_INSTRUCTION_PROMPT - def parse(self, text: str): + def parse(self, text: str) -> Sequence[str]: action_match = re.search(r"\[.*?\]", text.strip(), re.DOTALL) + questions: list[str] = [] if action_match is not None: - json_obj = json.loads(action_match.group(0).strip()) - else: - json_obj = [] - return json_obj + try: + json_obj = json.loads(action_match.group(0).strip()) + except json.JSONDecodeError as exc: + logger.warning("Failed to decode suggested questions payload: %s", exc) + else: + if isinstance(json_obj, list): + questions = [question for question in json_obj if isinstance(question, str)] + return questions diff --git a/api/core/ops/aliyun_trace/data_exporter/traceclient.py b/api/core/ops/aliyun_trace/data_exporter/traceclient.py index f54405b5de..5aa9fb6689 100644 --- a/api/core/ops/aliyun_trace/data_exporter/traceclient.py +++ b/api/core/ops/aliyun_trace/data_exporter/traceclient.py @@ -7,7 +7,7 @@ import uuid from collections import deque from collections.abc import Sequence from datetime import datetime -from typing import Final +from typing import Final, cast from urllib.parse import urljoin import httpx @@ -199,7 +199,7 @@ def convert_to_trace_id(uuid_v4: str | None) -> int: raise ValueError("UUID cannot be None") try: uuid_obj = uuid.UUID(uuid_v4) - return uuid_obj.int + return cast(int, uuid_obj.int) except ValueError as e: raise ValueError(f"Invalid UUID input: {uuid_v4}") from e diff --git a/api/core/ops/entities/config_entity.py b/api/core/ops/entities/config_entity.py index 4ba6eb0780..f9b8d41e0a 100644 --- a/api/core/ops/entities/config_entity.py +++ b/api/core/ops/entities/config_entity.py @@ -13,6 +13,7 @@ class TracingProviderEnum(StrEnum): OPIK = "opik" WEAVE = "weave" ALIYUN = "aliyun" + TENCENT = "tencent" class BaseTracingConfig(BaseModel): @@ -195,5 +196,32 @@ class AliyunConfig(BaseTracingConfig): return validate_url_with_path(v, "https://tracing-analysis-dc-hz.aliyuncs.com") +class TencentConfig(BaseTracingConfig): + """ + Tencent APM tracing config + """ 
+ + token: str + endpoint: str + service_name: str + + @field_validator("token") + @classmethod + def token_validator(cls, v, info: ValidationInfo): + if not v or v.strip() == "": + raise ValueError("Token cannot be empty") + return v + + @field_validator("endpoint") + @classmethod + def endpoint_validator(cls, v, info: ValidationInfo): + return cls.validate_endpoint_url(v, "https://apm.tencentcloudapi.com") + + @field_validator("service_name") + @classmethod + def service_name_validator(cls, v, info: ValidationInfo): + return cls.validate_project_field(v, "dify_app") + + OPS_FILE_PATH = "ops_trace/" OPS_TRACE_FAILED_KEY = "FAILED_OPS_TRACE" diff --git a/api/core/ops/entities/trace_entity.py b/api/core/ops/entities/trace_entity.py index b8a25c5d7d..5b81c09a2d 100644 --- a/api/core/ops/entities/trace_entity.py +++ b/api/core/ops/entities/trace_entity.py @@ -90,6 +90,7 @@ class SuggestedQuestionTraceInfo(BaseTraceInfo): class DatasetRetrievalTraceInfo(BaseTraceInfo): documents: Any = None + error: str | None = None class ToolTraceInfo(BaseTraceInfo): diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py index e181373bd0..7db9b076d2 100644 --- a/api/core/ops/ops_trace_manager.py +++ b/api/core/ops/ops_trace_manager.py @@ -120,6 +120,17 @@ class OpsTraceProviderConfigMap(collections.UserDict[str, dict[str, Any]]): "trace_instance": AliyunDataTrace, } + case TracingProviderEnum.TENCENT: + from core.ops.entities.config_entity import TencentConfig + from core.ops.tencent_trace.tencent_trace import TencentDataTrace + + return { + "config_class": TencentConfig, + "secret_keys": ["token"], + "other_keys": ["endpoint", "service_name"], + "trace_instance": TencentDataTrace, + } + case _: raise KeyError(f"Unsupported tracing provider: {provider}") @@ -723,6 +734,7 @@ class TraceTask: end_time=timer.get("end"), metadata=metadata, message_data=message_data.to_dict(), + error=kwargs.get("error"), ) return dataset_retrieval_trace_info @@ -889,6 +901,7 @@ class TraceQueueManager: continue file_id = uuid4().hex trace_info = task.execute() + task_data = TaskData( app_id=task.app_id, trace_info_type=type(trace_info).__name__, @@ -900,4 +913,4 @@ class TraceQueueManager: "file_id": file_id, "app_id": task.app_id, } - process_trace_tasks.delay(file_info) + process_trace_tasks.delay(file_info) # type: ignore diff --git a/api/core/ops/tencent_trace/__init__.py b/api/core/ops/tencent_trace/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/core/ops/tencent_trace/client.py b/api/core/ops/tencent_trace/client.py new file mode 100644 index 0000000000..270732aa02 --- /dev/null +++ b/api/core/ops/tencent_trace/client.py @@ -0,0 +1,337 @@ +""" +Tencent APM Trace Client - handles network operations, metrics, and API communication +""" + +from __future__ import annotations + +import importlib +import logging +import os +import socket +from typing import TYPE_CHECKING +from urllib.parse import urlparse + +if TYPE_CHECKING: + from opentelemetry.metrics import Meter + from opentelemetry.metrics._internal.instrument import Histogram + from opentelemetry.sdk.metrics.export import MetricReader + +from opentelemetry import trace as trace_api +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor +from opentelemetry.semconv.resource import ResourceAttributes +from opentelemetry.trace 
import SpanKind +from opentelemetry.util.types import AttributeValue + +from configs import dify_config + +from .entities.tencent_semconv import LLM_OPERATION_DURATION +from .entities.tencent_trace_entity import SpanData + +logger = logging.getLogger(__name__) + + +class TencentTraceClient: + """Tencent APM trace client using OpenTelemetry OTLP exporter""" + + def __init__( + self, + service_name: str, + endpoint: str, + token: str, + max_queue_size: int = 1000, + schedule_delay_sec: int = 5, + max_export_batch_size: int = 50, + metrics_export_interval_sec: int = 10, + ): + self.endpoint = endpoint + self.token = token + self.service_name = service_name + self.metrics_export_interval_sec = metrics_export_interval_sec + + self.resource = Resource( + attributes={ + ResourceAttributes.SERVICE_NAME: service_name, + ResourceAttributes.SERVICE_VERSION: f"dify-{dify_config.project.version}-{dify_config.COMMIT_SHA}", + ResourceAttributes.DEPLOYMENT_ENVIRONMENT: f"{dify_config.DEPLOY_ENV}-{dify_config.EDITION}", + ResourceAttributes.HOST_NAME: socket.gethostname(), + } + ) + # Prepare gRPC endpoint/metadata + grpc_endpoint, insecure, _, _ = self._resolve_grpc_target(endpoint) + + headers = (("authorization", f"Bearer {token}"),) + + self.exporter = OTLPSpanExporter( + endpoint=grpc_endpoint, + headers=headers, + insecure=insecure, + timeout=30, + ) + + self.tracer_provider = TracerProvider(resource=self.resource) + self.span_processor = BatchSpanProcessor( + span_exporter=self.exporter, + max_queue_size=max_queue_size, + schedule_delay_millis=schedule_delay_sec * 1000, + max_export_batch_size=max_export_batch_size, + ) + self.tracer_provider.add_span_processor(self.span_processor) + + self.tracer = self.tracer_provider.get_tracer("dify.tencent_apm") + + # Store span contexts for parent-child relationships + self.span_contexts: dict[int, trace_api.SpanContext] = {} + + self.meter: Meter | None = None + self.hist_llm_duration: Histogram | None = None + self.metric_reader: MetricReader | None = None + + # Metrics exporter and instruments + try: + from opentelemetry import metrics + from opentelemetry.sdk.metrics import Histogram, MeterProvider + from opentelemetry.sdk.metrics.export import AggregationTemporality, PeriodicExportingMetricReader + + protocol = os.getenv("OTEL_EXPORTER_OTLP_PROTOCOL", "").strip().lower() + use_http_protobuf = protocol in {"http/protobuf", "http-protobuf"} + use_http_json = protocol in {"http/json", "http-json"} + + # Set preferred temporality for histograms to DELTA + preferred_temporality: dict[type, AggregationTemporality] = {Histogram: AggregationTemporality.DELTA} + + def _create_metric_exporter(exporter_cls, **kwargs): + """Create metric exporter with preferred_temporality support""" + try: + return exporter_cls(**kwargs, preferred_temporality=preferred_temporality) + except Exception: + return exporter_cls(**kwargs) + + metric_reader = None + if use_http_json: + exporter_cls = None + for mod_path in ( + "opentelemetry.exporter.otlp.http.json.metric_exporter", + "opentelemetry.exporter.otlp.json.metric_exporter", + ): + try: + mod = importlib.import_module(mod_path) + exporter_cls = getattr(mod, "OTLPMetricExporter", None) + if exporter_cls: + break + except Exception: + continue + if exporter_cls is not None: + metric_exporter = _create_metric_exporter( + exporter_cls, + endpoint=endpoint, + headers={"authorization": f"Bearer {token}"}, + ) + else: + from opentelemetry.exporter.otlp.proto.http.metric_exporter import ( + OTLPMetricExporter as HttpMetricExporter, + ) 
+ + metric_exporter = _create_metric_exporter( + HttpMetricExporter, + endpoint=endpoint, + headers={"authorization": f"Bearer {token}"}, + ) + metric_reader = PeriodicExportingMetricReader( + metric_exporter, export_interval_millis=self.metrics_export_interval_sec * 1000 + ) + + elif use_http_protobuf: + from opentelemetry.exporter.otlp.proto.http.metric_exporter import ( + OTLPMetricExporter as HttpMetricExporter, + ) + + metric_exporter = _create_metric_exporter( + HttpMetricExporter, + endpoint=endpoint, + headers={"authorization": f"Bearer {token}"}, + ) + metric_reader = PeriodicExportingMetricReader( + metric_exporter, export_interval_millis=self.metrics_export_interval_sec * 1000 + ) + else: + from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import ( + OTLPMetricExporter as GrpcMetricExporter, + ) + + m_grpc_endpoint, m_insecure, _, _ = self._resolve_grpc_target(endpoint) + + metric_exporter = _create_metric_exporter( + GrpcMetricExporter, + endpoint=m_grpc_endpoint, + headers={"authorization": f"Bearer {token}"}, + insecure=m_insecure, + ) + metric_reader = PeriodicExportingMetricReader( + metric_exporter, export_interval_millis=self.metrics_export_interval_sec * 1000 + ) + + if metric_reader is not None: + provider = MeterProvider(resource=self.resource, metric_readers=[metric_reader]) + metrics.set_meter_provider(provider) + self.meter = metrics.get_meter("dify-sdk", dify_config.project.version) + self.hist_llm_duration = self.meter.create_histogram( + name=LLM_OPERATION_DURATION, + unit="s", + description="LLM operation duration (seconds)", + ) + self.metric_reader = metric_reader + else: + self.meter = None + self.hist_llm_duration = None + self.metric_reader = None + except Exception: + logger.exception("[Tencent APM] Metrics initialization failed; metrics disabled") + self.meter = None + self.hist_llm_duration = None + self.metric_reader = None + + def add_span(self, span_data: SpanData) -> None: + """Create and export span using OpenTelemetry Tracer API""" + try: + self._create_and_export_span(span_data) + logger.debug("[Tencent APM] Created span: %s", span_data.name) + + except Exception: + logger.exception("[Tencent APM] Failed to create span: %s", span_data.name) + + # Metrics recording API + def record_llm_duration(self, latency_seconds: float, attributes: dict[str, str] | None = None) -> None: + """Record LLM operation duration histogram in seconds.""" + try: + if not hasattr(self, "hist_llm_duration") or self.hist_llm_duration is None: + return + attrs: dict[str, str] = {} + if attributes: + for k, v in attributes.items(): + attrs[k] = str(v) if not isinstance(v, (str, int, float, bool)) else v # type: ignore[assignment] + self.hist_llm_duration.record(latency_seconds, attrs) # type: ignore[attr-defined] + except Exception: + logger.debug("[Tencent APM] Failed to record LLM duration", exc_info=True) + + def _create_and_export_span(self, span_data: SpanData) -> None: + """Create span using OpenTelemetry Tracer API""" + try: + parent_context = None + if span_data.parent_span_id and span_data.parent_span_id in self.span_contexts: + parent_context = trace_api.set_span_in_context( + trace_api.NonRecordingSpan(self.span_contexts[span_data.parent_span_id]) + ) + + span = self.tracer.start_span( + name=span_data.name, + context=parent_context, + kind=SpanKind.INTERNAL, + start_time=span_data.start_time, + ) + self.span_contexts[span_data.span_id] = span.get_span_context() + + if span_data.attributes: + attributes: dict[str, AttributeValue] = {} + for key, value in 
span_data.attributes.items(): + if isinstance(value, (int, float, bool)): + attributes[key] = value + else: + attributes[key] = str(value) + span.set_attributes(attributes) + + if span_data.events: + for event in span_data.events: + span.add_event(event.name, event.attributes, event.timestamp) + + if span_data.status: + span.set_status(span_data.status) + + # Manually end span; do not use context manager to avoid double-end warnings + span.end(end_time=span_data.end_time) + + except Exception: + logger.exception("[Tencent APM] Error creating span: %s", span_data.name) + + def api_check(self) -> bool: + """Check API connectivity using socket connection test for gRPC endpoints""" + try: + # Resolve gRPC target consistently with exporters + _, _, host, port = self._resolve_grpc_target(self.endpoint) + + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.settimeout(5) + result = sock.connect_ex((host, port)) + sock.close() + + if result == 0: + logger.info("[Tencent APM] Endpoint %s:%s is accessible", host, port) + return True + else: + logger.warning("[Tencent APM] Endpoint %s:%s is not accessible", host, port) + if host in ["127.0.0.1", "localhost"]: + logger.info("[Tencent APM] Development environment detected, allowing config save") + return True + return False + + except Exception: + logger.exception("[Tencent APM] API check failed") + if "127.0.0.1" in self.endpoint or "localhost" in self.endpoint: + return True + return False + + def get_project_url(self) -> str: + """Get project console URL""" + return "https://console.cloud.tencent.com/apm" + + def shutdown(self) -> None: + """Shutdown the client and export remaining spans""" + try: + if self.span_processor: + logger.info("[Tencent APM] Flushing remaining spans before shutdown") + _ = self.span_processor.force_flush() + self.span_processor.shutdown() + + if self.tracer_provider: + self.tracer_provider.shutdown() + if self.metric_reader is not None: + try: + self.metric_reader.shutdown() # type: ignore[attr-defined] + except Exception: + pass + + except Exception: + logger.exception("[Tencent APM] Error during client shutdown") + + @staticmethod + def _resolve_grpc_target(endpoint: str, default_port: int = 4317) -> tuple[str, bool, str, int]: + """Normalize endpoint to gRPC target and security flag. 
+
+        Returns:
+            (grpc_endpoint, insecure, host, port)
+        """
+        try:
+            if endpoint.startswith(("http://", "https://")):
+                parsed = urlparse(endpoint)
+                host = parsed.hostname or "localhost"
+                port = parsed.port or default_port
+                insecure = parsed.scheme == "http"
+                return f"{host}:{port}", insecure, host, port
+
+            host = endpoint
+            port = default_port
+            if ":" in endpoint:
+                parts = endpoint.rsplit(":", 1)
+                host = parts[0] or "localhost"
+                try:
+                    port = int(parts[1])
+                except Exception:
+                    port = default_port
+
+            insecure = ("localhost" in host) or ("127.0.0.1" in host)
+            return f"{host}:{port}", insecure, host, port
+        except Exception:
+            host, port = "localhost", default_port
+            return f"{host}:{port}", True, host, port
diff --git a/api/core/ops/tencent_trace/entities/__init__.py b/api/core/ops/tencent_trace/entities/__init__.py
new file mode 100644
index 0000000000..b1602628ed
--- /dev/null
+++ b/api/core/ops/tencent_trace/entities/__init__.py
@@ -0,0 +1 @@
+# Tencent trace entities module
diff --git a/api/core/ops/tencent_trace/entities/tencent_semconv.py b/api/core/ops/tencent_trace/entities/tencent_semconv.py
new file mode 100644
index 0000000000..5ea6eeacef
--- /dev/null
+++ b/api/core/ops/tencent_trace/entities/tencent_semconv.py
@@ -0,0 +1,73 @@
+from enum import Enum
+
+# Common attributes (shared across span kinds)
+GEN_AI_SESSION_ID = "gen_ai.session.id"
+
+GEN_AI_USER_ID = "gen_ai.user.id"
+
+GEN_AI_USER_NAME = "gen_ai.user.name"
+
+GEN_AI_SPAN_KIND = "gen_ai.span.kind"
+
+GEN_AI_FRAMEWORK = "gen_ai.framework"
+
+GEN_AI_IS_ENTRY = "gen_ai.is_entry"  # marks entry spans so LLM-related traces can be counted
+
+# Chain
+INPUT_VALUE = "gen_ai.entity.input"
+
+OUTPUT_VALUE = "gen_ai.entity.output"
+
+
+# Retriever
+RETRIEVAL_QUERY = "retrieval.query"
+
+RETRIEVAL_DOCUMENT = "retrieval.document"
+
+
+# GENERATION
+GEN_AI_MODEL_NAME = "gen_ai.response.model"
+
+GEN_AI_PROVIDER = "gen_ai.provider.name"
+
+
+GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens"
+
+GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens"
+
+GEN_AI_USAGE_TOTAL_TOKENS = "gen_ai.usage.total_tokens"
+
+GEN_AI_PROMPT_TEMPLATE_TEMPLATE = "gen_ai.prompt_template.template"
+
+GEN_AI_PROMPT_TEMPLATE_VARIABLE = "gen_ai.prompt_template.variable"
+
+GEN_AI_PROMPT = "gen_ai.prompt"
+
+GEN_AI_COMPLETION = "gen_ai.completion"
+
+GEN_AI_RESPONSE_FINISH_REASON = "gen_ai.response.finish_reason"
+
+# Tool
+TOOL_NAME = "tool.name"
+
+TOOL_DESCRIPTION = "tool.description"
+
+TOOL_PARAMETERS = "tool.parameters"
+
+# Instrumentation Library
+INSTRUMENTATION_NAME = "dify-sdk"
+INSTRUMENTATION_VERSION = "0.1.0"
+INSTRUMENTATION_LANGUAGE = "python"
+
+
+# Metrics
+LLM_OPERATION_DURATION = "gen_ai.client.operation.duration"
+
+
+class GenAISpanKind(Enum):
+    WORKFLOW = "WORKFLOW"  # OpenLLMetry
+    RETRIEVER = "RETRIEVER"  # RAG
+    GENERATION = "GENERATION"  # Langfuse
+    TOOL = "TOOL"  # OpenLLMetry
+    AGENT = "AGENT"  # OpenLLMetry
+    TASK = "TASK"  # OpenLLMetry
diff --git a/api/core/ops/tencent_trace/entities/tencent_trace_entity.py b/api/core/ops/tencent_trace/entities/tencent_trace_entity.py
new file mode 100644
index 0000000000..428850f109
--- /dev/null
+++ b/api/core/ops/tencent_trace/entities/tencent_trace_entity.py
@@ -0,0 +1,21 @@
+from collections.abc import Sequence
+
+from opentelemetry import trace as trace_api
+from opentelemetry.sdk.trace import Event
+from opentelemetry.trace import Status, StatusCode
+from pydantic import BaseModel, Field
+
+
+class SpanData(BaseModel):
+    model_config = {"arbitrary_types_allowed": True}
+
+    trace_id: int = Field(..., description="The unique
identifier for the trace.") + parent_span_id: int | None = Field(None, description="The ID of the parent span, if any.") + span_id: int = Field(..., description="The unique identifier for this span.") + name: str = Field(..., description="The name of the span.") + attributes: dict[str, str] = Field(default_factory=dict, description="Attributes associated with the span.") + events: Sequence[Event] = Field(default_factory=list, description="Events recorded in the span.") + links: Sequence[trace_api.Link] = Field(default_factory=list, description="Links to other spans.") + status: Status = Field(default=Status(StatusCode.UNSET), description="The status of the span.") + start_time: int = Field(..., description="The start time of the span in nanoseconds.") + end_time: int = Field(..., description="The end time of the span in nanoseconds.") diff --git a/api/core/ops/tencent_trace/span_builder.py b/api/core/ops/tencent_trace/span_builder.py new file mode 100644 index 0000000000..5ba592290d --- /dev/null +++ b/api/core/ops/tencent_trace/span_builder.py @@ -0,0 +1,372 @@ +""" +Tencent APM Span Builder - handles all span construction logic +""" + +import json +import logging +from datetime import datetime + +from opentelemetry.trace import Status, StatusCode + +from core.ops.entities.trace_entity import ( + DatasetRetrievalTraceInfo, + MessageTraceInfo, + ToolTraceInfo, + WorkflowTraceInfo, +) +from core.ops.tencent_trace.entities.tencent_semconv import ( + GEN_AI_COMPLETION, + GEN_AI_FRAMEWORK, + GEN_AI_IS_ENTRY, + GEN_AI_MODEL_NAME, + GEN_AI_PROMPT, + GEN_AI_PROVIDER, + GEN_AI_RESPONSE_FINISH_REASON, + GEN_AI_SESSION_ID, + GEN_AI_SPAN_KIND, + GEN_AI_USAGE_INPUT_TOKENS, + GEN_AI_USAGE_OUTPUT_TOKENS, + GEN_AI_USAGE_TOTAL_TOKENS, + GEN_AI_USER_ID, + INPUT_VALUE, + OUTPUT_VALUE, + RETRIEVAL_DOCUMENT, + RETRIEVAL_QUERY, + TOOL_DESCRIPTION, + TOOL_NAME, + TOOL_PARAMETERS, + GenAISpanKind, +) +from core.ops.tencent_trace.entities.tencent_trace_entity import SpanData +from core.ops.tencent_trace.utils import TencentTraceUtils +from core.rag.models.document import Document +from core.workflow.entities.workflow_node_execution import ( + WorkflowNodeExecution, + WorkflowNodeExecutionMetadataKey, + WorkflowNodeExecutionStatus, +) + +logger = logging.getLogger(__name__) + + +class TencentSpanBuilder: + """Builder class for constructing different types of spans""" + + @staticmethod + def _get_time_nanoseconds(time_value: datetime | None) -> int: + """Convert datetime to nanoseconds for span creation.""" + return TencentTraceUtils.convert_datetime_to_nanoseconds(time_value) + + @staticmethod + def build_workflow_spans( + trace_info: WorkflowTraceInfo, trace_id: int, user_id: str, links: list | None = None + ) -> list[SpanData]: + """Build workflow-related spans""" + spans = [] + links = links or [] + + message_span_id = None + workflow_span_id = TencentTraceUtils.convert_to_span_id(trace_info.workflow_run_id, "workflow") + + if hasattr(trace_info, "metadata") and trace_info.metadata.get("conversation_id"): + message_span_id = TencentTraceUtils.convert_to_span_id(trace_info.workflow_run_id, "message") + + status = Status(StatusCode.OK) + if trace_info.error: + status = Status(StatusCode.ERROR, trace_info.error) + + if message_span_id: + message_span = TencentSpanBuilder._build_message_span( + trace_info, trace_id, message_span_id, user_id, status, links + ) + spans.append(message_span) + + workflow_span = TencentSpanBuilder._build_workflow_span( + trace_info, trace_id, workflow_span_id, message_span_id, user_id, 
status, links + ) + spans.append(workflow_span) + + return spans + + @staticmethod + def _build_message_span( + trace_info: WorkflowTraceInfo, trace_id: int, message_span_id: int, user_id: str, status: Status, links: list + ) -> SpanData: + """Build message span for chatflow""" + return SpanData( + trace_id=trace_id, + parent_span_id=None, + span_id=message_span_id, + name="message", + start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time), + end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time), + attributes={ + GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""), + GEN_AI_USER_ID: str(user_id), + GEN_AI_SPAN_KIND: GenAISpanKind.WORKFLOW.value, + GEN_AI_FRAMEWORK: "dify", + GEN_AI_IS_ENTRY: "true", + INPUT_VALUE: trace_info.workflow_run_inputs.get("sys.query", ""), + OUTPUT_VALUE: json.dumps(trace_info.workflow_run_outputs, ensure_ascii=False), + }, + status=status, + links=links, + ) + + @staticmethod + def _build_workflow_span( + trace_info: WorkflowTraceInfo, + trace_id: int, + workflow_span_id: int, + message_span_id: int | None, + user_id: str, + status: Status, + links: list, + ) -> SpanData: + """Build workflow span""" + attributes = { + GEN_AI_USER_ID: str(user_id), + GEN_AI_SPAN_KIND: GenAISpanKind.WORKFLOW.value, + GEN_AI_FRAMEWORK: "dify", + INPUT_VALUE: json.dumps(trace_info.workflow_run_inputs, ensure_ascii=False), + OUTPUT_VALUE: json.dumps(trace_info.workflow_run_outputs, ensure_ascii=False), + } + + if message_span_id is None: + attributes[GEN_AI_IS_ENTRY] = "true" + + return SpanData( + trace_id=trace_id, + parent_span_id=message_span_id, + span_id=workflow_span_id, + name="workflow", + start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time), + end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time), + attributes=attributes, + status=status, + links=links, + ) + + @staticmethod + def build_workflow_llm_span( + trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution + ) -> SpanData: + """Build LLM span for workflow nodes.""" + process_data = node_execution.process_data or {} + outputs = node_execution.outputs or {} + usage_data = process_data.get("usage", {}) if "usage" in process_data else outputs.get("usage", {}) + + return SpanData( + trace_id=trace_id, + parent_span_id=workflow_span_id, + span_id=TencentTraceUtils.convert_to_span_id(node_execution.id, "node"), + name="GENERATION", + start_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.created_at), + end_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.finished_at), + attributes={ + GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""), + GEN_AI_SPAN_KIND: GenAISpanKind.GENERATION.value, + GEN_AI_FRAMEWORK: "dify", + GEN_AI_MODEL_NAME: process_data.get("model_name", ""), + GEN_AI_PROVIDER: process_data.get("model_provider", ""), + GEN_AI_USAGE_INPUT_TOKENS: str(usage_data.get("prompt_tokens", 0)), + GEN_AI_USAGE_OUTPUT_TOKENS: str(usage_data.get("completion_tokens", 0)), + GEN_AI_USAGE_TOTAL_TOKENS: str(usage_data.get("total_tokens", 0)), + GEN_AI_PROMPT: json.dumps(process_data.get("prompts", []), ensure_ascii=False), + GEN_AI_COMPLETION: str(outputs.get("text", "")), + GEN_AI_RESPONSE_FINISH_REASON: outputs.get("finish_reason", ""), + INPUT_VALUE: json.dumps(process_data.get("prompts", []), ensure_ascii=False), + OUTPUT_VALUE: str(outputs.get("text", "")), + }, + status=TencentSpanBuilder._get_workflow_node_status(node_execution), + ) + + 
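+    # Usage sketch (illustrative; names are defined elsewhere in this patch):
+    # the builders in this class only construct SpanData value objects. Nothing
+    # is exported until TencentTraceClient.add_span() replays the SpanData
+    # through the OTLP tracer, resolving parent/child links via the client's
+    # span_contexts map:
+    #
+    #   span = TencentSpanBuilder.build_workflow_llm_span(trace_id, wf_span_id, trace_info, node_execution)
+    #   client.add_span(span)
+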
@staticmethod + def build_message_span( + trace_info: MessageTraceInfo, trace_id: int, user_id: str, links: list | None = None + ) -> SpanData: + """Build message span.""" + links = links or [] + status = Status(StatusCode.OK) + if trace_info.error: + status = Status(StatusCode.ERROR, trace_info.error) + + return SpanData( + trace_id=trace_id, + parent_span_id=None, + span_id=TencentTraceUtils.convert_to_span_id(trace_info.message_id, "message"), + name="message", + start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time), + end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time), + attributes={ + GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""), + GEN_AI_USER_ID: str(user_id), + GEN_AI_SPAN_KIND: GenAISpanKind.WORKFLOW.value, + GEN_AI_FRAMEWORK: "dify", + GEN_AI_IS_ENTRY: "true", + INPUT_VALUE: str(trace_info.inputs or ""), + OUTPUT_VALUE: str(trace_info.outputs or ""), + }, + status=status, + links=links, + ) + + @staticmethod + def build_tool_span(trace_info: ToolTraceInfo, trace_id: int, parent_span_id: int) -> SpanData: + """Build tool span.""" + status = Status(StatusCode.OK) + if trace_info.error: + status = Status(StatusCode.ERROR, trace_info.error) + + return SpanData( + trace_id=trace_id, + parent_span_id=parent_span_id, + span_id=TencentTraceUtils.convert_to_span_id(trace_info.message_id, "tool"), + name=trace_info.tool_name, + start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time), + end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time), + attributes={ + GEN_AI_SPAN_KIND: GenAISpanKind.TOOL.value, + GEN_AI_FRAMEWORK: "dify", + TOOL_NAME: trace_info.tool_name, + TOOL_DESCRIPTION: "", + TOOL_PARAMETERS: json.dumps(trace_info.tool_parameters, ensure_ascii=False), + INPUT_VALUE: json.dumps(trace_info.tool_inputs, ensure_ascii=False), + OUTPUT_VALUE: str(trace_info.tool_outputs), + }, + status=status, + ) + + @staticmethod + def build_retrieval_span(trace_info: DatasetRetrievalTraceInfo, trace_id: int, parent_span_id: int) -> SpanData: + """Build dataset retrieval span.""" + status = Status(StatusCode.OK) + if getattr(trace_info, "error", None): + status = Status(StatusCode.ERROR, trace_info.error) # type: ignore[arg-type] + + documents_data = TencentSpanBuilder._extract_retrieval_documents(trace_info.documents) + + return SpanData( + trace_id=trace_id, + parent_span_id=parent_span_id, + span_id=TencentTraceUtils.convert_to_span_id(trace_info.message_id, "retrieval"), + name="retrieval", + start_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.start_time), + end_time=TencentSpanBuilder._get_time_nanoseconds(trace_info.end_time), + attributes={ + GEN_AI_SPAN_KIND: GenAISpanKind.RETRIEVER.value, + GEN_AI_FRAMEWORK: "dify", + RETRIEVAL_QUERY: str(trace_info.inputs or ""), + RETRIEVAL_DOCUMENT: json.dumps(documents_data, ensure_ascii=False), + INPUT_VALUE: str(trace_info.inputs or ""), + OUTPUT_VALUE: json.dumps(documents_data, ensure_ascii=False), + }, + status=status, + ) + + @staticmethod + def _get_workflow_node_status(node_execution: WorkflowNodeExecution) -> Status: + """Get workflow node execution status.""" + if node_execution.status == WorkflowNodeExecutionStatus.SUCCEEDED: + return Status(StatusCode.OK) + elif node_execution.status in [WorkflowNodeExecutionStatus.FAILED, WorkflowNodeExecutionStatus.EXCEPTION]: + return Status(StatusCode.ERROR, str(node_execution.error)) + return Status(StatusCode.UNSET) + + @staticmethod + def build_workflow_retrieval_span( + trace_id: int, 
workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution + ) -> SpanData: + """Build knowledge retrieval span for workflow nodes.""" + input_value = "" + if node_execution.inputs: + input_value = str(node_execution.inputs.get("query", "")) + output_value = "" + if node_execution.outputs: + output_value = json.dumps(node_execution.outputs.get("result", []), ensure_ascii=False) + + return SpanData( + trace_id=trace_id, + parent_span_id=workflow_span_id, + span_id=TencentTraceUtils.convert_to_span_id(node_execution.id, "node"), + name=node_execution.title, + start_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.created_at), + end_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.finished_at), + attributes={ + GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""), + GEN_AI_SPAN_KIND: GenAISpanKind.RETRIEVER.value, + GEN_AI_FRAMEWORK: "dify", + RETRIEVAL_QUERY: input_value, + RETRIEVAL_DOCUMENT: output_value, + INPUT_VALUE: input_value, + OUTPUT_VALUE: output_value, + }, + status=TencentSpanBuilder._get_workflow_node_status(node_execution), + ) + + @staticmethod + def build_workflow_tool_span( + trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution + ) -> SpanData: + """Build tool span for workflow nodes.""" + tool_des = {} + if node_execution.metadata: + tool_des = node_execution.metadata.get(WorkflowNodeExecutionMetadataKey.TOOL_INFO, {}) + + return SpanData( + trace_id=trace_id, + parent_span_id=workflow_span_id, + span_id=TencentTraceUtils.convert_to_span_id(node_execution.id, "node"), + name=node_execution.title, + start_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.created_at), + end_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.finished_at), + attributes={ + GEN_AI_SPAN_KIND: GenAISpanKind.TOOL.value, + GEN_AI_FRAMEWORK: "dify", + TOOL_NAME: node_execution.title, + TOOL_DESCRIPTION: json.dumps(tool_des, ensure_ascii=False), + TOOL_PARAMETERS: json.dumps(node_execution.inputs or {}, ensure_ascii=False), + INPUT_VALUE: json.dumps(node_execution.inputs or {}, ensure_ascii=False), + OUTPUT_VALUE: json.dumps(node_execution.outputs, ensure_ascii=False), + }, + status=TencentSpanBuilder._get_workflow_node_status(node_execution), + ) + + @staticmethod + def build_workflow_task_span( + trace_id: int, workflow_span_id: int, trace_info: WorkflowTraceInfo, node_execution: WorkflowNodeExecution + ) -> SpanData: + """Build generic task span for workflow nodes.""" + return SpanData( + trace_id=trace_id, + parent_span_id=workflow_span_id, + span_id=TencentTraceUtils.convert_to_span_id(node_execution.id, "node"), + name=node_execution.title, + start_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.created_at), + end_time=TencentSpanBuilder._get_time_nanoseconds(node_execution.finished_at), + attributes={ + GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""), + GEN_AI_SPAN_KIND: GenAISpanKind.TASK.value, + GEN_AI_FRAMEWORK: "dify", + INPUT_VALUE: json.dumps(node_execution.inputs, ensure_ascii=False), + OUTPUT_VALUE: json.dumps(node_execution.outputs, ensure_ascii=False), + }, + status=TencentSpanBuilder._get_workflow_node_status(node_execution), + ) + + @staticmethod + def _extract_retrieval_documents(documents: list[Document]): + """Extract documents data for retrieval tracing.""" + documents_data = [] + for document in documents: + document_data = { + "content": document.page_content, + "metadata": { + "dataset_id": 
document.metadata.get("dataset_id"), + "doc_id": document.metadata.get("doc_id"), + "document_id": document.metadata.get("document_id"), + }, + "score": document.metadata.get("score"), + } + documents_data.append(document_data) + return documents_data diff --git a/api/core/ops/tencent_trace/tencent_trace.py b/api/core/ops/tencent_trace/tencent_trace.py new file mode 100644 index 0000000000..5ef1c61b24 --- /dev/null +++ b/api/core/ops/tencent_trace/tencent_trace.py @@ -0,0 +1,317 @@ +""" +Tencent APM tracing implementation with separated concerns +""" + +import logging + +from sqlalchemy import select +from sqlalchemy.orm import Session, sessionmaker + +from core.ops.base_trace_instance import BaseTraceInstance +from core.ops.entities.config_entity import TencentConfig +from core.ops.entities.trace_entity import ( + BaseTraceInfo, + DatasetRetrievalTraceInfo, + GenerateNameTraceInfo, + MessageTraceInfo, + ModerationTraceInfo, + SuggestedQuestionTraceInfo, + ToolTraceInfo, + WorkflowTraceInfo, +) +from core.ops.tencent_trace.client import TencentTraceClient +from core.ops.tencent_trace.entities.tencent_trace_entity import SpanData +from core.ops.tencent_trace.span_builder import TencentSpanBuilder +from core.ops.tencent_trace.utils import TencentTraceUtils +from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository +from core.workflow.entities.workflow_node_execution import ( + WorkflowNodeExecution, +) +from core.workflow.nodes import NodeType +from extensions.ext_database import db +from models import Account, App, TenantAccountJoin, WorkflowNodeExecutionTriggeredFrom + +logger = logging.getLogger(__name__) + + +class TencentDataTrace(BaseTraceInstance): + """ + Tencent APM trace implementation with single responsibility principle. + Acts as a coordinator that delegates specific tasks to specialized classes. 
+ """ + + def __init__(self, tencent_config: TencentConfig): + super().__init__(tencent_config) + self.trace_client = TencentTraceClient( + service_name=tencent_config.service_name, + endpoint=tencent_config.endpoint, + token=tencent_config.token, + metrics_export_interval_sec=5, + ) + + def trace(self, trace_info: BaseTraceInfo) -> None: + """Main tracing entry point - coordinates different trace types.""" + if isinstance(trace_info, WorkflowTraceInfo): + self.workflow_trace(trace_info) + elif isinstance(trace_info, MessageTraceInfo): + self.message_trace(trace_info) + elif isinstance(trace_info, ModerationTraceInfo): + pass + elif isinstance(trace_info, SuggestedQuestionTraceInfo): + self.suggested_question_trace(trace_info) + elif isinstance(trace_info, DatasetRetrievalTraceInfo): + self.dataset_retrieval_trace(trace_info) + elif isinstance(trace_info, ToolTraceInfo): + self.tool_trace(trace_info) + elif isinstance(trace_info, GenerateNameTraceInfo): + pass + + def api_check(self) -> bool: + return self.trace_client.api_check() + + def get_project_url(self) -> str: + return self.trace_client.get_project_url() + + def workflow_trace(self, trace_info: WorkflowTraceInfo) -> None: + """Handle workflow tracing by coordinating data retrieval and span construction.""" + try: + trace_id = TencentTraceUtils.convert_to_trace_id(trace_info.workflow_run_id) + + links = [] + if trace_info.trace_id: + links.append(TencentTraceUtils.create_link(trace_info.trace_id)) + + user_id = self._get_user_id(trace_info) + + workflow_spans = TencentSpanBuilder.build_workflow_spans(trace_info, trace_id, str(user_id), links) + + for span in workflow_spans: + self.trace_client.add_span(span) + + self._process_workflow_nodes(trace_info, trace_id) + + except Exception: + logger.exception("[Tencent APM] Failed to process workflow trace") + + def message_trace(self, trace_info: MessageTraceInfo) -> None: + """Handle message tracing.""" + try: + trace_id = TencentTraceUtils.convert_to_trace_id(trace_info.message_id) + user_id = self._get_user_id(trace_info) + + links = [] + if trace_info.trace_id: + links.append(TencentTraceUtils.create_link(trace_info.trace_id)) + + message_span = TencentSpanBuilder.build_message_span(trace_info, trace_id, str(user_id), links) + + self.trace_client.add_span(message_span) + + except Exception: + logger.exception("[Tencent APM] Failed to process message trace") + + def tool_trace(self, trace_info: ToolTraceInfo) -> None: + """Handle tool tracing.""" + try: + parent_span_id = None + trace_root_id = None + + if trace_info.message_id: + parent_span_id = TencentTraceUtils.convert_to_span_id(trace_info.message_id, "message") + trace_root_id = trace_info.message_id + + if parent_span_id and trace_root_id: + trace_id = TencentTraceUtils.convert_to_trace_id(trace_root_id) + + tool_span = TencentSpanBuilder.build_tool_span(trace_info, trace_id, parent_span_id) + + self.trace_client.add_span(tool_span) + + except Exception: + logger.exception("[Tencent APM] Failed to process tool trace") + + def dataset_retrieval_trace(self, trace_info: DatasetRetrievalTraceInfo) -> None: + """Handle dataset retrieval tracing.""" + try: + parent_span_id = None + trace_root_id = None + + if trace_info.message_id: + parent_span_id = TencentTraceUtils.convert_to_span_id(trace_info.message_id, "message") + trace_root_id = trace_info.message_id + + if parent_span_id and trace_root_id: + trace_id = TencentTraceUtils.convert_to_trace_id(trace_root_id) + + retrieval_span = 
TencentSpanBuilder.build_retrieval_span(trace_info, trace_id, parent_span_id) + + self.trace_client.add_span(retrieval_span) + + except Exception: + logger.exception("[Tencent APM] Failed to process dataset retrieval trace") + + def suggested_question_trace(self, trace_info: SuggestedQuestionTraceInfo) -> None: + """Handle suggested question tracing""" + try: + logger.info("[Tencent APM] Processing suggested question trace") + + except Exception: + logger.exception("[Tencent APM] Failed to process suggested question trace") + + def _process_workflow_nodes(self, trace_info: WorkflowTraceInfo, trace_id: int) -> None: + """Process workflow node executions.""" + try: + workflow_span_id = TencentTraceUtils.convert_to_span_id(trace_info.workflow_run_id, "workflow") + + node_executions = self._get_workflow_node_executions(trace_info) + + for node_execution in node_executions: + try: + node_span = self._build_workflow_node_span(node_execution, trace_id, trace_info, workflow_span_id) + if node_span: + self.trace_client.add_span(node_span) + + if node_execution.node_type == NodeType.LLM: + self._record_llm_metrics(node_execution) + except Exception: + logger.exception("[Tencent APM] Failed to process node execution: %s", node_execution.id) + + except Exception: + logger.exception("[Tencent APM] Failed to process workflow nodes") + + def _build_workflow_node_span( + self, node_execution: WorkflowNodeExecution, trace_id: int, trace_info: WorkflowTraceInfo, workflow_span_id: int + ) -> SpanData | None: + """Build span for different node types""" + try: + if node_execution.node_type == NodeType.LLM: + return TencentSpanBuilder.build_workflow_llm_span( + trace_id, workflow_span_id, trace_info, node_execution + ) + elif node_execution.node_type == NodeType.KNOWLEDGE_RETRIEVAL: + return TencentSpanBuilder.build_workflow_retrieval_span( + trace_id, workflow_span_id, trace_info, node_execution + ) + elif node_execution.node_type == NodeType.TOOL: + return TencentSpanBuilder.build_workflow_tool_span( + trace_id, workflow_span_id, trace_info, node_execution + ) + else: + # Handle all other node types as generic tasks + return TencentSpanBuilder.build_workflow_task_span( + trace_id, workflow_span_id, trace_info, node_execution + ) + except Exception: + logger.debug( + "[Tencent APM] Error building span for node %s: %s", + node_execution.id, + node_execution.node_type, + exc_info=True, + ) + return None + + def _get_workflow_node_executions(self, trace_info: WorkflowTraceInfo) -> list[WorkflowNodeExecution]: + """Retrieve workflow node executions from database.""" + try: + session_maker = sessionmaker(bind=db.engine) + + with Session(db.engine, expire_on_commit=False) as session: + app_id = trace_info.metadata.get("app_id") + if not app_id: + raise ValueError("No app_id found in trace_info metadata") + + app_stmt = select(App).where(App.id == app_id) + app = session.scalar(app_stmt) + if not app: + raise ValueError(f"App with id {app_id} not found") + + if not app.created_by: + raise ValueError(f"App with id {app_id} has no creator") + + account_stmt = select(Account).where(Account.id == app.created_by) + service_account = session.scalar(account_stmt) + if not service_account: + raise ValueError(f"Creator account not found for app {app_id}") + + current_tenant = ( + session.query(TenantAccountJoin).filter_by(account_id=service_account.id, current=True).first() + ) + if not current_tenant: + raise ValueError(f"Current tenant not found for account {service_account.id}") + + 
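+                # Bind the resolved tenant before building the repository, so the
+                # tenant-scoped execution queries run in the app creator's workspace.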
service_account.set_tenant_id(current_tenant.tenant_id) + + repository = SQLAlchemyWorkflowNodeExecutionRepository( + session_factory=session_maker, + user=service_account, + app_id=trace_info.metadata.get("app_id"), + triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN, + ) + + executions = repository.get_by_workflow_run(workflow_run_id=trace_info.workflow_run_id) + return list(executions) + + except Exception: + logger.exception("[Tencent APM] Failed to get workflow node executions") + return [] + + def _get_user_id(self, trace_info: BaseTraceInfo) -> str: + """Get user ID from trace info.""" + try: + tenant_id = None + user_id = None + + if isinstance(trace_info, (WorkflowTraceInfo, GenerateNameTraceInfo)): + tenant_id = trace_info.tenant_id + + if hasattr(trace_info, "metadata") and trace_info.metadata: + user_id = trace_info.metadata.get("user_id") + + if user_id and tenant_id: + stmt = ( + select(Account.name) + .join(TenantAccountJoin, Account.id == TenantAccountJoin.account_id) + .where(Account.id == user_id, TenantAccountJoin.tenant_id == tenant_id) + ) + + session_maker = sessionmaker(bind=db.engine) + with session_maker() as session: + account_name = session.scalar(stmt) + return account_name or str(user_id) + elif user_id: + return str(user_id) + + return "anonymous" + + except Exception: + logger.exception("[Tencent APM] Failed to get user ID") + return "unknown" + + def _record_llm_metrics(self, node_execution: WorkflowNodeExecution) -> None: + """Record LLM performance metrics""" + try: + if not hasattr(self.trace_client, "record_llm_duration"): + return + + process_data = node_execution.process_data or {} + usage = process_data.get("usage", {}) + latency_s = float(usage.get("latency", 0.0)) + + if latency_s > 0: + attributes = { + "provider": process_data.get("model_provider", ""), + "model": process_data.get("model_name", ""), + "span_kind": "GENERATION", + } + self.trace_client.record_llm_duration(latency_s, attributes) + + except Exception: + logger.debug("[Tencent APM] Failed to record LLM metrics") + + def __del__(self): + """Ensure proper cleanup on garbage collection.""" + try: + if hasattr(self, "trace_client"): + self.trace_client.shutdown() + except Exception: + pass diff --git a/api/core/ops/tencent_trace/utils.py b/api/core/ops/tencent_trace/utils.py new file mode 100644 index 0000000000..96087951ab --- /dev/null +++ b/api/core/ops/tencent_trace/utils.py @@ -0,0 +1,65 @@ +""" +Utility functions for Tencent APM tracing +""" + +import hashlib +import random +import uuid +from datetime import datetime +from typing import cast + +from opentelemetry.trace import Link, SpanContext, TraceFlags + + +class TencentTraceUtils: + """Utility class for common tracing operations.""" + + INVALID_SPAN_ID = 0x0000000000000000 + INVALID_TRACE_ID = 0x00000000000000000000000000000000 + + @staticmethod + def convert_to_trace_id(uuid_v4: str | None) -> int: + try: + uuid_obj = uuid.UUID(uuid_v4) if uuid_v4 else uuid.uuid4() + except Exception as e: + raise ValueError(f"Invalid UUID input: {e}") + return cast(int, uuid_obj.int) + + @staticmethod + def convert_to_span_id(uuid_v4: str | None, span_type: str) -> int: + try: + uuid_obj = uuid.UUID(uuid_v4) if uuid_v4 else uuid.uuid4() + except Exception as e: + raise ValueError(f"Invalid UUID input: {e}") + combined_key = f"{uuid_obj.hex}-{span_type}" + hash_bytes = hashlib.sha256(combined_key.encode("utf-8")).digest() + return int.from_bytes(hash_bytes[:8], byteorder="big", signed=False) + + @staticmethod + def 
generate_span_id() -> int: + span_id = random.getrandbits(64) + while span_id == TencentTraceUtils.INVALID_SPAN_ID: + span_id = random.getrandbits(64) + return span_id + + @staticmethod + def convert_datetime_to_nanoseconds(start_time: datetime | None) -> int: + if start_time is None: + start_time = datetime.now() + timestamp_in_seconds = start_time.timestamp() + return int(timestamp_in_seconds * 1e9) + + @staticmethod + def create_link(trace_id_str: str) -> Link: + try: + trace_id = int(trace_id_str, 16) if len(trace_id_str) == 32 else cast(int, uuid.UUID(trace_id_str).int) + except (ValueError, TypeError): + trace_id = cast(int, uuid.uuid4().int) + + span_context = SpanContext( + trace_id=trace_id, + span_id=TencentTraceUtils.INVALID_SPAN_ID, + is_remote=False, + trace_flags=TraceFlags(TraceFlags.SAMPLED), + ) + return Link(span_context) diff --git a/api/core/plugin/backwards_invocation/app.py b/api/core/plugin/backwards_invocation/app.py index 8b08b09eb9..32ac132e1e 100644 --- a/api/core/plugin/backwards_invocation/app.py +++ b/api/core/plugin/backwards_invocation/app.py @@ -14,7 +14,7 @@ from core.app.apps.workflow.app_generator import WorkflowAppGenerator from core.app.entities.app_invoke_entities import InvokeFrom from core.plugin.backwards_invocation.base import BaseBackwardsInvocation from extensions.ext_database import db -from models.account import Account +from models import Account from models.model import App, AppMode, EndUser diff --git a/api/core/plugin/impl/base.py b/api/core/plugin/impl/base.py index 952fefdbbc..5095b46432 100644 --- a/api/core/plugin/impl/base.py +++ b/api/core/plugin/impl/base.py @@ -2,7 +2,7 @@ import inspect import json import logging from collections.abc import Callable, Generator -from typing import Any, TypeVar +from typing import Any, TypeVar, cast import httpx from pydantic import BaseModel @@ -31,6 +31,17 @@ from core.plugin.impl.exc import ( ) plugin_daemon_inner_api_baseurl = URL(str(dify_config.PLUGIN_DAEMON_URL)) +_plugin_daemon_timeout_config = cast( + float | httpx.Timeout | None, + getattr(dify_config, "PLUGIN_DAEMON_TIMEOUT", 300.0), +) +plugin_daemon_request_timeout: httpx.Timeout | None +if _plugin_daemon_timeout_config is None: + plugin_daemon_request_timeout = None +elif isinstance(_plugin_daemon_timeout_config, httpx.Timeout): + plugin_daemon_request_timeout = _plugin_daemon_timeout_config +else: + plugin_daemon_request_timeout = httpx.Timeout(_plugin_daemon_timeout_config) T = TypeVar("T", bound=(BaseModel | dict | list | bool | str)) @@ -58,6 +69,7 @@ class BasePluginClient: "headers": headers, "params": params, "files": files, + "timeout": plugin_daemon_request_timeout, } if isinstance(prepared_data, dict): request_kwargs["data"] = prepared_data @@ -116,6 +128,7 @@ class BasePluginClient: "headers": headers, "params": params, "files": files, + "timeout": plugin_daemon_request_timeout, } if isinstance(prepared_data, dict): stream_kwargs["data"] = prepared_data diff --git a/api/core/prompt/advanced_prompt_transform.py b/api/core/prompt/advanced_prompt_transform.py index 5f2ffefd94..d74b2bddf5 100644 --- a/api/core/prompt/advanced_prompt_transform.py +++ b/api/core/prompt/advanced_prompt_transform.py @@ -18,7 +18,7 @@ from core.model_runtime.entities.message_entities import ImagePromptMessageConte from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, CompletionModelPromptTemplate, MemoryConfig from core.prompt.prompt_transform import PromptTransform from core.prompt.utils.prompt_template_parser import 
PromptTemplateParser -from core.workflow.entities.variable_pool import VariablePool +from core.workflow.runtime import VariablePool class AdvancedPromptTransform(PromptTransform): diff --git a/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py b/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py index 8820c0a846..d2d8fcf964 100644 --- a/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py +++ b/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py @@ -1,9 +1,24 @@ +""" +Weaviate vector database implementation for Dify's RAG system. + +This module provides integration with Weaviate vector database for storing and retrieving +document embeddings used in retrieval-augmented generation workflows. +""" + import datetime import json +import logging +import uuid as _uuid from typing import Any +from urllib.parse import urlparse -import weaviate # type: ignore +import weaviate +import weaviate.classes.config as wc from pydantic import BaseModel, model_validator +from weaviate.classes.data import DataObject +from weaviate.classes.init import Auth +from weaviate.classes.query import Filter, MetadataQuery +from weaviate.exceptions import UnexpectedStatusCodeError from configs import dify_config from core.rag.datasource.vdb.field import Field @@ -15,265 +30,396 @@ from core.rag.models.document import Document from extensions.ext_redis import redis_client from models.dataset import Dataset +logger = logging.getLogger(__name__) + class WeaviateConfig(BaseModel): + """ + Configuration model for Weaviate connection settings. + + Attributes: + endpoint: Weaviate server endpoint URL + api_key: Optional API key for authentication + batch_size: Number of objects to batch per insert operation + """ + endpoint: str api_key: str | None = None batch_size: int = 100 @model_validator(mode="before") @classmethod - def validate_config(cls, values: dict): + def validate_config(cls, values: dict) -> dict: + """Validates that required configuration values are present.""" if not values["endpoint"]: raise ValueError("config WEAVIATE_ENDPOINT is required") return values class WeaviateVector(BaseVector): + """ + Weaviate vector database implementation for document storage and retrieval. + + Handles creation, insertion, deletion, and querying of document embeddings + in a Weaviate collection. + """ + def __init__(self, collection_name: str, config: WeaviateConfig, attributes: list): + """ + Initializes the Weaviate vector store. + + Args: + collection_name: Name of the Weaviate collection + config: Weaviate configuration settings + attributes: List of metadata attributes to store + """ super().__init__(collection_name) self._client = self._init_client(config) self._attributes = attributes - def _init_client(self, config: WeaviateConfig) -> weaviate.Client: - auth_config = weaviate.AuthApiKey(api_key=config.api_key or "") + def _init_client(self, config: WeaviateConfig) -> weaviate.WeaviateClient: + """ + Initializes and returns a connected Weaviate client. - weaviate.connect.connection.has_grpc = False # ty: ignore [unresolved-attribute] + Configures both HTTP and gRPC connections with proper authentication. 
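+
+        Port selection sketch (hypothetical endpoints):
+            "https://weaviate.example.com" -> HTTP 443 with TLS, gRPC 443 with TLS
+            "http://weaviate:8080"         -> HTTP 8080, gRPC 50051, both plaintext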
+ """ + p = urlparse(config.endpoint) + host = p.hostname or config.endpoint.replace("https://", "").replace("http://", "") + http_secure = p.scheme == "https" + http_port = p.port or (443 if http_secure else 80) - try: - client = weaviate.Client( - url=config.endpoint, auth_client_secret=auth_config, timeout_config=(5, 60), startup_period=None - ) - except Exception as exc: - raise ConnectionError("Vector database connection error") from exc + grpc_host = host + grpc_secure = http_secure + grpc_port = 443 if grpc_secure else 50051 - client.batch.configure( - # `batch_size` takes an `int` value to enable auto-batching - # (`None` is used for manual batching) - batch_size=config.batch_size, - # dynamically update the `batch_size` based on import speed - dynamic=True, - # `timeout_retries` takes an `int` value to retry on time outs - timeout_retries=3, + client = weaviate.connect_to_custom( + http_host=host, + http_port=http_port, + http_secure=http_secure, + grpc_host=grpc_host, + grpc_port=grpc_port, + grpc_secure=grpc_secure, + auth_credentials=Auth.api_key(config.api_key) if config.api_key else None, ) + if not client.is_ready(): + raise ConnectionError("Vector database is not ready") + return client def get_type(self) -> str: + """Returns the vector database type identifier.""" return VectorType.WEAVIATE def get_collection_name(self, dataset: Dataset) -> str: + """ + Retrieves or generates the collection name for a dataset. + + Uses existing index structure if available, otherwise generates from dataset ID. + """ if dataset.index_struct_dict: class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] if not class_prefix.endswith("_Node"): - # original class_prefix class_prefix += "_Node" - return class_prefix dataset_id = dataset.id return Dataset.gen_collection_name_by_id(dataset_id) - def to_index_struct(self): + def to_index_struct(self) -> dict: + """Returns the index structure dictionary for persistence.""" return {"type": self.get_type(), "vector_store": {"class_prefix": self._collection_name}} def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs): - # create collection + """ + Creates a new collection and adds initial documents with embeddings. + """ self._create_collection() - # create vector self.add_texts(texts, embeddings) def _create_collection(self): + """ + Creates the Weaviate collection with required schema if it doesn't exist. + + Uses Redis locking to prevent concurrent creation attempts. 
+ """ lock_name = f"vector_indexing_lock_{self._collection_name}" with redis_client.lock(lock_name, timeout=20): - collection_exist_cache_key = f"vector_indexing_{self._collection_name}" - if redis_client.get(collection_exist_cache_key): + cache_key = f"vector_indexing_{self._collection_name}" + if redis_client.get(cache_key): return - schema = self._default_schema(self._collection_name) - if not self._client.schema.contains(schema): - # create collection - self._client.schema.create_class(schema) - redis_client.set(collection_exist_cache_key, 1, ex=3600) + + try: + if not self._client.collections.exists(self._collection_name): + self._client.collections.create( + name=self._collection_name, + properties=[ + wc.Property( + name=Field.TEXT_KEY.value, + data_type=wc.DataType.TEXT, + tokenization=wc.Tokenization.WORD, + ), + wc.Property(name="document_id", data_type=wc.DataType.TEXT), + wc.Property(name="doc_id", data_type=wc.DataType.TEXT), + wc.Property(name="chunk_index", data_type=wc.DataType.INT), + ], + vector_config=wc.Configure.Vectors.self_provided(), + ) + + self._ensure_properties() + redis_client.set(cache_key, 1, ex=3600) + except Exception as e: + logger.exception("Error creating collection %s", self._collection_name) + raise + + def _ensure_properties(self) -> None: + """ + Ensures all required properties exist in the collection schema. + + Adds missing properties if the collection exists but lacks them. + """ + if not self._client.collections.exists(self._collection_name): + return + + col = self._client.collections.use(self._collection_name) + cfg = col.config.get() + existing = {p.name for p in (cfg.properties or [])} + + to_add = [] + if "document_id" not in existing: + to_add.append(wc.Property(name="document_id", data_type=wc.DataType.TEXT)) + if "doc_id" not in existing: + to_add.append(wc.Property(name="doc_id", data_type=wc.DataType.TEXT)) + if "chunk_index" not in existing: + to_add.append(wc.Property(name="chunk_index", data_type=wc.DataType.INT)) + + for prop in to_add: + try: + col.config.add_property(prop) + except Exception as e: + logger.warning("Could not add property %s: %s", prop.name, e) + + def _get_uuids(self, documents: list[Document]) -> list[str]: + """ + Generates deterministic UUIDs for documents based on their content. + + Uses UUID5 with URL namespace to ensure consistent IDs for identical content. + """ + URL_NAMESPACE = _uuid.UUID("6ba7b811-9dad-11d1-80b4-00c04fd430c8") + + uuids = [] + for doc in documents: + uuid_val = _uuid.uuid5(URL_NAMESPACE, doc.page_content) + uuids.append(str(uuid_val)) + + return uuids def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): + """ + Adds documents with their embeddings to the collection. + + Batches insertions for efficiency and returns the list of inserted object IDs. 
+ """ uuids = self._get_uuids(documents) texts = [d.page_content for d in documents] metadatas = [d.metadata for d in documents] - ids = [] + col = self._client.collections.use(self._collection_name) + objs: list[DataObject] = [] + ids_out: list[str] = [] - with self._client.batch as batch: - for i, text in enumerate(texts): - data_properties = {Field.TEXT_KEY: text} - if metadatas is not None: - # metadata maybe None - for key, val in (metadatas[i] or {}).items(): - data_properties[key] = self._json_serializable(val) + for i, text in enumerate(texts): + props: dict[str, Any] = {Field.TEXT_KEY.value: text} + meta = metadatas[i] or {} + for k, v in meta.items(): + props[k] = self._json_serializable(v) - batch.add_data_object( - data_object=data_properties, - class_name=self._collection_name, - uuid=uuids[i], - vector=embeddings[i] if embeddings else None, + candidate = uuids[i] if uuids else None + uid = candidate if (candidate and self._is_uuid(candidate)) else str(_uuid.uuid4()) + ids_out.append(uid) + + vec_payload = None + if embeddings and i < len(embeddings) and embeddings[i]: + vec_payload = {"default": embeddings[i]} + + objs.append( + DataObject( + uuid=uid, + properties=props, # type: ignore[arg-type] # mypy incorrectly infers DataObject signature + vector=vec_payload, ) - ids.append(uuids[i]) - return ids + ) - def delete_by_metadata_field(self, key: str, value: str): - # check whether the index already exists - schema = self._default_schema(self._collection_name) - if self._client.schema.contains(schema): - where_filter = {"operator": "Equal", "path": [key], "valueText": value} + with col.batch.dynamic() as batch: + for obj in objs: + batch.add_object(properties=obj.properties, uuid=obj.uuid, vector=obj.vector) - self._client.batch.delete_objects(class_name=self._collection_name, where=where_filter, output="minimal") + return ids_out + + def _is_uuid(self, val: str) -> bool: + """Validates whether a string is a valid UUID format.""" + try: + _uuid.UUID(str(val)) + return True + except Exception: + return False + + def delete_by_metadata_field(self, key: str, value: str) -> None: + """Deletes all objects matching a specific metadata field value.""" + if not self._client.collections.exists(self._collection_name): + return + + col = self._client.collections.use(self._collection_name) + col.data.delete_many(where=Filter.by_property(key).equal(value)) def delete(self): - # check whether the index already exists - schema = self._default_schema(self._collection_name) - if self._client.schema.contains(schema): - self._client.schema.delete_class(self._collection_name) + """Deletes the entire collection from Weaviate.""" + if self._client.collections.exists(self._collection_name): + self._client.collections.delete(self._collection_name) def text_exists(self, id: str) -> bool: - collection_name = self._collection_name - schema = self._default_schema(self._collection_name) - - # check whether the index already exists - if not self._client.schema.contains(schema): + """Checks if a document with the given doc_id exists in the collection.""" + if not self._client.collections.exists(self._collection_name): return False - result = ( - self._client.query.get(collection_name) - .with_additional(["id"]) - .with_where( - { - "path": ["doc_id"], - "operator": "Equal", - "valueText": id, - } - ) - .with_limit(1) - .do() + + col = self._client.collections.use(self._collection_name) + res = col.query.fetch_objects( + filters=Filter.by_property("doc_id").equal(id), + limit=1, + 
return_properties=["doc_id"], ) - if "errors" in result: - raise ValueError(f"Error during query: {result['errors']}") + return len(res.objects) > 0 - entries = result["data"]["Get"][collection_name] - if len(entries) == 0: - return False + def delete_by_ids(self, ids: list[str]) -> None: + """ + Deletes objects by their UUID identifiers. - return True + Silently ignores 404 errors for non-existent IDs. + """ + if not self._client.collections.exists(self._collection_name): + return - def delete_by_ids(self, ids: list[str]): - # check whether the index already exists - schema = self._default_schema(self._collection_name) - if self._client.schema.contains(schema): - for uuid in ids: - try: - self._client.data_object.delete( - class_name=self._collection_name, - uuid=uuid, - ) - except weaviate.UnexpectedStatusCodeException as e: - # tolerate not found error - if e.status_code != 404: - raise e + col = self._client.collections.use(self._collection_name) + + for uid in ids: + try: + col.data.delete_by_id(uid) + except UnexpectedStatusCodeError as e: + if getattr(e, "status_code", None) != 404: + raise def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: - """Look up similar documents by embedding vector in Weaviate.""" - collection_name = self._collection_name - properties = self._attributes - properties.append(Field.TEXT_KEY) - query_obj = self._client.query.get(collection_name, properties) + """ + Performs vector similarity search using the provided query vector. - vector = {"vector": query_vector} - document_ids_filter = kwargs.get("document_ids_filter") - if document_ids_filter: - operands = [] - for document_id_filter in document_ids_filter: - operands.append({"path": ["document_id"], "operator": "Equal", "valueText": document_id_filter}) - where_filter = {"operator": "Or", "operands": operands} - query_obj = query_obj.with_where(where_filter) - result = ( - query_obj.with_near_vector(vector) - .with_limit(kwargs.get("top_k", 4)) - .with_additional(["vector", "distance"]) - .do() + Filters by document IDs if provided and applies score threshold. + Returns documents sorted by relevance score. 
+ """ + if not self._client.collections.exists(self._collection_name): + return [] + + col = self._client.collections.use(self._collection_name) + props = list({*self._attributes, "document_id", Field.TEXT_KEY.value}) + + where = None + doc_ids = kwargs.get("document_ids_filter") or [] + if doc_ids: + ors = [Filter.by_property("document_id").equal(x) for x in doc_ids] + where = ors[0] + for f in ors[1:]: + where = where | f + + top_k = int(kwargs.get("top_k", 4)) + score_threshold = float(kwargs.get("score_threshold") or 0.0) + + res = col.query.near_vector( + near_vector=query_vector, + limit=top_k, + return_properties=props, + return_metadata=MetadataQuery(distance=True), + include_vector=False, + filters=where, + target_vector="default", ) - if "errors" in result: - raise ValueError(f"Error during query: {result['errors']}") - docs_and_scores = [] - for res in result["data"]["Get"][collection_name]: - text = res.pop(Field.TEXT_KEY) - score = 1 - res["_additional"]["distance"] - docs_and_scores.append((Document(page_content=text, metadata=res), score)) + docs: list[Document] = [] + for obj in res.objects: + properties = dict(obj.properties or {}) + text = properties.pop(Field.TEXT_KEY.value, "") + if obj.metadata and obj.metadata.distance is not None: + distance = obj.metadata.distance + else: + distance = 1.0 + score = 1.0 - distance - docs = [] - for doc, score in docs_and_scores: - score_threshold = float(kwargs.get("score_threshold") or 0.0) - # check score threshold - if score >= score_threshold: - if doc.metadata is not None: - doc.metadata["score"] = score - docs.append(doc) - # Sort the documents by score in descending order - docs = sorted(docs, key=lambda x: x.metadata.get("score", 0) if x.metadata else 0, reverse=True) + if score > score_threshold: + properties["score"] = score + docs.append(Document(page_content=text, metadata=properties)) + + docs.sort(key=lambda d: d.metadata.get("score", 0.0), reverse=True) return docs def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: - """Return docs using BM25F. - - Args: - query: Text to look up documents similar to. - - Returns: - List of Documents most similar to the query. """ - collection_name = self._collection_name - content: dict[str, Any] = {"concepts": [query]} - properties = self._attributes - properties.append(Field.TEXT_KEY) - if kwargs.get("search_distance"): - content["certainty"] = kwargs.get("search_distance") - query_obj = self._client.query.get(collection_name, properties) - document_ids_filter = kwargs.get("document_ids_filter") - if document_ids_filter: - operands = [] - for document_id_filter in document_ids_filter: - operands.append({"path": ["document_id"], "operator": "Equal", "valueText": document_id_filter}) - where_filter = {"operator": "Or", "operands": operands} - query_obj = query_obj.with_where(where_filter) - query_obj = query_obj.with_additional(["vector"]) - properties = ["text"] - result = query_obj.with_bm25(query=query, properties=properties).with_limit(kwargs.get("top_k", 4)).do() - if "errors" in result: - raise ValueError(f"Error during query: {result['errors']}") - docs = [] - for res in result["data"]["Get"][collection_name]: - text = res.pop(Field.TEXT_KEY) - additional = res.pop("_additional") - docs.append(Document(page_content=text, vector=additional["vector"], metadata=res)) + Performs BM25 full-text search on document content. + + Filters by document IDs if provided and returns matching documents with vectors. 
+ """ + if not self._client.collections.exists(self._collection_name): + return [] + + col = self._client.collections.use(self._collection_name) + props = list({*self._attributes, Field.TEXT_KEY.value}) + + where = None + doc_ids = kwargs.get("document_ids_filter") or [] + if doc_ids: + ors = [Filter.by_property("document_id").equal(x) for x in doc_ids] + where = ors[0] + for f in ors[1:]: + where = where | f + + top_k = int(kwargs.get("top_k", 4)) + + res = col.query.bm25( + query=query, + query_properties=[Field.TEXT_KEY.value], + limit=top_k, + return_properties=props, + include_vector=True, + filters=where, + ) + + docs: list[Document] = [] + for obj in res.objects: + properties = dict(obj.properties or {}) + text = properties.pop(Field.TEXT_KEY.value, "") + + vec = obj.vector + if isinstance(vec, dict): + vec = vec.get("default") or next(iter(vec.values()), None) + + docs.append(Document(page_content=text, vector=vec, metadata=properties)) return docs - def _default_schema(self, index_name: str): - return { - "class": index_name, - "properties": [ - { - "name": "text", - "dataType": ["text"], - } - ], - } - - def _json_serializable(self, value: Any): + def _json_serializable(self, value: Any) -> Any: + """Converts values to JSON-serializable format, handling datetime objects.""" if isinstance(value, datetime.datetime): return value.isoformat() return value class WeaviateVectorFactory(AbstractVectorFactory): + """Factory class for creating WeaviateVector instances.""" + def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> WeaviateVector: + """ + Initializes a WeaviateVector instance for the given dataset. + + Uses existing collection name from dataset index structure or generates a new one. + Updates dataset index structure if not already set. 
+ """ if dataset.index_struct_dict: class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] collection_name = class_prefix @@ -281,7 +427,6 @@ class WeaviateVectorFactory(AbstractVectorFactory): dataset_id = dataset.id collection_name = Dataset.gen_collection_name_by_id(dataset_id) dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.WEAVIATE, collection_name)) - return WeaviateVector( collection_name=collection_name, config=WeaviateConfig( diff --git a/api/core/rag/embedding/cached_embedding.py b/api/core/rag/embedding/cached_embedding.py index c2f17cd148..937b8f033c 100644 --- a/api/core/rag/embedding/cached_embedding.py +++ b/api/core/rag/embedding/cached_embedding.py @@ -43,8 +43,7 @@ class CacheEmbedding(Embeddings): else: embedding_queue_indices.append(i) - # release database connection, because embedding may take a long time - db.session.close() + # NOTE: avoid closing the shared scoped session here; downstream code may still have pending work if embedding_queue_indices: embedding_queue_texts = [texts[i] for i in embedding_queue_indices] diff --git a/api/core/rag/extractor/firecrawl/firecrawl_app.py b/api/core/rag/extractor/firecrawl/firecrawl_app.py index c20ecd2b89..789ac8557d 100644 --- a/api/core/rag/extractor/firecrawl/firecrawl_app.py +++ b/api/core/rag/extractor/firecrawl/firecrawl_app.py @@ -25,7 +25,7 @@ class FirecrawlApp: } if params: json_data.update(params) - response = self._post_request(f"{self.base_url}/v1/scrape", json_data, headers) + response = self._post_request(f"{self.base_url}/v2/scrape", json_data, headers) if response.status_code == 200: response_data = response.json() data = response_data["data"] @@ -42,7 +42,7 @@ class FirecrawlApp: json_data = {"url": url} if params: json_data.update(params) - response = self._post_request(f"{self.base_url}/v1/crawl", json_data, headers) + response = self._post_request(f"{self.base_url}/v2/crawl", json_data, headers) if response.status_code == 200: # There's also another two fields in the response: "success" (bool) and "url" (str) job_id = response.json().get("id") @@ -51,9 +51,25 @@ class FirecrawlApp: self._handle_error(response, "start crawl job") return "" # unreachable + def map(self, url: str, params: dict[str, Any] | None = None) -> dict[str, Any]: + # Documentation: https://docs.firecrawl.dev/api-reference/endpoint/map + headers = self._prepare_headers() + json_data: dict[str, Any] = {"url": url, "integration": "dify"} + if params: + # Pass through provided params, including optional "sitemap": "only" | "include" | "skip" + json_data.update(params) + response = self._post_request(f"{self.base_url}/v2/map", json_data, headers) + if response.status_code == 200: + return cast(dict[str, Any], response.json()) + elif response.status_code in {402, 409, 500, 429, 408}: + self._handle_error(response, "start map job") + return {} + else: + raise Exception(f"Failed to start map job. 
Status code: {response.status_code}") + def check_crawl_status(self, job_id) -> dict[str, Any]: headers = self._prepare_headers() - response = self._get_request(f"{self.base_url}/v1/crawl/{job_id}", headers) + response = self._get_request(f"{self.base_url}/v2/crawl/{job_id}", headers) if response.status_code == 200: crawl_status_response = response.json() if crawl_status_response.get("status") == "completed": @@ -135,12 +151,16 @@ class FirecrawlApp: "lang": "en", "country": "us", "timeout": 60000, - "ignoreInvalidURLs": False, + "ignoreInvalidURLs": True, "scrapeOptions": {}, + "sources": [ + {"type": "web"}, + ], + "integration": "dify", } if params: json_data.update(params) - response = self._post_request(f"{self.base_url}/v1/search", json_data, headers) + response = self._post_request(f"{self.base_url}/v2/search", json_data, headers) if response.status_code == 200: response_data = response.json() if not response_data.get("success"): diff --git a/api/core/repositories/celery_workflow_execution_repository.py b/api/core/repositories/celery_workflow_execution_repository.py index eda7b54d6a..460bb75722 100644 --- a/api/core/repositories/celery_workflow_execution_repository.py +++ b/api/core/repositories/celery_workflow_execution_repository.py @@ -108,7 +108,7 @@ class CeleryWorkflowExecutionRepository(WorkflowExecutionRepository): execution_data = execution.model_dump() # Queue the save operation as a Celery task (fire and forget) - save_workflow_execution_task.delay( + save_workflow_execution_task.delay( # type: ignore execution_data=execution_data, tenant_id=self._tenant_id, app_id=self._app_id or "", diff --git a/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py index 4399ec01cc..4436773d25 100644 --- a/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py +++ b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py @@ -104,7 +104,6 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) self._creator_user_role = CreatorUserRole.ACCOUNT if isinstance(user, Account) else CreatorUserRole.END_USER # Initialize in-memory cache for node executions - # Key: node_execution_id, Value: WorkflowNodeExecution (DB model) self._node_execution_cache: dict[str, WorkflowNodeExecutionModel] = {} # Initialize FileService for handling offloaded data @@ -332,17 +331,10 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) Args: execution: The NodeExecution domain entity to persist """ - # NOTE: As per the implementation of `WorkflowCycleManager`, - # the `save` method is invoked multiple times during the node's execution lifecycle, including: - # - # - When the node starts execution - # - When the node retries execution - # - When the node completes execution (either successfully or with failure) - # - # Only the final invocation will have `inputs` and `outputs` populated. - # - # This simplifies the logic for saving offloaded variables but introduces a tight coupling - # between this module and `WorkflowCycleManager`. + # NOTE: The workflow engine triggers `save` multiple times for a single node execution: + # when the node starts, any time it retries, and once more when it reaches a terminal state. + # Only the final call contains the complete inputs and outputs payloads, so earlier invocations + # must tolerate missing data without attempting to offload variables. 
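+        # Sketch of the observed call sequence (statuses are illustrative):
+        #   save(execution)  # node started  -> inputs/outputs still empty
+        #   save(execution)  # node retried  -> inputs/outputs still empty
+        #   save(execution)  # node finished -> full payloads; offloading happens here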
# Convert domain model to database model using tenant context and other attributes db_model = self._to_db_model(execution) diff --git a/api/core/tools/custom_tool/tool.py b/api/core/tools/custom_tool/tool.py index f18f638f2d..54c266ffcc 100644 --- a/api/core/tools/custom_tool/tool.py +++ b/api/core/tools/custom_tool/tool.py @@ -395,11 +395,13 @@ class ApiTool(Tool): parsed_response = self.validate_and_parse_response(response) # assemble invoke message based on response type - if parsed_response.is_json and isinstance(parsed_response.content, dict): - yield self.create_json_message(parsed_response.content) + if parsed_response.is_json: + if isinstance(parsed_response.content, dict): + yield self.create_json_message(parsed_response.content) - # FIXES: https://github.com/langgenius/dify/pull/23456#issuecomment-3182413088 - # We need never break the original flows + # The yield below must be preserved to keep backward compatibility. + # + # ref: https://github.com/langgenius/dify/pull/23456#issuecomment-3182413088 yield self.create_text_message(response.text) else: # Convert to string if needed and create text message diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index 62e3aa8b5d..15a4f0aafd 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -189,6 +189,11 @@ class ToolInvokeMessage(BaseModel): data: Mapping[str, Any] = Field(..., description="Detailed log data") metadata: Mapping[str, Any] = Field(default_factory=dict, description="The metadata of the log") + @field_validator("metadata", mode="before") + @classmethod + def _normalize_metadata(cls, value: Mapping[str, Any] | None) -> Mapping[str, Any]: + return value or {} + class RetrieverResourceMessage(BaseModel): retriever_resources: list[RetrievalSourceMetadata] = Field(..., description="retriever resources") context: str = Field(..., description="context") @@ -376,6 +381,11 @@ class ToolEntity(BaseModel): def set_parameters(cls, v, validation_info: ValidationInfo) -> list[ToolParameter]: return v or [] + @field_validator("output_schema", mode="before") + @classmethod + def _normalize_output_schema(cls, value: Mapping[str, object] | None) -> Mapping[str, object]: + return value or {} + class OAuthSchema(BaseModel): client_schema: list[ProviderConfig] = Field( diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index af68971ca7..006cf856d5 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -58,8 +58,8 @@ from services.tools.mcp_tools_manage_service import MCPToolManageService from services.tools.tools_transform_service import ToolTransformService if TYPE_CHECKING: - from core.workflow.entities import VariablePool from core.workflow.nodes.tool.entities import ToolEntity + from core.workflow.runtime import VariablePool logger = logging.getLogger(__name__) diff --git a/api/core/tools/utils/message_transformer.py b/api/core/tools/utils/message_transformer.py index 0851a54338..ca2aa39861 100644 --- a/api/core/tools/utils/message_transformer.py +++ b/api/core/tools/utils/message_transformer.py @@ -12,7 +12,7 @@ from core.file import File, FileTransferMethod, FileType from core.tools.entities.tool_entities import ToolInvokeMessage from core.tools.tool_file_manager import ToolFileManager from libs.login import current_user -from models.account import Account +from models import Account logger = logging.getLogger(__name__) diff --git a/api/core/tools/workflow_as_tool/tool.py 
b/api/core/tools/workflow_as_tool/tool.py index 5adf04611d..50c2327004 100644 --- a/api/core/tools/workflow_as_tool/tool.py +++ b/api/core/tools/workflow_as_tool/tool.py @@ -3,6 +3,7 @@ import logging from collections.abc import Generator from typing import Any +from flask import has_request_context from sqlalchemy import select from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod @@ -18,7 +19,8 @@ from core.tools.errors import ToolInvokeError from extensions.ext_database import db from factories.file_factory import build_from_mapping from libs.login import current_user -from models.model import App +from models import Account, Tenant +from models.model import App, EndUser from models.workflow import Workflow logger = logging.getLogger(__name__) @@ -79,11 +81,16 @@ class WorkflowTool(Tool): generator = WorkflowAppGenerator() assert self.runtime is not None assert self.runtime.invoke_from is not None - assert current_user is not None + + user = self._resolve_user(user_id=user_id) + + if user is None: + raise ToolInvokeError("User not found") + result = generator.generate( app_model=app, workflow=workflow, - user=current_user, + user=user, args={"inputs": tool_parameters, "files": files}, invoke_from=self.runtime.invoke_from, streaming=False, @@ -123,6 +130,51 @@ class WorkflowTool(Tool): label=self.label, ) + def _resolve_user(self, user_id: str) -> Account | EndUser | None: + """ + Resolve user object in both HTTP and worker contexts. + + In HTTP context: dereference the current_user LocalProxy (can return Account or EndUser). + In worker context: load Account from database by user_id (only returns Account, never EndUser). + + Returns: + Account | EndUser | None: The resolved user object, or None if resolution fails. + """ + if has_request_context(): + return self._resolve_user_from_request() + else: + return self._resolve_user_from_database(user_id=user_id) + + def _resolve_user_from_request(self) -> Account | EndUser | None: + """ + Resolve user from Flask request context. + """ + try: + # Note: `current_user` is a LocalProxy. Never compare it with None directly. + return getattr(current_user, "_get_current_object", lambda: current_user)() + except Exception as e: + logger.warning("Failed to resolve user from request context: %s", e) + return None + + def _resolve_user_from_database(self, user_id: str) -> Account | None: + """ + Resolve user from database (worker/Celery context). 
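+
+        The loaded Account is re-bound to the tool runtime's tenant before it is
+        returned, because Celery workers have no request-scoped tenant context.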
+ """ + + user_stmt = select(Account).where(Account.id == user_id) + user = db.session.scalar(user_stmt) + if not user: + return None + + tenant_stmt = select(Tenant).where(Tenant.id == self.runtime.tenant_id) + tenant = db.session.scalar(tenant_stmt) + if not tenant: + return None + + user.current_tenant = tenant + + return user + def _get_workflow(self, app_id: str, version: str) -> Workflow: """ get the workflow by app id and version diff --git a/api/core/workflow/entities/__init__.py b/api/core/workflow/entities/__init__.py index 007bf42aa6..be70e467a0 100644 --- a/api/core/workflow/entities/__init__.py +++ b/api/core/workflow/entities/__init__.py @@ -1,18 +1,11 @@ from .agent import AgentNodeStrategyInit from .graph_init_params import GraphInitParams -from .graph_runtime_state import GraphRuntimeState -from .run_condition import RunCondition -from .variable_pool import VariablePool, VariableValue from .workflow_execution import WorkflowExecution from .workflow_node_execution import WorkflowNodeExecution __all__ = [ "AgentNodeStrategyInit", "GraphInitParams", - "GraphRuntimeState", - "RunCondition", - "VariablePool", - "VariableValue", "WorkflowExecution", "WorkflowNodeExecution", ] diff --git a/api/core/workflow/entities/graph_runtime_state.py b/api/core/workflow/entities/graph_runtime_state.py deleted file mode 100644 index 6362f291ea..0000000000 --- a/api/core/workflow/entities/graph_runtime_state.py +++ /dev/null @@ -1,160 +0,0 @@ -from copy import deepcopy - -from pydantic import BaseModel, PrivateAttr - -from core.model_runtime.entities.llm_entities import LLMUsage - -from .variable_pool import VariablePool - - -class GraphRuntimeState(BaseModel): - # Private attributes to prevent direct modification - _variable_pool: VariablePool = PrivateAttr() - _start_at: float = PrivateAttr() - _total_tokens: int = PrivateAttr(default=0) - _llm_usage: LLMUsage = PrivateAttr(default_factory=LLMUsage.empty_usage) - _outputs: dict[str, object] = PrivateAttr(default_factory=dict[str, object]) - _node_run_steps: int = PrivateAttr(default=0) - _ready_queue_json: str = PrivateAttr() - _graph_execution_json: str = PrivateAttr() - _response_coordinator_json: str = PrivateAttr() - - def __init__( - self, - *, - variable_pool: VariablePool, - start_at: float, - total_tokens: int = 0, - llm_usage: LLMUsage | None = None, - outputs: dict[str, object] | None = None, - node_run_steps: int = 0, - ready_queue_json: str = "", - graph_execution_json: str = "", - response_coordinator_json: str = "", - **kwargs: object, - ): - """Initialize the GraphRuntimeState with validation.""" - super().__init__(**kwargs) - - # Initialize private attributes with validation - self._variable_pool = variable_pool - - self._start_at = start_at - - if total_tokens < 0: - raise ValueError("total_tokens must be non-negative") - self._total_tokens = total_tokens - - if llm_usage is None: - llm_usage = LLMUsage.empty_usage() - self._llm_usage = llm_usage - - if outputs is None: - outputs = {} - self._outputs = deepcopy(outputs) - - if node_run_steps < 0: - raise ValueError("node_run_steps must be non-negative") - self._node_run_steps = node_run_steps - - self._ready_queue_json = ready_queue_json - self._graph_execution_json = graph_execution_json - self._response_coordinator_json = response_coordinator_json - - @property - def variable_pool(self) -> VariablePool: - """Get the variable pool.""" - return self._variable_pool - - @property - def start_at(self) -> float: - """Get the start time.""" - return self._start_at - - 
@start_at.setter - def start_at(self, value: float) -> None: - """Set the start time.""" - self._start_at = value - - @property - def total_tokens(self) -> int: - """Get the total tokens count.""" - return self._total_tokens - - @total_tokens.setter - def total_tokens(self, value: int): - """Set the total tokens count.""" - if value < 0: - raise ValueError("total_tokens must be non-negative") - self._total_tokens = value - - @property - def llm_usage(self) -> LLMUsage: - """Get the LLM usage info.""" - # Return a copy to prevent external modification - return self._llm_usage.model_copy() - - @llm_usage.setter - def llm_usage(self, value: LLMUsage): - """Set the LLM usage info.""" - self._llm_usage = value.model_copy() - - @property - def outputs(self) -> dict[str, object]: - """Get a copy of the outputs dictionary.""" - return deepcopy(self._outputs) - - @outputs.setter - def outputs(self, value: dict[str, object]) -> None: - """Set the outputs dictionary.""" - self._outputs = deepcopy(value) - - def set_output(self, key: str, value: object) -> None: - """Set a single output value.""" - self._outputs[key] = deepcopy(value) - - def get_output(self, key: str, default: object = None) -> object: - """Get a single output value.""" - return deepcopy(self._outputs.get(key, default)) - - def update_outputs(self, updates: dict[str, object]) -> None: - """Update multiple output values.""" - for key, value in updates.items(): - self._outputs[key] = deepcopy(value) - - @property - def node_run_steps(self) -> int: - """Get the node run steps count.""" - return self._node_run_steps - - @node_run_steps.setter - def node_run_steps(self, value: int) -> None: - """Set the node run steps count.""" - if value < 0: - raise ValueError("node_run_steps must be non-negative") - self._node_run_steps = value - - def increment_node_run_steps(self) -> None: - """Increment the node run steps by 1.""" - self._node_run_steps += 1 - - def add_tokens(self, tokens: int) -> None: - """Add tokens to the total count.""" - if tokens < 0: - raise ValueError("tokens must be non-negative") - self._total_tokens += tokens - - @property - def ready_queue_json(self) -> str: - """Get a copy of the ready queue state.""" - return self._ready_queue_json - - @property - def graph_execution_json(self) -> str: - """Get a copy of the serialized graph execution state.""" - return self._graph_execution_json - - @property - def response_coordinator_json(self) -> str: - """Get a copy of the serialized response coordinator state.""" - return self._response_coordinator_json diff --git a/api/core/workflow/entities/run_condition.py b/api/core/workflow/entities/run_condition.py deleted file mode 100644 index 7b9a379215..0000000000 --- a/api/core/workflow/entities/run_condition.py +++ /dev/null @@ -1,21 +0,0 @@ -import hashlib -from typing import Literal - -from pydantic import BaseModel - -from core.workflow.utils.condition.entities import Condition - - -class RunCondition(BaseModel): - type: Literal["branch_identify", "condition"] - """condition type""" - - branch_identify: str | None = None - """branch identify like: sourceHandle, required when type is branch_identify""" - - conditions: list[Condition] | None = None - """conditions to run the node, required when type is condition""" - - @property - def hash(self) -> str: - return hashlib.sha256(self.model_dump_json().encode()).hexdigest() diff --git a/api/core/workflow/enums.py b/api/core/workflow/enums.py index eb88bb67ee..83b9281e51 100644 --- a/api/core/workflow/enums.py +++ 
b/api/core/workflow/enums.py @@ -58,6 +58,7 @@ class NodeType(StrEnum): DOCUMENT_EXTRACTOR = "document-extractor" LIST_OPERATOR = "list-operator" AGENT = "agent" + HUMAN_INPUT = "human-input" class NodeExecutionType(StrEnum): @@ -96,6 +97,7 @@ class WorkflowExecutionStatus(StrEnum): FAILED = "failed" STOPPED = "stopped" PARTIAL_SUCCEEDED = "partial-succeeded" + PAUSED = "paused" class WorkflowNodeExecutionMetadataKey(StrEnum): diff --git a/api/core/workflow/graph/__init__.py b/api/core/workflow/graph/__init__.py index 31a81d494e..4830ea83d3 100644 --- a/api/core/workflow/graph/__init__.py +++ b/api/core/workflow/graph/__init__.py @@ -1,16 +1,11 @@ from .edge import Edge -from .graph import Graph, NodeFactory -from .graph_runtime_state_protocol import ReadOnlyGraphRuntimeState, ReadOnlyVariablePool +from .graph import Graph, GraphBuilder, NodeFactory from .graph_template import GraphTemplate -from .read_only_state_wrapper import ReadOnlyGraphRuntimeStateWrapper, ReadOnlyVariablePoolWrapper __all__ = [ "Edge", "Graph", + "GraphBuilder", "GraphTemplate", "NodeFactory", - "ReadOnlyGraphRuntimeState", - "ReadOnlyGraphRuntimeStateWrapper", - "ReadOnlyVariablePool", - "ReadOnlyVariablePoolWrapper", ] diff --git a/api/core/workflow/graph/graph.py b/api/core/workflow/graph/graph.py index 330e14de81..20b5193875 100644 --- a/api/core/workflow/graph/graph.py +++ b/api/core/workflow/graph/graph.py @@ -195,6 +195,12 @@ class Graph: return nodes + @classmethod + def new(cls) -> "GraphBuilder": + """Create a fluent builder for assembling a graph programmatically.""" + + return GraphBuilder(graph_cls=cls) + @classmethod def _mark_inactive_root_branches( cls, @@ -344,3 +350,96 @@ class Graph: """ edge_ids = self.in_edges.get(node_id, []) return [self.edges[eid] for eid in edge_ids if eid in self.edges] + + +@final +class GraphBuilder: + """Fluent helper for constructing simple graphs, primarily for tests.""" + + def __init__(self, *, graph_cls: type[Graph]): + self._graph_cls = graph_cls + self._nodes: list[Node] = [] + self._nodes_by_id: dict[str, Node] = {} + self._edges: list[Edge] = [] + self._edge_counter = 0 + + def add_root(self, node: Node) -> "GraphBuilder": + """Register the root node. 
Must be called exactly once.""" + + if self._nodes: + raise ValueError("Root node has already been added") + self._register_node(node) + self._nodes.append(node) + return self + + def add_node( + self, + node: Node, + *, + from_node_id: str | None = None, + source_handle: str = "source", + ) -> "GraphBuilder": + """Append a node and connect it from the specified predecessor.""" + + if not self._nodes: + raise ValueError("Root node must be added before adding other nodes") + + predecessor_id = from_node_id or self._nodes[-1].id + if predecessor_id not in self._nodes_by_id: + raise ValueError(f"Predecessor node '{predecessor_id}' not found") + + predecessor = self._nodes_by_id[predecessor_id] + self._register_node(node) + self._nodes.append(node) + + edge_id = f"edge_{self._edge_counter}" + self._edge_counter += 1 + edge = Edge(id=edge_id, tail=predecessor.id, head=node.id, source_handle=source_handle) + self._edges.append(edge) + + return self + + def connect(self, *, tail: str, head: str, source_handle: str = "source") -> "GraphBuilder": + """Connect two existing nodes without adding a new node.""" + + if tail not in self._nodes_by_id: + raise ValueError(f"Tail node '{tail}' not found") + if head not in self._nodes_by_id: + raise ValueError(f"Head node '{head}' not found") + + edge_id = f"edge_{self._edge_counter}" + self._edge_counter += 1 + edge = Edge(id=edge_id, tail=tail, head=head, source_handle=source_handle) + self._edges.append(edge) + + return self + + def build(self) -> Graph: + """Materialize the graph instance from the accumulated nodes and edges.""" + + if not self._nodes: + raise ValueError("Cannot build an empty graph") + + nodes = {node.id: node for node in self._nodes} + edges = {edge.id: edge for edge in self._edges} + in_edges: dict[str, list[str]] = defaultdict(list) + out_edges: dict[str, list[str]] = defaultdict(list) + + for edge in self._edges: + out_edges[edge.tail].append(edge.id) + in_edges[edge.head].append(edge.id) + + return self._graph_cls( + nodes=nodes, + edges=edges, + in_edges=dict(in_edges), + out_edges=dict(out_edges), + root_node=self._nodes[0], + ) + + def _register_node(self, node: Node) -> None: + if not node.id: + raise ValueError("Node must have a non-empty id") + if node.id in self._nodes_by_id: + raise ValueError(f"Duplicate node id detected: {node.id}") + self._nodes_by_id[node.id] = node diff --git a/api/core/workflow/graph_engine/command_channels/redis_channel.py b/api/core/workflow/graph_engine/command_channels/redis_channel.py index c841459170..4be3adb8f8 100644 --- a/api/core/workflow/graph_engine/command_channels/redis_channel.py +++ b/api/core/workflow/graph_engine/command_channels/redis_channel.py @@ -9,7 +9,7 @@ Each instance uses a unique key for its command queue. 
import json from typing import TYPE_CHECKING, Any, final -from ..entities.commands import AbortCommand, CommandType, GraphEngineCommand +from ..entities.commands import AbortCommand, CommandType, GraphEngineCommand, PauseCommand if TYPE_CHECKING: from extensions.ext_redis import RedisClientWrapper @@ -41,6 +41,7 @@ class RedisChannel: self._redis = redis_client self._key = channel_key self._command_ttl = command_ttl + self._pending_key = f"{channel_key}:pending" def fetch_commands(self) -> list[GraphEngineCommand]: """ @@ -49,6 +50,9 @@ class RedisChannel: Returns: List of pending commands (drains the Redis list) """ + if not self._has_pending_commands(): + return [] + commands: list[GraphEngineCommand] = [] # Use pipeline for atomic operations @@ -85,6 +89,7 @@ class RedisChannel: with self._redis.pipeline() as pipe: pipe.rpush(self._key, command_json) pipe.expire(self._key, self._command_ttl) + pipe.set(self._pending_key, "1", ex=self._command_ttl) pipe.execute() def _deserialize_command(self, data: dict[str, Any]) -> GraphEngineCommand | None: @@ -106,9 +111,25 @@ class RedisChannel: if command_type == CommandType.ABORT: return AbortCommand.model_validate(data) - else: - # For other command types, use base class - return GraphEngineCommand.model_validate(data) + if command_type == CommandType.PAUSE: + return PauseCommand.model_validate(data) + + # For other command types, use base class + return GraphEngineCommand.model_validate(data) except (ValueError, TypeError): return None + + def _has_pending_commands(self) -> bool: + """ + Check and consume the pending marker to avoid unnecessary list reads. + + Returns: + True if commands should be fetched from Redis. + """ + with self._redis.pipeline() as pipe: + pipe.get(self._pending_key) + pipe.delete(self._pending_key) + pending_value, _ = pipe.execute() + + return pending_value is not None diff --git a/api/core/workflow/graph_engine/command_processing/__init__.py b/api/core/workflow/graph_engine/command_processing/__init__.py index 3460b52226..837f5e55fd 100644 --- a/api/core/workflow/graph_engine/command_processing/__init__.py +++ b/api/core/workflow/graph_engine/command_processing/__init__.py @@ -5,10 +5,11 @@ This package handles external commands sent to the engine during execution. """ -from .command_handlers import AbortCommandHandler +from .command_handlers import AbortCommandHandler, PauseCommandHandler from .command_processor import CommandProcessor __all__ = [ "AbortCommandHandler", "CommandProcessor", + "PauseCommandHandler", ] diff --git a/api/core/workflow/graph_engine/command_processing/command_handlers.py b/api/core/workflow/graph_engine/command_processing/command_handlers.py index 3c51de99f3..c26c98c496 100644 --- a/api/core/workflow/graph_engine/command_processing/command_handlers.py +++ b/api/core/workflow/graph_engine/command_processing/command_handlers.py @@ -1,14 +1,10 @@ -""" -Command handler implementations. 
-""" - import logging from typing import final from typing_extensions import override from ..domain.graph_execution import GraphExecution -from ..entities.commands import AbortCommand, GraphEngineCommand +from ..entities.commands import AbortCommand, GraphEngineCommand, PauseCommand from .command_processor import CommandHandler logger = logging.getLogger(__name__) @@ -16,17 +12,17 @@ logger = logging.getLogger(__name__) @final class AbortCommandHandler(CommandHandler): - """Handles abort commands.""" - @override def handle(self, command: GraphEngineCommand, execution: GraphExecution) -> None: - """ - Handle an abort command. - - Args: - command: The abort command - execution: Graph execution to abort - """ assert isinstance(command, AbortCommand) logger.debug("Aborting workflow %s: %s", execution.workflow_id, command.reason) execution.abort(command.reason or "User requested abort") + + +@final +class PauseCommandHandler(CommandHandler): + @override + def handle(self, command: GraphEngineCommand, execution: GraphExecution) -> None: + assert isinstance(command, PauseCommand) + logger.debug("Pausing workflow %s: %s", execution.workflow_id, command.reason) + execution.pause(command.reason) diff --git a/api/core/workflow/graph_engine/domain/graph_execution.py b/api/core/workflow/graph_engine/domain/graph_execution.py index b273ee9969..6482c927d6 100644 --- a/api/core/workflow/graph_engine/domain/graph_execution.py +++ b/api/core/workflow/graph_engine/domain/graph_execution.py @@ -40,6 +40,8 @@ class GraphExecutionState(BaseModel): started: bool = Field(default=False) completed: bool = Field(default=False) aborted: bool = Field(default=False) + paused: bool = Field(default=False) + pause_reason: str | None = Field(default=None) error: GraphExecutionErrorState | None = Field(default=None) exceptions_count: int = Field(default=0) node_executions: list[NodeExecutionState] = Field(default_factory=list[NodeExecutionState]) @@ -103,6 +105,8 @@ class GraphExecution: started: bool = False completed: bool = False aborted: bool = False + paused: bool = False + pause_reason: str | None = None error: Exception | None = None node_executions: dict[str, NodeExecution] = field(default_factory=dict[str, NodeExecution]) exceptions_count: int = 0 @@ -126,6 +130,17 @@ class GraphExecution: self.aborted = True self.error = RuntimeError(f"Aborted: {reason}") + def pause(self, reason: str | None = None) -> None: + """Pause the graph execution without marking it complete.""" + if self.completed: + raise RuntimeError("Cannot pause execution that has completed") + if self.aborted: + raise RuntimeError("Cannot pause execution that has been aborted") + if self.paused: + return + self.paused = True + self.pause_reason = reason + def fail(self, error: Exception) -> None: """Mark the graph execution as failed.""" self.error = error @@ -140,7 +155,12 @@ class GraphExecution: @property def is_running(self) -> bool: """Check if the execution is currently running.""" - return self.started and not self.completed and not self.aborted + return self.started and not self.completed and not self.aborted and not self.paused + + @property + def is_paused(self) -> bool: + """Check if the execution is currently paused.""" + return self.paused @property def has_error(self) -> bool: @@ -173,6 +193,8 @@ class GraphExecution: started=self.started, completed=self.completed, aborted=self.aborted, + paused=self.paused, + pause_reason=self.pause_reason, error=_serialize_error(self.error), exceptions_count=self.exceptions_count, 
node_executions=node_states, @@ -197,6 +219,8 @@ class GraphExecution: self.started = state.started self.completed = state.completed self.aborted = state.aborted + self.paused = state.paused + self.pause_reason = state.pause_reason self.error = _deserialize_error(state.error) self.exceptions_count = state.exceptions_count self.node_executions = { diff --git a/api/core/workflow/graph_engine/entities/commands.py b/api/core/workflow/graph_engine/entities/commands.py index 123ef3d449..6070ed8812 100644 --- a/api/core/workflow/graph_engine/entities/commands.py +++ b/api/core/workflow/graph_engine/entities/commands.py @@ -16,7 +16,6 @@ class CommandType(StrEnum): ABORT = "abort" PAUSE = "pause" - RESUME = "resume" class GraphEngineCommand(BaseModel): @@ -31,3 +30,10 @@ class AbortCommand(GraphEngineCommand): command_type: CommandType = Field(default=CommandType.ABORT, description="Type of command") reason: str | None = Field(default=None, description="Optional reason for abort") + + +class PauseCommand(GraphEngineCommand): + """Command to pause a running workflow execution.""" + + command_type: CommandType = Field(default=CommandType.PAUSE, description="Type of command") + reason: str | None = Field(default=None, description="Optional reason for pause") diff --git a/api/core/workflow/graph_engine/event_management/event_handlers.py b/api/core/workflow/graph_engine/event_management/event_handlers.py index 7247b17967..fe99d3ad50 100644 --- a/api/core/workflow/graph_engine/event_management/event_handlers.py +++ b/api/core/workflow/graph_engine/event_management/event_handlers.py @@ -7,8 +7,8 @@ from collections.abc import Mapping from functools import singledispatchmethod from typing import TYPE_CHECKING, final -from core.workflow.entities import GraphRuntimeState -from core.workflow.enums import ErrorStrategy, NodeExecutionType +from core.model_runtime.entities.llm_entities import LLMUsage +from core.workflow.enums import ErrorStrategy, NodeExecutionType, NodeState from core.workflow.graph import Graph from core.workflow.graph_events import ( GraphNodeEventBase, @@ -23,11 +23,13 @@ from core.workflow.graph_events import ( NodeRunLoopNextEvent, NodeRunLoopStartedEvent, NodeRunLoopSucceededEvent, + NodeRunPauseRequestedEvent, NodeRunRetryEvent, NodeRunStartedEvent, NodeRunStreamChunkEvent, NodeRunSucceededEvent, ) +from core.workflow.runtime import GraphRuntimeState from ..domain.graph_execution import GraphExecution from ..response_coordinator import ResponseStreamCoordinator @@ -125,6 +127,7 @@ class EventHandler: node_execution = self._graph_execution.get_or_create_node_execution(event.node_id) is_initial_attempt = node_execution.retry_count == 0 node_execution.mark_started(event.id) + self._graph_runtime_state.increment_node_run_steps() # Track in response coordinator for stream ordering self._response_coordinator.track_node_execution(event.node_id, event.id) @@ -163,6 +166,8 @@ class EventHandler: node_execution = self._graph_execution.get_or_create_node_execution(event.node_id) node_execution.mark_taken() + self._accumulate_node_usage(event.node_run_result.llm_usage) + # Store outputs in variable pool self._store_node_outputs(event.node_id, event.node_run_result.outputs) @@ -199,6 +204,18 @@ class EventHandler: # Collect the event self._event_collector.collect(event) + @_dispatch.register + def _(self, event: NodeRunPauseRequestedEvent) -> None: + """Handle pause requests emitted by nodes.""" + + pause_reason = event.reason or "Awaiting human input" + self._graph_execution.pause(pause_reason) + 
self._state_manager.finish_execution(event.node_id) + if event.node_id in self._graph.nodes: + self._graph.nodes[event.node_id].state = NodeState.UNKNOWN + self._graph_runtime_state.register_paused_node(event.node_id) + self._event_collector.collect(event) + @_dispatch.register def _(self, event: NodeRunFailedEvent) -> None: """ @@ -212,6 +229,8 @@ class EventHandler: node_execution.mark_failed(event.error) self._graph_execution.record_node_failure() + self._accumulate_node_usage(event.node_run_result.llm_usage) + result = self._error_handler.handle_node_failure(event) if result: @@ -235,6 +254,8 @@ class EventHandler: node_execution = self._graph_execution.get_or_create_node_execution(event.node_id) node_execution.mark_taken() + self._accumulate_node_usage(event.node_run_result.llm_usage) + # Persist outputs produced by the exception strategy (e.g. default values) self._store_node_outputs(event.node_id, event.node_run_result.outputs) @@ -286,6 +307,19 @@ class EventHandler: self._state_manager.enqueue_node(event.node_id) self._state_manager.start_execution(event.node_id) + def _accumulate_node_usage(self, usage: LLMUsage) -> None: + """Accumulate token usage into the shared runtime state.""" + if usage.total_tokens <= 0: + return + + self._graph_runtime_state.add_tokens(usage.total_tokens) + + current_usage = self._graph_runtime_state.llm_usage + if current_usage.total_tokens == 0: + self._graph_runtime_state.llm_usage = usage + else: + self._graph_runtime_state.llm_usage = current_usage.plus(usage) + def _store_node_outputs(self, node_id: str, outputs: Mapping[str, object]) -> None: """ Store node outputs in the variable pool. diff --git a/api/core/workflow/graph_engine/event_management/event_manager.py b/api/core/workflow/graph_engine/event_management/event_manager.py index 751a2a4352..689cf53cf0 100644 --- a/api/core/workflow/graph_engine/event_management/event_manager.py +++ b/api/core/workflow/graph_engine/event_management/event_manager.py @@ -97,6 +97,10 @@ class EventManager: """ self._layers = layers + def notify_layers(self, event: GraphEngineEvent) -> None: + """Notify registered layers about an event without buffering it.""" + self._notify_layers(event) + def collect(self, event: GraphEngineEvent) -> None: """ Thread-safe method to collect an event. 
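
For reference, a minimal sketch of the pause-command round-trip wired up by the changes above, using only APIs introduced in this diff (RedisChannel, PauseCommand, and the "workflow:{task_id}:commands" channel key from manager.py); `task_id` is a hypothetical placeholder, and in production the same path is taken via GraphEngineManager.send_pause_command:

    from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel
    from core.workflow.graph_engine.entities.commands import PauseCommand
    from extensions.ext_redis import redis_client

    task_id = "example-task-id"  # hypothetical placeholder for a running workflow's task ID

    # Producer side: push a PauseCommand onto the workflow's command channel.
    # send_command also sets the "<key>:pending" marker so idle engines can
    # skip the list read entirely.
    channel = RedisChannel(redis_client, f"workflow:{task_id}:commands")
    channel.send_command(PauseCommand(reason="User requested pause"))

    # Consumer side: the engine's dispatcher periodically drains the channel;
    # fetch_commands returns [] cheaply when the pending marker is absent, and
    # deserializes PAUSE payloads back into PauseCommand instances.
    for command in channel.fetch_commands():
        print(type(command).__name__, getattr(command, "reason", None))

The pending-marker check trades one extra GET/DELETE on the hot path for avoiding a pipelined LRANGE/DELETE on every poll, which matters because the dispatcher now checks for commands after every node completion.
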
diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py index a21fb7c022..dd2ca3f93b 100644 --- a/api/core/workflow/graph_engine/graph_engine.py +++ b/api/core/workflow/graph_engine/graph_engine.py @@ -9,28 +9,29 @@ import contextvars import logging import queue from collections.abc import Generator -from typing import final +from typing import TYPE_CHECKING, cast, final from flask import Flask, current_app -from core.workflow.entities import GraphRuntimeState from core.workflow.enums import NodeExecutionType from core.workflow.graph import Graph -from core.workflow.graph.read_only_state_wrapper import ReadOnlyGraphRuntimeStateWrapper -from core.workflow.graph_engine.ready_queue import InMemoryReadyQueue from core.workflow.graph_events import ( GraphEngineEvent, GraphNodeEventBase, GraphRunAbortedEvent, GraphRunFailedEvent, GraphRunPartialSucceededEvent, + GraphRunPausedEvent, GraphRunStartedEvent, GraphRunSucceededEvent, ) +from core.workflow.runtime import GraphRuntimeState, ReadOnlyGraphRuntimeStateWrapper -from .command_processing import AbortCommandHandler, CommandProcessor -from .domain import GraphExecution -from .entities.commands import AbortCommand +if TYPE_CHECKING: # pragma: no cover - used only for static analysis + from core.workflow.runtime.graph_runtime_state import GraphProtocol + +from .command_processing import AbortCommandHandler, CommandProcessor, PauseCommandHandler +from .entities.commands import AbortCommand, PauseCommand from .error_handler import ErrorHandler from .event_management import EventHandler, EventManager from .graph_state_manager import GraphStateManager @@ -38,10 +39,13 @@ from .graph_traversal import EdgeProcessor, SkipPropagator from .layers.base import GraphEngineLayer from .orchestration import Dispatcher, ExecutionCoordinator from .protocols.command_channel import CommandChannel -from .ready_queue import ReadyQueue, ReadyQueueState, create_ready_queue_from_state -from .response_coordinator import ResponseStreamCoordinator +from .ready_queue import ReadyQueue from .worker_management import WorkerPool +if TYPE_CHECKING: + from core.workflow.graph_engine.domain.graph_execution import GraphExecution + from core.workflow.graph_engine.response_coordinator import ResponseStreamCoordinator + logger = logging.getLogger(__name__) @@ -67,17 +71,16 @@ class GraphEngine: ) -> None: """Initialize the graph engine with all subsystems and dependencies.""" - # Graph execution tracks the overall execution state - self._graph_execution = GraphExecution(workflow_id=workflow_id) - if graph_runtime_state.graph_execution_json != "": - self._graph_execution.loads(graph_runtime_state.graph_execution_json) - - # === Core Dependencies === - # Graph structure and configuration + # Bind runtime state to current workflow context self._graph = graph self._graph_runtime_state = graph_runtime_state + self._graph_runtime_state.configure(graph=cast("GraphProtocol", graph)) self._command_channel = command_channel + # Graph execution tracks the overall execution state + self._graph_execution = cast("GraphExecution", self._graph_runtime_state.graph_execution) + self._graph_execution.workflow_id = workflow_id + # === Worker Management Parameters === # Parameters for dynamic worker pool scaling self._min_workers = min_workers @@ -86,13 +89,7 @@ class GraphEngine: self._scale_down_idle_time = scale_down_idle_time # === Execution Queues === - # Create ready queue from saved state or initialize new one - self._ready_queue: ReadyQueue - 
if self._graph_runtime_state.ready_queue_json == "": - self._ready_queue = InMemoryReadyQueue() - else: - ready_queue_state = ReadyQueueState.model_validate_json(self._graph_runtime_state.ready_queue_json) - self._ready_queue = create_ready_queue_from_state(ready_queue_state) + self._ready_queue = cast(ReadyQueue, self._graph_runtime_state.ready_queue) # Queue for events generated during execution self._event_queue: queue.Queue[GraphNodeEventBase] = queue.Queue() @@ -103,11 +100,7 @@ class GraphEngine: # === Response Coordination === # Coordinates response streaming from response nodes - self._response_coordinator = ResponseStreamCoordinator( - variable_pool=self._graph_runtime_state.variable_pool, graph=self._graph - ) - if graph_runtime_state.response_coordinator_json != "": - self._response_coordinator.loads(graph_runtime_state.response_coordinator_json) + self._response_coordinator = cast("ResponseStreamCoordinator", self._graph_runtime_state.response_coordinator) # === Event Management === # Event manager handles both collection and emission of events @@ -133,19 +126,6 @@ class GraphEngine: skip_propagator=self._skip_propagator, ) - # === Event Handler Registry === - # Central registry for handling all node execution events - self._event_handler_registry = EventHandler( - graph=self._graph, - graph_runtime_state=self._graph_runtime_state, - graph_execution=self._graph_execution, - response_coordinator=self._response_coordinator, - event_collector=self._event_manager, - edge_processor=self._edge_processor, - state_manager=self._state_manager, - error_handler=self._error_handler, - ) - # === Command Processing === # Processes external commands (e.g., abort requests) self._command_processor = CommandProcessor( @@ -153,12 +133,12 @@ class GraphEngine: graph_execution=self._graph_execution, ) - # Register abort command handler + # Register command handlers abort_handler = AbortCommandHandler() - self._command_processor.register_handler( - AbortCommand, - abort_handler, - ) + self._command_processor.register_handler(AbortCommand, abort_handler) + + pause_handler = PauseCommandHandler() + self._command_processor.register_handler(PauseCommand, pause_handler) # === Worker Pool Setup === # Capture Flask app context for worker threads @@ -191,12 +171,23 @@ class GraphEngine: self._execution_coordinator = ExecutionCoordinator( graph_execution=self._graph_execution, state_manager=self._state_manager, - event_handler=self._event_handler_registry, - event_collector=self._event_manager, command_processor=self._command_processor, worker_pool=self._worker_pool, ) + # === Event Handler Registry === + # Central registry for handling all node execution events + self._event_handler_registry = EventHandler( + graph=self._graph, + graph_runtime_state=self._graph_runtime_state, + graph_execution=self._graph_execution, + response_coordinator=self._response_coordinator, + event_collector=self._event_manager, + edge_processor=self._edge_processor, + state_manager=self._state_manager, + error_handler=self._error_handler, + ) + # Dispatches events and manages execution flow self._dispatcher = Dispatcher( event_queue=self._event_queue, @@ -237,26 +228,41 @@ class GraphEngine: # Initialize layers self._initialize_layers() - # Start execution - self._graph_execution.start() + is_resume = self._graph_execution.started + if not is_resume: + self._graph_execution.start() + else: + self._graph_execution.paused = False + self._graph_execution.pause_reason = None + start_event = GraphRunStartedEvent() + 
self._event_manager.notify_layers(start_event) yield start_event # Start subsystems - self._start_execution() + self._start_execution(resume=is_resume) # Yield events as they occur yield from self._event_manager.emit_events() # Handle completion - if self._graph_execution.aborted: + if self._graph_execution.is_paused: + paused_event = GraphRunPausedEvent( + reason=self._graph_execution.pause_reason, + outputs=self._graph_runtime_state.outputs, + ) + self._event_manager.notify_layers(paused_event) + yield paused_event + elif self._graph_execution.aborted: abort_reason = "Workflow execution aborted by user command" if self._graph_execution.error: abort_reason = str(self._graph_execution.error) - yield GraphRunAbortedEvent( + aborted_event = GraphRunAbortedEvent( reason=abort_reason, outputs=self._graph_runtime_state.outputs, ) + self._event_manager.notify_layers(aborted_event) + yield aborted_event elif self._graph_execution.has_error: if self._graph_execution.error: raise self._graph_execution.error @@ -264,20 +270,26 @@ class GraphEngine: outputs = self._graph_runtime_state.outputs exceptions_count = self._graph_execution.exceptions_count if exceptions_count > 0: - yield GraphRunPartialSucceededEvent( + partial_event = GraphRunPartialSucceededEvent( exceptions_count=exceptions_count, outputs=outputs, ) + self._event_manager.notify_layers(partial_event) + yield partial_event else: - yield GraphRunSucceededEvent( + succeeded_event = GraphRunSucceededEvent( outputs=outputs, ) + self._event_manager.notify_layers(succeeded_event) + yield succeeded_event except Exception as e: - yield GraphRunFailedEvent( + failed_event = GraphRunFailedEvent( error=str(e), exceptions_count=self._graph_execution.exceptions_count, ) + self._event_manager.notify_layers(failed_event) + yield failed_event raise finally: @@ -299,8 +311,12 @@ class GraphEngine: except Exception as e: logger.warning("Layer %s failed on_graph_start: %s", layer.__class__.__name__, e) - def _start_execution(self) -> None: + def _start_execution(self, *, resume: bool = False) -> None: """Start execution subsystems.""" + paused_nodes: list[str] = [] + if resume: + paused_nodes = self._graph_runtime_state.consume_paused_nodes() + # Start worker pool (it calculates initial workers internally) self._worker_pool.start() @@ -309,10 +325,15 @@ class GraphEngine: if node.execution_type == NodeExecutionType.RESPONSE: self._response_coordinator.register(node.id) - # Enqueue root node - root_node = self._graph.root_node - self._state_manager.enqueue_node(root_node.id) - self._state_manager.start_execution(root_node.id) + if not resume: + # Enqueue root node + root_node = self._graph.root_node + self._state_manager.enqueue_node(root_node.id) + self._state_manager.start_execution(root_node.id) + else: + for node_id in paused_nodes: + self._state_manager.enqueue_node(node_id) + self._state_manager.start_execution(node_id) # Start dispatcher self._dispatcher.start() diff --git a/api/core/workflow/graph_engine/layers/base.py b/api/core/workflow/graph_engine/layers/base.py index dfac49e11a..24c12c2934 100644 --- a/api/core/workflow/graph_engine/layers/base.py +++ b/api/core/workflow/graph_engine/layers/base.py @@ -7,9 +7,9 @@ intercept and respond to GraphEngine events. 
from abc import ABC, abstractmethod -from core.workflow.graph.graph_runtime_state_protocol import ReadOnlyGraphRuntimeState from core.workflow.graph_engine.protocols.command_channel import CommandChannel from core.workflow.graph_events import GraphEngineEvent +from core.workflow.runtime import ReadOnlyGraphRuntimeState class GraphEngineLayer(ABC): diff --git a/api/core/workflow/graph_engine/layers/persistence.py b/api/core/workflow/graph_engine/layers/persistence.py new file mode 100644 index 0000000000..ecd8e12ca5 --- /dev/null +++ b/api/core/workflow/graph_engine/layers/persistence.py @@ -0,0 +1,410 @@ +"""Workflow persistence layer for GraphEngine. + +This layer mirrors the former ``WorkflowCycleManager`` responsibilities by +listening to ``GraphEngineEvent`` instances directly and persisting workflow +and node execution state via the injected repositories. + +The design keeps domain persistence concerns inside the engine thread, while +allowing presentation layers to remain read-only observers of repository +state. +""" + +from collections.abc import Mapping +from dataclasses import dataclass +from datetime import datetime +from typing import Any, Union + +from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity +from core.ops.entities.trace_entity import TraceTaskName +from core.ops.ops_trace_manager import TraceQueueManager, TraceTask +from core.workflow.constants import SYSTEM_VARIABLE_NODE_ID +from core.workflow.entities import WorkflowExecution, WorkflowNodeExecution +from core.workflow.enums import ( + SystemVariableKey, + WorkflowExecutionStatus, + WorkflowNodeExecutionMetadataKey, + WorkflowNodeExecutionStatus, + WorkflowType, +) +from core.workflow.graph_engine.layers.base import GraphEngineLayer +from core.workflow.graph_events import ( + GraphEngineEvent, + GraphRunAbortedEvent, + GraphRunFailedEvent, + GraphRunPartialSucceededEvent, + GraphRunPausedEvent, + GraphRunStartedEvent, + GraphRunSucceededEvent, + NodeRunExceptionEvent, + NodeRunFailedEvent, + NodeRunPauseRequestedEvent, + NodeRunRetryEvent, + NodeRunStartedEvent, + NodeRunSucceededEvent, +) +from core.workflow.node_events import NodeRunResult +from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository +from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository +from core.workflow.workflow_entry import WorkflowEntry +from libs.datetime_utils import naive_utc_now + + +@dataclass(slots=True) +class PersistenceWorkflowInfo: + """Static workflow metadata required for persistence.""" + + workflow_id: str + workflow_type: WorkflowType + version: str + graph_data: Mapping[str, Any] + + +@dataclass(slots=True) +class _NodeRuntimeSnapshot: + """Lightweight cache to keep node metadata across event phases.""" + + node_id: str + title: str + predecessor_node_id: str | None + iteration_id: str | None + loop_id: str | None + created_at: datetime + + +class WorkflowPersistenceLayer(GraphEngineLayer): + """GraphEngine layer that persists workflow and node execution state.""" + + def __init__( + self, + *, + application_generate_entity: Union[AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity], + workflow_info: PersistenceWorkflowInfo, + workflow_execution_repository: WorkflowExecutionRepository, + workflow_node_execution_repository: WorkflowNodeExecutionRepository, + trace_manager: TraceQueueManager | None = None, + ) -> None: + super().__init__() + self._application_generate_entity = 
application_generate_entity + self._workflow_info = workflow_info + self._workflow_execution_repository = workflow_execution_repository + self._workflow_node_execution_repository = workflow_node_execution_repository + self._trace_manager = trace_manager + + self._workflow_execution: WorkflowExecution | None = None + self._node_execution_cache: dict[str, WorkflowNodeExecution] = {} + self._node_snapshots: dict[str, _NodeRuntimeSnapshot] = {} + self._node_sequence: int = 0 + + # ------------------------------------------------------------------ + # GraphEngineLayer lifecycle + # ------------------------------------------------------------------ + def on_graph_start(self) -> None: + self._workflow_execution = None + self._node_execution_cache.clear() + self._node_snapshots.clear() + self._node_sequence = 0 + + def on_event(self, event: GraphEngineEvent) -> None: + if isinstance(event, GraphRunStartedEvent): + self._handle_graph_run_started() + return + + if isinstance(event, GraphRunSucceededEvent): + self._handle_graph_run_succeeded(event) + return + + if isinstance(event, GraphRunPartialSucceededEvent): + self._handle_graph_run_partial_succeeded(event) + return + + if isinstance(event, GraphRunFailedEvent): + self._handle_graph_run_failed(event) + return + + if isinstance(event, GraphRunAbortedEvent): + self._handle_graph_run_aborted(event) + return + + if isinstance(event, GraphRunPausedEvent): + self._handle_graph_run_paused(event) + return + + if isinstance(event, NodeRunStartedEvent): + self._handle_node_started(event) + return + + if isinstance(event, NodeRunRetryEvent): + self._handle_node_retry(event) + return + + if isinstance(event, NodeRunSucceededEvent): + self._handle_node_succeeded(event) + return + + if isinstance(event, NodeRunFailedEvent): + self._handle_node_failed(event) + return + + if isinstance(event, NodeRunExceptionEvent): + self._handle_node_exception(event) + return + + if isinstance(event, NodeRunPauseRequestedEvent): + self._handle_node_pause_requested(event) + + def on_graph_end(self, error: Exception | None) -> None: + return + + # ------------------------------------------------------------------ + # Graph-level handlers + # ------------------------------------------------------------------ + def _handle_graph_run_started(self) -> None: + execution_id = self._get_execution_id() + workflow_execution = WorkflowExecution.new( + id_=execution_id, + workflow_id=self._workflow_info.workflow_id, + workflow_type=self._workflow_info.workflow_type, + workflow_version=self._workflow_info.version, + graph=self._workflow_info.graph_data, + inputs=self._prepare_workflow_inputs(), + started_at=naive_utc_now(), + ) + + self._workflow_execution_repository.save(workflow_execution) + self._workflow_execution = workflow_execution + + def _handle_graph_run_succeeded(self, event: GraphRunSucceededEvent) -> None: + execution = self._get_workflow_execution() + execution.outputs = event.outputs + execution.status = WorkflowExecutionStatus.SUCCEEDED + self._populate_completion_statistics(execution) + + self._workflow_execution_repository.save(execution) + self._enqueue_trace_task(execution) + + def _handle_graph_run_partial_succeeded(self, event: GraphRunPartialSucceededEvent) -> None: + execution = self._get_workflow_execution() + execution.outputs = event.outputs + execution.status = WorkflowExecutionStatus.PARTIAL_SUCCEEDED + execution.exceptions_count = event.exceptions_count + self._populate_completion_statistics(execution) + + self._workflow_execution_repository.save(execution) + 
self._enqueue_trace_task(execution) + + def _handle_graph_run_failed(self, event: GraphRunFailedEvent) -> None: + execution = self._get_workflow_execution() + execution.status = WorkflowExecutionStatus.FAILED + execution.error_message = event.error + execution.exceptions_count = event.exceptions_count + self._populate_completion_statistics(execution) + + self._fail_running_node_executions(error_message=event.error) + self._workflow_execution_repository.save(execution) + self._enqueue_trace_task(execution) + + def _handle_graph_run_aborted(self, event: GraphRunAbortedEvent) -> None: + execution = self._get_workflow_execution() + execution.status = WorkflowExecutionStatus.STOPPED + execution.error_message = event.reason or "Workflow execution aborted" + self._populate_completion_statistics(execution) + + self._fail_running_node_executions(error_message=execution.error_message or "") + self._workflow_execution_repository.save(execution) + self._enqueue_trace_task(execution) + + def _handle_graph_run_paused(self, event: GraphRunPausedEvent) -> None: + execution = self._get_workflow_execution() + execution.status = WorkflowExecutionStatus.PAUSED + execution.error_message = event.reason or "Workflow execution paused" + execution.outputs = event.outputs + self._populate_completion_statistics(execution, update_finished=False) + + self._workflow_execution_repository.save(execution) + + # ------------------------------------------------------------------ + # Node-level handlers + # ------------------------------------------------------------------ + def _handle_node_started(self, event: NodeRunStartedEvent) -> None: + execution = self._get_workflow_execution() + + metadata = { + WorkflowNodeExecutionMetadataKey.ITERATION_ID: event.in_iteration_id, + WorkflowNodeExecutionMetadataKey.LOOP_ID: event.in_loop_id, + } + + domain_execution = WorkflowNodeExecution( + id=event.id, + node_execution_id=event.id, + workflow_id=execution.workflow_id, + workflow_execution_id=execution.id_, + predecessor_node_id=event.predecessor_node_id, + index=self._next_node_sequence(), + node_id=event.node_id, + node_type=event.node_type, + title=event.node_title, + status=WorkflowNodeExecutionStatus.RUNNING, + metadata=metadata, + created_at=event.start_at, + ) + + self._node_execution_cache[event.id] = domain_execution + self._workflow_node_execution_repository.save(domain_execution) + + snapshot = _NodeRuntimeSnapshot( + node_id=event.node_id, + title=event.node_title, + predecessor_node_id=event.predecessor_node_id, + iteration_id=event.in_iteration_id, + loop_id=event.in_loop_id, + created_at=event.start_at, + ) + self._node_snapshots[event.id] = snapshot + + def _handle_node_retry(self, event: NodeRunRetryEvent) -> None: + domain_execution = self._get_node_execution(event.id) + domain_execution.status = WorkflowNodeExecutionStatus.RETRY + domain_execution.error = event.error + self._workflow_node_execution_repository.save(domain_execution) + self._workflow_node_execution_repository.save_execution_data(domain_execution) + + def _handle_node_succeeded(self, event: NodeRunSucceededEvent) -> None: + domain_execution = self._get_node_execution(event.id) + self._update_node_execution(domain_execution, event.node_run_result, WorkflowNodeExecutionStatus.SUCCEEDED) + + def _handle_node_failed(self, event: NodeRunFailedEvent) -> None: + domain_execution = self._get_node_execution(event.id) + self._update_node_execution( + domain_execution, + event.node_run_result, + WorkflowNodeExecutionStatus.FAILED, + error=event.error, + ) + + 
def _handle_node_exception(self, event: NodeRunExceptionEvent) -> None: + domain_execution = self._get_node_execution(event.id) + self._update_node_execution( + domain_execution, + event.node_run_result, + WorkflowNodeExecutionStatus.EXCEPTION, + error=event.error, + ) + + def _handle_node_pause_requested(self, event: NodeRunPauseRequestedEvent) -> None: + domain_execution = self._get_node_execution(event.id) + self._update_node_execution( + domain_execution, + event.node_run_result, + WorkflowNodeExecutionStatus.PAUSED, + error=event.reason, + update_outputs=False, + ) + + # ------------------------------------------------------------------ + # Helpers + # ------------------------------------------------------------------ + def _get_execution_id(self) -> str: + workflow_execution_id = self._system_variables().get(SystemVariableKey.WORKFLOW_EXECUTION_ID) + if not workflow_execution_id: + raise ValueError("workflow_execution_id must be provided in system variables for pause/resume flows") + return str(workflow_execution_id) + + def _prepare_workflow_inputs(self) -> Mapping[str, Any]: + inputs = {**self._application_generate_entity.inputs} + for field_name, value in self._system_variables().items(): + if field_name == SystemVariableKey.CONVERSATION_ID.value: + # Conversation IDs are tied to the current session; omit them so persisted + # workflow inputs stay reusable without binding future runs to this conversation. + continue + inputs[f"sys.{field_name}"] = value + handled = WorkflowEntry.handle_special_values(inputs) + return handled or {} + + def _get_workflow_execution(self) -> WorkflowExecution: + if self._workflow_execution is None: + raise ValueError("workflow execution not initialized") + return self._workflow_execution + + def _get_node_execution(self, node_execution_id: str) -> WorkflowNodeExecution: + if node_execution_id not in self._node_execution_cache: + raise ValueError(f"Node execution not found for id={node_execution_id}") + return self._node_execution_cache[node_execution_id] + + def _next_node_sequence(self) -> int: + self._node_sequence += 1 + return self._node_sequence + + def _populate_completion_statistics(self, execution: WorkflowExecution, *, update_finished: bool = True) -> None: + if update_finished: + execution.finished_at = naive_utc_now() + runtime_state = self.graph_runtime_state + if runtime_state is None: + return + execution.total_tokens = runtime_state.total_tokens + execution.total_steps = runtime_state.node_run_steps + execution.outputs = execution.outputs or runtime_state.outputs + execution.exceptions_count = runtime_state.exceptions_count + + def _update_node_execution( + self, + domain_execution: WorkflowNodeExecution, + node_result: NodeRunResult, + status: WorkflowNodeExecutionStatus, + *, + error: str | None = None, + update_outputs: bool = True, + ) -> None: + finished_at = naive_utc_now() + snapshot = self._node_snapshots.get(domain_execution.id) + start_at = snapshot.created_at if snapshot else domain_execution.created_at + domain_execution.status = status + domain_execution.finished_at = finished_at + domain_execution.elapsed_time = max((finished_at - start_at).total_seconds(), 0.0) + + if error: + domain_execution.error = error + + if update_outputs: + domain_execution.update_from_mapping( + inputs=node_result.inputs, + process_data=node_result.process_data, + outputs=node_result.outputs, + metadata=node_result.metadata, + ) + + self._workflow_node_execution_repository.save(domain_execution) + 
self._workflow_node_execution_repository.save_execution_data(domain_execution) + + def _fail_running_node_executions(self, *, error_message: str) -> None: + now = naive_utc_now() + for execution in self._node_execution_cache.values(): + if execution.status == WorkflowNodeExecutionStatus.RUNNING: + execution.status = WorkflowNodeExecutionStatus.FAILED + execution.error = error_message + execution.finished_at = now + execution.elapsed_time = max((now - execution.created_at).total_seconds(), 0.0) + self._workflow_node_execution_repository.save(execution) + + def _enqueue_trace_task(self, execution: WorkflowExecution) -> None: + if not self._trace_manager: + return + + conversation_id = self._system_variables().get(SystemVariableKey.CONVERSATION_ID.value) + external_trace_id = None + if isinstance(self._application_generate_entity, (WorkflowAppGenerateEntity, AdvancedChatAppGenerateEntity)): + external_trace_id = self._application_generate_entity.extras.get("external_trace_id") + + trace_task = TraceTask( + TraceTaskName.WORKFLOW_TRACE, + workflow_execution=execution, + conversation_id=conversation_id, + user_id=self._trace_manager.user_id, + external_trace_id=external_trace_id, + ) + self._trace_manager.add_trace_task(trace_task) + + def _system_variables(self) -> Mapping[str, Any]: + runtime_state = self.graph_runtime_state + if runtime_state is None: + return {} + return runtime_state.variable_pool.get_by_prefix(SYSTEM_VARIABLE_NODE_ID) diff --git a/api/core/workflow/graph_engine/manager.py b/api/core/workflow/graph_engine/manager.py index ed62209acb..f05d43d8ad 100644 --- a/api/core/workflow/graph_engine/manager.py +++ b/api/core/workflow/graph_engine/manager.py @@ -9,7 +9,7 @@ Supports stop, pause, and resume operations. from typing import final from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel -from core.workflow.graph_engine.entities.commands import AbortCommand +from core.workflow.graph_engine.entities.commands import AbortCommand, GraphEngineCommand, PauseCommand from extensions.ext_redis import redis_client @@ -20,7 +20,7 @@ class GraphEngineManager: This class provides a simple interface for controlling workflow executions by sending commands through Redis channels, without user validation. - Supports stop, pause, and resume operations. + Supports stop and pause operations. 
""" @staticmethod @@ -32,19 +32,29 @@ class GraphEngineManager: task_id: The task ID of the workflow to stop reason: Optional reason for stopping (defaults to "User requested stop") """ + abort_command = AbortCommand(reason=reason or "User requested stop") + GraphEngineManager._send_command(task_id, abort_command) + + @staticmethod + def send_pause_command(task_id: str, reason: str | None = None) -> None: + """Send a pause command to a running workflow.""" + + pause_command = PauseCommand(reason=reason or "User requested pause") + GraphEngineManager._send_command(task_id, pause_command) + + @staticmethod + def _send_command(task_id: str, command: GraphEngineCommand) -> None: + """Send a command to the workflow-specific Redis channel.""" + if not task_id: return - # Create Redis channel for this task channel_key = f"workflow:{task_id}:commands" channel = RedisChannel(redis_client, channel_key) - # Create and send abort command - abort_command = AbortCommand(reason=reason or "User requested stop") - try: - channel.send_command(abort_command) + channel.send_command(command) except Exception: # Silently fail if Redis is unavailable - # The legacy stop flag mechanism will still work + # The legacy control mechanisms will still work pass diff --git a/api/core/workflow/graph_engine/orchestration/dispatcher.py b/api/core/workflow/graph_engine/orchestration/dispatcher.py index a7229ce4e8..4097cead9c 100644 --- a/api/core/workflow/graph_engine/orchestration/dispatcher.py +++ b/api/core/workflow/graph_engine/orchestration/dispatcher.py @@ -8,7 +8,12 @@ import threading import time from typing import TYPE_CHECKING, final -from core.workflow.graph_events.base import GraphNodeEventBase +from core.workflow.graph_events import ( + GraphNodeEventBase, + NodeRunExceptionEvent, + NodeRunFailedEvent, + NodeRunSucceededEvent, +) from ..event_management import EventManager from .execution_coordinator import ExecutionCoordinator @@ -28,6 +33,12 @@ class Dispatcher: with timeout and completion detection. 
""" + _COMMAND_TRIGGER_EVENTS = ( + NodeRunSucceededEvent, + NodeRunFailedEvent, + NodeRunExceptionEvent, + ) + def __init__( self, event_queue: queue.Queue[GraphNodeEventBase], @@ -76,22 +87,37 @@ class Dispatcher: """Main dispatcher loop.""" try: while not self._stop_event.is_set(): - # Check for commands - self._execution_coordinator.check_commands() + commands_checked = False + should_check_commands = False + should_break = False - # Check for scaling - self._execution_coordinator.check_scaling() + if self._execution_coordinator.is_execution_complete(): + should_check_commands = True + should_break = True + else: + # Check for scaling + self._execution_coordinator.check_scaling() - # Process events - try: - event = self._event_queue.get(timeout=0.1) - # Route to the event handler - self._event_handler.dispatch(event) - self._event_queue.task_done() - except queue.Empty: - # Check if execution is complete - if self._execution_coordinator.is_execution_complete(): - break + # Process events + try: + event = self._event_queue.get(timeout=0.1) + # Route to the event handler + self._event_handler.dispatch(event) + should_check_commands = self._should_check_commands(event) + self._event_queue.task_done() + except queue.Empty: + # Process commands even when no new events arrive so abort requests are not missed + should_check_commands = True + time.sleep(0.1) + + if should_check_commands and not commands_checked: + self._execution_coordinator.check_commands() + commands_checked = True + + if should_break: + if not commands_checked: + self._execution_coordinator.check_commands() + break except Exception as e: logger.exception("Dispatcher error") @@ -102,3 +128,7 @@ class Dispatcher: # Signal the event emitter that execution is complete if self._event_emitter: self._event_emitter.mark_complete() + + def _should_check_commands(self, event: GraphNodeEventBase) -> bool: + """Return True if the event represents a node completion.""" + return isinstance(event, self._COMMAND_TRIGGER_EVENTS) diff --git a/api/core/workflow/graph_engine/orchestration/execution_coordinator.py b/api/core/workflow/graph_engine/orchestration/execution_coordinator.py index b35e8bb6d8..a3162de244 100644 --- a/api/core/workflow/graph_engine/orchestration/execution_coordinator.py +++ b/api/core/workflow/graph_engine/orchestration/execution_coordinator.py @@ -2,17 +2,13 @@ Execution coordinator for managing overall workflow execution. 
""" -from typing import TYPE_CHECKING, final +from typing import final from ..command_processing import CommandProcessor from ..domain import GraphExecution -from ..event_management import EventManager from ..graph_state_manager import GraphStateManager from ..worker_management import WorkerPool -if TYPE_CHECKING: - from ..event_management import EventHandler - @final class ExecutionCoordinator: @@ -27,8 +23,6 @@ class ExecutionCoordinator: self, graph_execution: GraphExecution, state_manager: GraphStateManager, - event_handler: "EventHandler", - event_collector: EventManager, command_processor: CommandProcessor, worker_pool: WorkerPool, ) -> None: @@ -38,15 +32,11 @@ class ExecutionCoordinator: Args: graph_execution: Graph execution aggregate state_manager: Unified state manager - event_handler: Event handler registry for processing events - event_collector: Event manager for collecting events command_processor: Processor for commands worker_pool: Pool of workers """ self._graph_execution = graph_execution self._state_manager = state_manager - self._event_handler = event_handler - self._event_collector = event_collector self._command_processor = command_processor self._worker_pool = worker_pool @@ -65,15 +55,24 @@ class ExecutionCoordinator: Returns: True if execution is complete """ - # Check if aborted or failed + # Treat paused, aborted, or failed executions as terminal states + if self._graph_execution.is_paused: + return True + if self._graph_execution.aborted or self._graph_execution.has_error: return True - # Complete if no work remains return self._state_manager.is_execution_complete() + @property + def is_paused(self) -> bool: + """Expose whether the underlying graph execution is paused.""" + return self._graph_execution.is_paused + def mark_complete(self) -> None: """Mark execution as complete.""" + if self._graph_execution.is_paused: + return if not self._graph_execution.completed: self._graph_execution.complete() @@ -85,3 +84,21 @@ class ExecutionCoordinator: error: The error that caused failure """ self._graph_execution.fail(error) + + def handle_pause_if_needed(self) -> None: + """If the execution has been paused, stop workers immediately.""" + + if not self._graph_execution.is_paused: + return + + self._worker_pool.stop() + self._state_manager.clear_executing() + + def handle_abort_if_needed(self) -> None: + """If the execution has been aborted, stop workers immediately.""" + + if not self._graph_execution.aborted: + return + + self._worker_pool.stop() + self._state_manager.clear_executing() diff --git a/api/core/workflow/graph_engine/response_coordinator/coordinator.py b/api/core/workflow/graph_engine/response_coordinator/coordinator.py index 3db40c545e..98e0ea91ef 100644 --- a/api/core/workflow/graph_engine/response_coordinator/coordinator.py +++ b/api/core/workflow/graph_engine/response_coordinator/coordinator.py @@ -14,11 +14,11 @@ from uuid import uuid4 from pydantic import BaseModel, Field -from core.workflow.entities.variable_pool import VariablePool from core.workflow.enums import NodeExecutionType, NodeState from core.workflow.graph import Graph from core.workflow.graph_events import NodeRunStreamChunkEvent, NodeRunSucceededEvent from core.workflow.nodes.base.template import TextSegment, VariableSegment +from core.workflow.runtime import VariablePool from .path import Path from .session import ResponseSession diff --git a/api/core/workflow/graph_events/__init__.py b/api/core/workflow/graph_events/__init__.py index 42a376d4ad..7a5edbb331 100644 --- 
a/api/core/workflow/graph_events/__init__.py +++ b/api/core/workflow/graph_events/__init__.py @@ -13,6 +13,7 @@ from .graph import ( GraphRunAbortedEvent, GraphRunFailedEvent, GraphRunPartialSucceededEvent, + GraphRunPausedEvent, GraphRunStartedEvent, GraphRunSucceededEvent, ) @@ -37,6 +38,7 @@ from .loop import ( from .node import ( NodeRunExceptionEvent, NodeRunFailedEvent, + NodeRunPauseRequestedEvent, NodeRunRetrieverResourceEvent, NodeRunRetryEvent, NodeRunStartedEvent, @@ -51,6 +53,7 @@ __all__ = [ "GraphRunAbortedEvent", "GraphRunFailedEvent", "GraphRunPartialSucceededEvent", + "GraphRunPausedEvent", "GraphRunStartedEvent", "GraphRunSucceededEvent", "NodeRunAgentLogEvent", @@ -64,6 +67,7 @@ __all__ = [ "NodeRunLoopNextEvent", "NodeRunLoopStartedEvent", "NodeRunLoopSucceededEvent", + "NodeRunPauseRequestedEvent", "NodeRunRetrieverResourceEvent", "NodeRunRetryEvent", "NodeRunStartedEvent", diff --git a/api/core/workflow/graph_events/graph.py b/api/core/workflow/graph_events/graph.py index 5d13833faa..0da962aa1c 100644 --- a/api/core/workflow/graph_events/graph.py +++ b/api/core/workflow/graph_events/graph.py @@ -8,7 +8,12 @@ class GraphRunStartedEvent(BaseGraphEvent): class GraphRunSucceededEvent(BaseGraphEvent): - outputs: dict[str, object] = Field(default_factory=dict) + """Event emitted when a run completes successfully with final outputs.""" + + outputs: dict[str, object] = Field( + default_factory=dict, + description="Final workflow outputs keyed by output selector.", + ) class GraphRunFailedEvent(BaseGraphEvent): @@ -17,12 +22,30 @@ class GraphRunFailedEvent(BaseGraphEvent): class GraphRunPartialSucceededEvent(BaseGraphEvent): + """Event emitted when a run finishes with partial success and failures.""" + exceptions_count: int = Field(..., description="exception count") - outputs: dict[str, object] = Field(default_factory=dict) + outputs: dict[str, object] = Field( + default_factory=dict, + description="Outputs that were materialised before failures occurred.", + ) class GraphRunAbortedEvent(BaseGraphEvent): """Event emitted when a graph run is aborted by user command.""" reason: str | None = Field(default=None, description="reason for abort") - outputs: dict[str, object] = Field(default_factory=dict, description="partial outputs if any") + outputs: dict[str, object] = Field( + default_factory=dict, + description="Outputs produced before the abort was requested.", + ) + + +class GraphRunPausedEvent(BaseGraphEvent): + """Event emitted when a graph run is paused by user command.""" + + reason: str | None = Field(default=None, description="reason for pause") + outputs: dict[str, object] = Field( + default_factory=dict, + description="Outputs available to the client while the run is paused.", + ) diff --git a/api/core/workflow/graph_events/node.py b/api/core/workflow/graph_events/node.py index 1d35a69c4a..b880df60d1 100644 --- a/api/core/workflow/graph_events/node.py +++ b/api/core/workflow/graph_events/node.py @@ -51,3 +51,7 @@ class NodeRunExceptionEvent(GraphNodeEventBase): class NodeRunRetryEvent(NodeRunStartedEvent): error: str = Field(..., description="error") retry_index: int = Field(..., description="which retry attempt is about to be performed") + + +class NodeRunPauseRequestedEvent(GraphNodeEventBase): + reason: str | None = Field(default=None, description="Optional pause reason") diff --git a/api/core/workflow/node_events/__init__.py b/api/core/workflow/node_events/__init__.py index c3bcda0483..f14a594c85 100644 --- a/api/core/workflow/node_events/__init__.py +++ 
b/api/core/workflow/node_events/__init__.py @@ -14,6 +14,7 @@ from .loop import ( ) from .node import ( ModelInvokeCompletedEvent, + PauseRequestedEvent, RunRetrieverResourceEvent, RunRetryEvent, StreamChunkEvent, @@ -33,6 +34,7 @@ __all__ = [ "ModelInvokeCompletedEvent", "NodeEventBase", "NodeRunResult", + "PauseRequestedEvent", "RunRetrieverResourceEvent", "RunRetryEvent", "StreamChunkEvent", diff --git a/api/core/workflow/node_events/node.py b/api/core/workflow/node_events/node.py index 93dfefb679..4fd5684436 100644 --- a/api/core/workflow/node_events/node.py +++ b/api/core/workflow/node_events/node.py @@ -40,3 +40,7 @@ class StreamChunkEvent(NodeEventBase): class StreamCompletedEvent(NodeEventBase): node_run_result: NodeRunResult = Field(..., description="run result") + + +class PauseRequestedEvent(NodeEventBase): + reason: str | None = Field(default=None, description="Optional pause reason") diff --git a/api/core/workflow/nodes/agent/agent_node.py b/api/core/workflow/nodes/agent/agent_node.py index 4a24b18465..626ef1df7b 100644 --- a/api/core/workflow/nodes/agent/agent_node.py +++ b/api/core/workflow/nodes/agent/agent_node.py @@ -25,7 +25,6 @@ from core.tools.entities.tool_entities import ( from core.tools.tool_manager import ToolManager from core.tools.utils.message_transformer import ToolFileMessageTransformer from core.variables.segments import ArrayFileSegment, StringSegment -from core.workflow.entities import VariablePool from core.workflow.enums import ( ErrorStrategy, NodeType, @@ -44,6 +43,7 @@ from core.workflow.nodes.agent.entities import AgentNodeData, AgentOldVersionMod from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig from core.workflow.nodes.base.node import Node from core.workflow.nodes.base.variable_template_parser import VariableTemplateParser +from core.workflow.runtime import VariablePool from extensions.ext_database import db from factories import file_factory from factories.agent_factory import get_plugin_agent_strategy diff --git a/api/core/workflow/nodes/base/node.py b/api/core/workflow/nodes/base/node.py index 41212abb0e..7f8c1eddff 100644 --- a/api/core/workflow/nodes/base/node.py +++ b/api/core/workflow/nodes/base/node.py @@ -6,7 +6,7 @@ from typing import Any, ClassVar from uuid import uuid4 from core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities import AgentNodeStrategyInit, GraphInitParams, GraphRuntimeState +from core.workflow.entities import AgentNodeStrategyInit, GraphInitParams from core.workflow.enums import ErrorStrategy, NodeExecutionType, NodeState, NodeType, WorkflowNodeExecutionStatus from core.workflow.graph_events import ( GraphNodeEventBase, @@ -20,6 +20,7 @@ from core.workflow.graph_events import ( NodeRunLoopNextEvent, NodeRunLoopStartedEvent, NodeRunLoopSucceededEvent, + NodeRunPauseRequestedEvent, NodeRunRetrieverResourceEvent, NodeRunStartedEvent, NodeRunStreamChunkEvent, @@ -37,10 +38,12 @@ from core.workflow.node_events import ( LoopSucceededEvent, NodeEventBase, NodeRunResult, + PauseRequestedEvent, RunRetrieverResourceEvent, StreamChunkEvent, StreamCompletedEvent, ) +from core.workflow.runtime import GraphRuntimeState from libs.datetime_utils import naive_utc_now from models.enums import UserFrom @@ -385,6 +388,16 @@ class Node: f"Node {self._node_id} does not support status {event.node_run_result.status}" ) + @_dispatch.register + def _(self, event: PauseRequestedEvent) -> NodeRunPauseRequestedEvent: + return NodeRunPauseRequestedEvent( + id=self._node_execution_id, + 
node_id=self._node_id, + node_type=self.node_type, + node_run_result=NodeRunResult(status=WorkflowNodeExecutionStatus.PAUSED), + reason=event.reason, + ) + @_dispatch.register def _(self, event: AgentLogEvent) -> NodeRunAgentLogEvent: return NodeRunAgentLogEvent( diff --git a/api/core/workflow/nodes/datasource/datasource_node.py b/api/core/workflow/nodes/datasource/datasource_node.py index e392cb5f5c..34c1db9468 100644 --- a/api/core/workflow/nodes/datasource/datasource_node.py +++ b/api/core/workflow/nodes/datasource/datasource_node.py @@ -19,7 +19,6 @@ from core.file.enums import FileTransferMethod, FileType from core.plugin.impl.exc import PluginDaemonClientSideError from core.variables.segments import ArrayAnySegment from core.variables.variables import ArrayAnyVariable -from core.workflow.entities.variable_pool import VariablePool from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus from core.workflow.enums import ErrorStrategy, NodeExecutionType, NodeType, SystemVariableKey from core.workflow.node_events import NodeRunResult, StreamChunkEvent, StreamCompletedEvent @@ -27,6 +26,7 @@ from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig from core.workflow.nodes.base.node import Node from core.workflow.nodes.base.variable_template_parser import VariableTemplateParser from core.workflow.nodes.tool.exc import ToolFileError +from core.workflow.runtime import VariablePool from extensions.ext_database import db from factories import file_factory from models.model import UploadFile diff --git a/api/core/workflow/nodes/http_request/executor.py b/api/core/workflow/nodes/http_request/executor.py index d3d3571b44..7b5b9c9e86 100644 --- a/api/core/workflow/nodes/http_request/executor.py +++ b/api/core/workflow/nodes/http_request/executor.py @@ -15,7 +15,7 @@ from core.file import file_manager from core.file.enums import FileTransferMethod from core.helper import ssrf_proxy from core.variables.segments import ArrayFileSegment, FileSegment -from core.workflow.entities import VariablePool +from core.workflow.runtime import VariablePool from .entities import ( HttpRequestNodeAuthorization, diff --git a/api/core/workflow/nodes/human_input/__init__.py b/api/core/workflow/nodes/human_input/__init__.py new file mode 100644 index 0000000000..379440557c --- /dev/null +++ b/api/core/workflow/nodes/human_input/__init__.py @@ -0,0 +1,3 @@ +from .human_input_node import HumanInputNode + +__all__ = ["HumanInputNode"] diff --git a/api/core/workflow/nodes/human_input/entities.py b/api/core/workflow/nodes/human_input/entities.py new file mode 100644 index 0000000000..02913d93c3 --- /dev/null +++ b/api/core/workflow/nodes/human_input/entities.py @@ -0,0 +1,10 @@ +from pydantic import Field + +from core.workflow.nodes.base import BaseNodeData + + +class HumanInputNodeData(BaseNodeData): + """Configuration schema for the HumanInput node.""" + + required_variables: list[str] = Field(default_factory=list) + pause_reason: str | None = Field(default=None) diff --git a/api/core/workflow/nodes/human_input/human_input_node.py b/api/core/workflow/nodes/human_input/human_input_node.py new file mode 100644 index 0000000000..e49f9a8c81 --- /dev/null +++ b/api/core/workflow/nodes/human_input/human_input_node.py @@ -0,0 +1,132 @@ +from collections.abc import Mapping +from typing import Any + +from core.workflow.enums import ErrorStrategy, NodeExecutionType, NodeType, WorkflowNodeExecutionStatus +from core.workflow.node_events import NodeRunResult, PauseRequestedEvent +from 
core.workflow.nodes.base.entities import BaseNodeData, RetryConfig +from core.workflow.nodes.base.node import Node + +from .entities import HumanInputNodeData + + +class HumanInputNode(Node): + node_type = NodeType.HUMAN_INPUT + execution_type = NodeExecutionType.BRANCH + + _BRANCH_SELECTION_KEYS: tuple[str, ...] = ( + "edge_source_handle", + "edgeSourceHandle", + "source_handle", + "selected_branch", + "selectedBranch", + "branch", + "branch_id", + "branchId", + "handle", + ) + + _node_data: HumanInputNodeData + + def init_node_data(self, data: Mapping[str, Any]) -> None: + self._node_data = HumanInputNodeData(**data) + + def get_base_node_data(self) -> BaseNodeData: + return self._node_data + + @classmethod + def version(cls) -> str: + return "1" + + def _get_error_strategy(self) -> ErrorStrategy | None: + return self._node_data.error_strategy + + def _get_retry_config(self) -> RetryConfig: + return self._node_data.retry_config + + def _get_title(self) -> str: + return self._node_data.title + + def _get_description(self) -> str | None: + return self._node_data.desc + + def _get_default_value_dict(self) -> dict[str, Any]: + return self._node_data.default_value_dict + + def _run(self): # type: ignore[override] + if self._is_completion_ready(): + branch_handle = self._resolve_branch_selection() + return NodeRunResult( + status=WorkflowNodeExecutionStatus.SUCCEEDED, + outputs={}, + edge_source_handle=branch_handle or "source", + ) + + return self._pause_generator() + + def _pause_generator(self): + yield PauseRequestedEvent(reason=self._node_data.pause_reason) + + def _is_completion_ready(self) -> bool: + """Determine whether all required inputs are satisfied.""" + + if not self._node_data.required_variables: + return False + + variable_pool = self.graph_runtime_state.variable_pool + + for selector_str in self._node_data.required_variables: + parts = selector_str.split(".") + if len(parts) != 2: + return False + segment = variable_pool.get(parts) + if segment is None: + return False + + return True + + def _resolve_branch_selection(self) -> str | None: + """Determine the branch handle selected by human input if available.""" + + variable_pool = self.graph_runtime_state.variable_pool + + for key in self._BRANCH_SELECTION_KEYS: + handle = self._extract_branch_handle(variable_pool.get((self.id, key))) + if handle: + return handle + + default_values = self._node_data.default_value_dict + for key in self._BRANCH_SELECTION_KEYS: + handle = self._normalize_branch_value(default_values.get(key)) + if handle: + return handle + + return None + + @staticmethod + def _extract_branch_handle(segment: Any) -> str | None: + if segment is None: + return None + + candidate = getattr(segment, "to_object", None) + raw_value = candidate() if callable(candidate) else getattr(segment, "value", None) + if raw_value is None: + return None + + return HumanInputNode._normalize_branch_value(raw_value) + + @staticmethod + def _normalize_branch_value(value: Any) -> str | None: + if value is None: + return None + + if isinstance(value, str): + stripped = value.strip() + return stripped or None + + if isinstance(value, Mapping): + for key in ("handle", "edge_source_handle", "edgeSourceHandle", "branch", "id", "value"): + candidate = value.get(key) + if isinstance(candidate, str) and candidate: + return candidate + + return None diff --git a/api/core/workflow/nodes/if_else/if_else_node.py b/api/core/workflow/nodes/if_else/if_else_node.py index 7e3b6ecc1a..165e529714 100644 --- 
a/api/core/workflow/nodes/if_else/if_else_node.py +++ b/api/core/workflow/nodes/if_else/if_else_node.py @@ -3,12 +3,12 @@ from typing import Any, Literal from typing_extensions import deprecated -from core.workflow.entities import VariablePool from core.workflow.enums import ErrorStrategy, NodeExecutionType, NodeType, WorkflowNodeExecutionStatus from core.workflow.node_events import NodeRunResult from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig from core.workflow.nodes.base.node import Node from core.workflow.nodes.if_else.entities import IfElseNodeData +from core.workflow.runtime import VariablePool from core.workflow.utils.condition.entities import Condition from core.workflow.utils.condition.processor import ConditionProcessor diff --git a/api/core/workflow/nodes/iteration/iteration_node.py b/api/core/workflow/nodes/iteration/iteration_node.py index c089a68bd4..41060bd569 100644 --- a/api/core/workflow/nodes/iteration/iteration_node.py +++ b/api/core/workflow/nodes/iteration/iteration_node.py @@ -12,7 +12,6 @@ from core.variables import IntegerVariable, NoneSegment from core.variables.segments import ArrayAnySegment, ArraySegment from core.variables.variables import VariableUnion from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID -from core.workflow.entities import VariablePool from core.workflow.enums import ( ErrorStrategy, NodeExecutionType, @@ -38,6 +37,7 @@ from core.workflow.node_events import ( from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig from core.workflow.nodes.base.node import Node from core.workflow.nodes.iteration.entities import ErrorHandleMode, IterationNodeData +from core.workflow.runtime import VariablePool from libs.datetime_utils import naive_utc_now from libs.flask_utils import preserve_flask_contexts @@ -557,11 +557,12 @@ class IterationNode(Node): def _create_graph_engine(self, index: int, item: object): # Import dependencies - from core.workflow.entities import GraphInitParams, GraphRuntimeState + from core.workflow.entities import GraphInitParams from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine from core.workflow.graph_engine.command_channels import InMemoryChannel from core.workflow.nodes.node_factory import DifyNodeFactory + from core.workflow.runtime import GraphRuntimeState # Create GraphInitParams from node attributes graph_init_params = GraphInitParams( diff --git a/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py b/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py index 2751f24048..2ba1e5e1c5 100644 --- a/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py +++ b/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py @@ -9,13 +9,13 @@ from sqlalchemy import func, select from core.app.entities.app_invoke_entities import InvokeFrom from core.rag.index_processor.index_processor_factory import IndexProcessorFactory from core.rag.retrieval.retrieval_methods import RetrievalMethod -from core.workflow.entities.variable_pool import VariablePool from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus from core.workflow.enums import ErrorStrategy, NodeExecutionType, NodeType, SystemVariableKey from core.workflow.node_events import NodeRunResult from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig from core.workflow.nodes.base.node import Node from core.workflow.nodes.base.template import Template +from core.workflow.runtime import VariablePool from 
extensions.ext_database import db from models.dataset import Dataset, Document, DocumentSegment diff --git a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py index 7091b62463..2dc3cb9320 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py +++ b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py @@ -67,7 +67,7 @@ from .exc import ( if TYPE_CHECKING: from core.file.models import File - from core.workflow.entities import GraphRuntimeState + from core.workflow.runtime import GraphRuntimeState logger = logging.getLogger(__name__) diff --git a/api/core/workflow/nodes/llm/llm_utils.py b/api/core/workflow/nodes/llm/llm_utils.py index aff84433b2..0c545469bc 100644 --- a/api/core/workflow/nodes/llm/llm_utils.py +++ b/api/core/workflow/nodes/llm/llm_utils.py @@ -15,9 +15,9 @@ from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel from core.prompt.entities.advanced_prompt_entities import MemoryConfig from core.variables.segments import ArrayAnySegment, ArrayFileSegment, FileSegment, NoneSegment, StringSegment -from core.workflow.entities import VariablePool from core.workflow.enums import SystemVariableKey from core.workflow.nodes.llm.entities import ModelConfig +from core.workflow.runtime import VariablePool from extensions.ext_database import db from libs.datetime_utils import naive_utc_now from models.model import Conversation diff --git a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py index 13f6d904e6..1644f683bf 100644 --- a/api/core/workflow/nodes/llm/node.py +++ b/api/core/workflow/nodes/llm/node.py @@ -52,7 +52,7 @@ from core.variables import ( StringSegment, ) from core.workflow.constants import SYSTEM_VARIABLE_NODE_ID -from core.workflow.entities import GraphInitParams, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import ( ErrorStrategy, NodeType, @@ -71,6 +71,7 @@ from core.workflow.node_events import ( from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig, VariableSelector from core.workflow.nodes.base.node import Node from core.workflow.nodes.base.variable_template_parser import VariableTemplateParser +from core.workflow.runtime import VariablePool from . 
import llm_utils from .entities import ( @@ -93,7 +94,7 @@ from .file_saver import FileSaverImpl, LLMFileSaver if TYPE_CHECKING: from core.file.models import File - from core.workflow.entities import GraphRuntimeState + from core.workflow.runtime import GraphRuntimeState logger = logging.getLogger(__name__) @@ -440,10 +441,14 @@ class LLMNode(Node): usage = LLMUsage.empty_usage() finish_reason = None full_text_buffer = io.StringIO() + collected_structured_output = None # Collect structured_output from streaming chunks # Consume the invoke result and handle generator exception try: for result in invoke_result: if isinstance(result, LLMResultChunkWithStructuredOutput): + # Collect structured_output from the chunk + if result.structured_output is not None: + collected_structured_output = dict(result.structured_output) yield result if isinstance(result, LLMResultChunk): contents = result.delta.message.content @@ -491,6 +496,8 @@ class LLMNode(Node): finish_reason=finish_reason, # Reasoning content for workflow variables and downstream nodes reasoning_content=reasoning_content, + # Pass structured output if collected from streaming chunks + structured_output=collected_structured_output, ) @staticmethod diff --git a/api/core/workflow/nodes/loop/loop_node.py b/api/core/workflow/nodes/loop/loop_node.py index 790975d556..b51790c0a2 100644 --- a/api/core/workflow/nodes/loop/loop_node.py +++ b/api/core/workflow/nodes/loop/loop_node.py @@ -406,11 +406,12 @@ class LoopNode(Node): def _create_graph_engine(self, start_at: datetime, root_node_id: str): # Import dependencies - from core.workflow.entities import GraphInitParams, GraphRuntimeState + from core.workflow.entities import GraphInitParams from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine from core.workflow.graph_engine.command_channels import InMemoryChannel from core.workflow.nodes.node_factory import DifyNodeFactory + from core.workflow.runtime import GraphRuntimeState # Create GraphInitParams from node attributes graph_init_params = GraphInitParams( diff --git a/api/core/workflow/nodes/node_factory.py b/api/core/workflow/nodes/node_factory.py index df1d685909..87d1b8c435 100644 --- a/api/core/workflow/nodes/node_factory.py +++ b/api/core/workflow/nodes/node_factory.py @@ -10,7 +10,8 @@ from libs.typing import is_str, is_str_dict from .node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING if TYPE_CHECKING: - from core.workflow.entities import GraphInitParams, GraphRuntimeState + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState @final diff --git a/api/core/workflow/nodes/node_mapping.py b/api/core/workflow/nodes/node_mapping.py index 3d3a1bec98..3ee28802f1 100644 --- a/api/core/workflow/nodes/node_mapping.py +++ b/api/core/workflow/nodes/node_mapping.py @@ -9,6 +9,7 @@ from core.workflow.nodes.datasource.datasource_node import DatasourceNode from core.workflow.nodes.document_extractor import DocumentExtractorNode from core.workflow.nodes.end.end_node import EndNode from core.workflow.nodes.http_request import HttpRequestNode +from core.workflow.nodes.human_input import HumanInputNode from core.workflow.nodes.if_else import IfElseNode from core.workflow.nodes.iteration import IterationNode, IterationStartNode from core.workflow.nodes.knowledge_index import KnowledgeIndexNode @@ -134,6 +135,10 @@ NODE_TYPE_CLASSES_MAPPING: Mapping[NodeType, Mapping[str, type[Node]]] = { "2": AgentNode, "1": AgentNode, }, + NodeType.HUMAN_INPUT: { + LATEST_VERSION: 
HumanInputNode,
+        "1": HumanInputNode,
+    },
     NodeType.DATASOURCE: {
         LATEST_VERSION: DatasourceNode,
         "1": DatasourceNode,
diff --git a/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py b/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py
index 875a0598e0..2b65cc30b6 100644
--- a/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py
+++ b/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py
@@ -27,13 +27,13 @@ from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, Comp
 from core.prompt.simple_prompt_transform import ModelMode
 from core.prompt.utils.prompt_message_util import PromptMessageUtil
 from core.variables.types import ArrayValidation, SegmentType
-from core.workflow.entities.variable_pool import VariablePool
 from core.workflow.enums import ErrorStrategy, NodeType, WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus
 from core.workflow.node_events import NodeRunResult
 from core.workflow.nodes.base import variable_template_parser
 from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig
 from core.workflow.nodes.base.node import Node
 from core.workflow.nodes.llm import ModelConfig, llm_utils
+from core.workflow.runtime import VariablePool
 from factories.variable_factory import build_segment_with_type
 
 from .entities import ParameterExtractorNodeData
diff --git a/api/core/workflow/nodes/question_classifier/question_classifier_node.py b/api/core/workflow/nodes/question_classifier/question_classifier_node.py
index 592a6566fd..3f37fc481b 100644
--- a/api/core/workflow/nodes/question_classifier/question_classifier_node.py
+++ b/api/core/workflow/nodes/question_classifier/question_classifier_node.py
@@ -1,4 +1,5 @@
 import json
+import re
 from collections.abc import Mapping, Sequence
 from typing import TYPE_CHECKING, Any
@@ -40,7 +41,7 @@ from .template_prompts import (
 
 if TYPE_CHECKING:
     from core.file.models import File
-    from core.workflow.entities import GraphRuntimeState
+    from core.workflow.runtime import GraphRuntimeState
 
 
 class QuestionClassifierNode(Node):
@@ -194,6 +195,8 @@ class QuestionClassifierNode(Node):
             category_name = node_data.classes[0].name
             category_id = node_data.classes[0].id
 
+            if "</think>" in result_text:
+                result_text = re.sub(r"<think[^>]*>[\s\S]*?</think>", "", result_text, flags=re.IGNORECASE)
             result_text_json = parse_and_check_json_markdown(result_text, [])
             # result_text_json = json.loads(result_text.strip('```JSON\n'))
             if "category_name" in result_text_json and "category_id" in result_text_json:
diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py
index cd0094f531..2e2c32ac93 100644
--- a/api/core/workflow/nodes/tool/tool_node.py
+++ b/api/core/workflow/nodes/tool/tool_node.py
@@ -36,7 +36,7 @@ from .exc import (
 )
 
 if TYPE_CHECKING:
-    from core.workflow.entities import VariablePool
+    from core.workflow.runtime import VariablePool
 
 
 class ToolNode(Node):
diff --git a/api/core/workflow/nodes/variable_assigner/v1/node.py b/api/core/workflow/nodes/variable_assigner/v1/node.py
index c2a9ecd7fb..8cd267c4a7 100644
--- a/api/core/workflow/nodes/variable_assigner/v1/node.py
+++ b/api/core/workflow/nodes/variable_assigner/v1/node.py
@@ -18,7 +18,7 @@ from ..common.impl import conversation_variable_updater_factory
 from .node_data import VariableAssignerData, WriteMode
 
 if TYPE_CHECKING:
-    from core.workflow.entities import GraphRuntimeState
+    from core.workflow.runtime import GraphRuntimeState
 
 _CONV_VAR_UPDATER_FACTORY: TypeAlias =
Callable[[], ConversationVariableUpdater] diff --git a/api/core/workflow/runtime/__init__.py b/api/core/workflow/runtime/__init__.py new file mode 100644 index 0000000000..10014c7182 --- /dev/null +++ b/api/core/workflow/runtime/__init__.py @@ -0,0 +1,14 @@ +from .graph_runtime_state import GraphRuntimeState +from .graph_runtime_state_protocol import ReadOnlyGraphRuntimeState, ReadOnlyVariablePool +from .read_only_wrappers import ReadOnlyGraphRuntimeStateWrapper, ReadOnlyVariablePoolWrapper +from .variable_pool import VariablePool, VariableValue + +__all__ = [ + "GraphRuntimeState", + "ReadOnlyGraphRuntimeState", + "ReadOnlyGraphRuntimeStateWrapper", + "ReadOnlyVariablePool", + "ReadOnlyVariablePoolWrapper", + "VariablePool", + "VariableValue", +] diff --git a/api/core/workflow/runtime/graph_runtime_state.py b/api/core/workflow/runtime/graph_runtime_state.py new file mode 100644 index 0000000000..486718dc62 --- /dev/null +++ b/api/core/workflow/runtime/graph_runtime_state.py @@ -0,0 +1,393 @@ +from __future__ import annotations + +import importlib +import json +from collections.abc import Mapping, Sequence +from collections.abc import Mapping as TypingMapping +from copy import deepcopy +from typing import Any, Protocol + +from pydantic.json import pydantic_encoder + +from core.model_runtime.entities.llm_entities import LLMUsage +from core.workflow.runtime.variable_pool import VariablePool + + +class ReadyQueueProtocol(Protocol): + """Structural interface required from ready queue implementations.""" + + def put(self, item: str) -> None: + """Enqueue the identifier of a node that is ready to run.""" + ... + + def get(self, timeout: float | None = None) -> str: + """Return the next node identifier, blocking until available or timeout expires.""" + ... + + def task_done(self) -> None: + """Signal that the most recently dequeued node has completed processing.""" + ... + + def empty(self) -> bool: + """Return True when the queue contains no pending nodes.""" + ... + + def qsize(self) -> int: + """Approximate the number of pending nodes awaiting execution.""" + ... + + def dumps(self) -> str: + """Serialize the queue contents for persistence.""" + ... + + def loads(self, data: str) -> None: + """Restore the queue contents from a serialized payload.""" + ... + + +class GraphExecutionProtocol(Protocol): + """Structural interface for graph execution aggregate.""" + + workflow_id: str + started: bool + completed: bool + aborted: bool + error: Exception | None + exceptions_count: int + + def start(self) -> None: + """Transition execution into the running state.""" + ... + + def complete(self) -> None: + """Mark execution as successfully completed.""" + ... + + def abort(self, reason: str) -> None: + """Abort execution in response to an external stop request.""" + ... + + def fail(self, error: Exception) -> None: + """Record an unrecoverable error and end execution.""" + ... + + def dumps(self) -> str: + """Serialize execution state into a JSON payload.""" + ... + + def loads(self, data: str) -> None: + """Restore execution state from a previously serialized payload.""" + ... + + +class ResponseStreamCoordinatorProtocol(Protocol): + """Structural interface for response stream coordinator.""" + + def register(self, response_node_id: str) -> None: + """Register a response node so its outputs can be streamed.""" + ... + + def loads(self, data: str) -> None: + """Restore coordinator state from a serialized payload.""" + ... 
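+    # A minimal usage sketch (inferred from the builder in GraphRuntimeState, not
+    # spelled out in this patch): persist with `snapshot = coordinator.dumps()` while
+    # a run is paused, then rebuild via
+    # `restored = ResponseStreamCoordinator(variable_pool=pool, graph=graph)` followed
+    # by `restored.loads(snapshot)` to pick up streaming where it left off; `pool` and
+    # `graph` here stand for whatever instances the caller already holds.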
+ + def dumps(self) -> str: + """Serialize coordinator state for persistence.""" + ... + + +class GraphProtocol(Protocol): + """Structural interface required from graph instances attached to the runtime state.""" + + nodes: TypingMapping[str, object] + edges: TypingMapping[str, object] + root_node: object + + def get_outgoing_edges(self, node_id: str) -> Sequence[object]: ... + + +class GraphRuntimeState: + """Mutable runtime state shared across graph execution components.""" + + def __init__( + self, + *, + variable_pool: VariablePool, + start_at: float, + total_tokens: int = 0, + llm_usage: LLMUsage | None = None, + outputs: dict[str, object] | None = None, + node_run_steps: int = 0, + ready_queue: ReadyQueueProtocol | None = None, + graph_execution: GraphExecutionProtocol | None = None, + response_coordinator: ResponseStreamCoordinatorProtocol | None = None, + graph: GraphProtocol | None = None, + ) -> None: + self._variable_pool = variable_pool + self._start_at = start_at + + if total_tokens < 0: + raise ValueError("total_tokens must be non-negative") + self._total_tokens = total_tokens + + self._llm_usage = (llm_usage or LLMUsage.empty_usage()).model_copy() + self._outputs = deepcopy(outputs) if outputs is not None else {} + + if node_run_steps < 0: + raise ValueError("node_run_steps must be non-negative") + self._node_run_steps = node_run_steps + + self._graph: GraphProtocol | None = None + + self._ready_queue = ready_queue + self._graph_execution = graph_execution + self._response_coordinator = response_coordinator + self._pending_response_coordinator_dump: str | None = None + self._pending_graph_execution_workflow_id: str | None = None + self._paused_nodes: set[str] = set() + + if graph is not None: + self.attach_graph(graph) + + # ------------------------------------------------------------------ + # Context binding helpers + # ------------------------------------------------------------------ + def attach_graph(self, graph: GraphProtocol) -> None: + """Attach the materialized graph to the runtime state.""" + if self._graph is not None and self._graph is not graph: + raise ValueError("GraphRuntimeState already attached to a different graph instance") + + self._graph = graph + + if self._response_coordinator is None: + self._response_coordinator = self._build_response_coordinator(graph) + + if self._pending_response_coordinator_dump is not None and self._response_coordinator is not None: + self._response_coordinator.loads(self._pending_response_coordinator_dump) + self._pending_response_coordinator_dump = None + + def configure(self, *, graph: GraphProtocol | None = None) -> None: + """Ensure core collaborators are initialized with the provided context.""" + if graph is not None: + self.attach_graph(graph) + + # Ensure collaborators are instantiated + _ = self.ready_queue + _ = self.graph_execution + if self._graph is not None: + _ = self.response_coordinator + + # ------------------------------------------------------------------ + # Primary collaborators + # ------------------------------------------------------------------ + @property + def variable_pool(self) -> VariablePool: + return self._variable_pool + + @property + def ready_queue(self) -> ReadyQueueProtocol: + if self._ready_queue is None: + self._ready_queue = self._build_ready_queue() + return self._ready_queue + + @property + def graph_execution(self) -> GraphExecutionProtocol: + if self._graph_execution is None: + self._graph_execution = self._build_graph_execution() + return self._graph_execution + + @property + def 
response_coordinator(self) -> ResponseStreamCoordinatorProtocol: + if self._response_coordinator is None: + if self._graph is None: + raise ValueError("Graph must be attached before accessing response coordinator") + self._response_coordinator = self._build_response_coordinator(self._graph) + return self._response_coordinator + + # ------------------------------------------------------------------ + # Scalar state + # ------------------------------------------------------------------ + @property + def start_at(self) -> float: + return self._start_at + + @start_at.setter + def start_at(self, value: float) -> None: + self._start_at = value + + @property + def total_tokens(self) -> int: + return self._total_tokens + + @total_tokens.setter + def total_tokens(self, value: int) -> None: + if value < 0: + raise ValueError("total_tokens must be non-negative") + self._total_tokens = value + + @property + def llm_usage(self) -> LLMUsage: + return self._llm_usage.model_copy() + + @llm_usage.setter + def llm_usage(self, value: LLMUsage) -> None: + self._llm_usage = value.model_copy() + + @property + def outputs(self) -> dict[str, Any]: + return deepcopy(self._outputs) + + @outputs.setter + def outputs(self, value: dict[str, Any]) -> None: + self._outputs = deepcopy(value) + + def set_output(self, key: str, value: object) -> None: + self._outputs[key] = deepcopy(value) + + def get_output(self, key: str, default: object = None) -> object: + return deepcopy(self._outputs.get(key, default)) + + def update_outputs(self, updates: dict[str, object]) -> None: + for key, value in updates.items(): + self._outputs[key] = deepcopy(value) + + @property + def node_run_steps(self) -> int: + return self._node_run_steps + + @node_run_steps.setter + def node_run_steps(self, value: int) -> None: + if value < 0: + raise ValueError("node_run_steps must be non-negative") + self._node_run_steps = value + + def increment_node_run_steps(self) -> None: + self._node_run_steps += 1 + + def add_tokens(self, tokens: int) -> None: + if tokens < 0: + raise ValueError("tokens must be non-negative") + self._total_tokens += tokens + + # ------------------------------------------------------------------ + # Serialization + # ------------------------------------------------------------------ + def dumps(self) -> str: + """Serialize runtime state into a JSON string.""" + + snapshot: dict[str, Any] = { + "version": "1.0", + "start_at": self._start_at, + "total_tokens": self._total_tokens, + "node_run_steps": self._node_run_steps, + "llm_usage": self._llm_usage.model_dump(mode="json"), + "outputs": self.outputs, + "variable_pool": self.variable_pool.model_dump(mode="json"), + "ready_queue": self.ready_queue.dumps(), + "graph_execution": self.graph_execution.dumps(), + "paused_nodes": list(self._paused_nodes), + } + + if self._response_coordinator is not None and self._graph is not None: + snapshot["response_coordinator"] = self._response_coordinator.dumps() + + return json.dumps(snapshot, default=pydantic_encoder) + + def loads(self, data: str | Mapping[str, Any]) -> None: + """Restore runtime state from a serialized snapshot.""" + + payload: dict[str, Any] + if isinstance(data, str): + payload = json.loads(data) + else: + payload = dict(data) + + version = payload.get("version") + if version != "1.0": + raise ValueError(f"Unsupported GraphRuntimeState snapshot version: {version}") + + self._start_at = float(payload.get("start_at", 0.0)) + total_tokens = int(payload.get("total_tokens", 0)) + if total_tokens < 0: + raise 
ValueError("total_tokens must be non-negative") + self._total_tokens = total_tokens + + node_run_steps = int(payload.get("node_run_steps", 0)) + if node_run_steps < 0: + raise ValueError("node_run_steps must be non-negative") + self._node_run_steps = node_run_steps + + llm_usage_payload = payload.get("llm_usage", {}) + self._llm_usage = LLMUsage.model_validate(llm_usage_payload) + + self._outputs = deepcopy(payload.get("outputs", {})) + + variable_pool_payload = payload.get("variable_pool") + if variable_pool_payload is not None: + self._variable_pool = VariablePool.model_validate(variable_pool_payload) + + ready_queue_payload = payload.get("ready_queue") + if ready_queue_payload is not None: + self._ready_queue = self._build_ready_queue() + self._ready_queue.loads(ready_queue_payload) + else: + self._ready_queue = None + + graph_execution_payload = payload.get("graph_execution") + self._graph_execution = None + self._pending_graph_execution_workflow_id = None + if graph_execution_payload is not None: + try: + execution_payload = json.loads(graph_execution_payload) + self._pending_graph_execution_workflow_id = execution_payload.get("workflow_id") + except (json.JSONDecodeError, TypeError, AttributeError): + self._pending_graph_execution_workflow_id = None + self.graph_execution.loads(graph_execution_payload) + + response_payload = payload.get("response_coordinator") + if response_payload is not None: + if self._graph is not None: + self.response_coordinator.loads(response_payload) + else: + self._pending_response_coordinator_dump = response_payload + else: + self._pending_response_coordinator_dump = None + self._response_coordinator = None + + paused_nodes_payload = payload.get("paused_nodes", []) + self._paused_nodes = set(map(str, paused_nodes_payload)) + + def register_paused_node(self, node_id: str) -> None: + """Record a node that should resume when execution is continued.""" + + self._paused_nodes.add(node_id) + + def consume_paused_nodes(self) -> list[str]: + """Retrieve and clear the list of paused nodes awaiting resume.""" + + nodes = list(self._paused_nodes) + self._paused_nodes.clear() + return nodes + + # ------------------------------------------------------------------ + # Builders + # ------------------------------------------------------------------ + def _build_ready_queue(self) -> ReadyQueueProtocol: + # Import lazily to avoid breaching architecture boundaries enforced by import-linter. + module = importlib.import_module("core.workflow.graph_engine.ready_queue") + in_memory_cls = module.InMemoryReadyQueue + return in_memory_cls() + + def _build_graph_execution(self) -> GraphExecutionProtocol: + # Lazily import to keep the runtime domain decoupled from graph_engine modules. + module = importlib.import_module("core.workflow.graph_engine.domain.graph_execution") + graph_execution_cls = module.GraphExecution + workflow_id = self._pending_graph_execution_workflow_id or "" + self._pending_graph_execution_workflow_id = None + return graph_execution_cls(workflow_id=workflow_id) + + def _build_response_coordinator(self, graph: GraphProtocol) -> ResponseStreamCoordinatorProtocol: + # Lazily import to keep the runtime domain decoupled from graph_engine modules. 
+ module = importlib.import_module("core.workflow.graph_engine.response_coordinator") + coordinator_cls = module.ResponseStreamCoordinator + return coordinator_cls(variable_pool=self.variable_pool, graph=graph) diff --git a/api/core/workflow/graph/graph_runtime_state_protocol.py b/api/core/workflow/runtime/graph_runtime_state_protocol.py similarity index 76% rename from api/core/workflow/graph/graph_runtime_state_protocol.py rename to api/core/workflow/runtime/graph_runtime_state_protocol.py index d7961405ca..40835a936f 100644 --- a/api/core/workflow/graph/graph_runtime_state_protocol.py +++ b/api/core/workflow/runtime/graph_runtime_state_protocol.py @@ -16,6 +16,10 @@ class ReadOnlyVariablePool(Protocol): """Get all variables for a node (read-only).""" ... + def get_by_prefix(self, prefix: str) -> Mapping[str, object]: + """Get all variables stored under a given node prefix (read-only).""" + ... + class ReadOnlyGraphRuntimeState(Protocol): """ @@ -56,6 +60,20 @@ class ReadOnlyGraphRuntimeState(Protocol): """Get the node run steps count (read-only).""" ... + @property + def ready_queue_size(self) -> int: + """Get the number of nodes currently in the ready queue.""" + ... + + @property + def exceptions_count(self) -> int: + """Get the number of node execution exceptions recorded.""" + ... + def get_output(self, key: str, default: Any = None) -> Any: """Get a single output value (returns a copy).""" ... + + def dumps(self) -> str: + """Serialize the runtime state into a JSON snapshot (read-only).""" + ... diff --git a/api/core/workflow/graph/read_only_state_wrapper.py b/api/core/workflow/runtime/read_only_wrappers.py similarity index 54% rename from api/core/workflow/graph/read_only_state_wrapper.py rename to api/core/workflow/runtime/read_only_wrappers.py index 255bb5adee..664c365295 100644 --- a/api/core/workflow/graph/read_only_state_wrapper.py +++ b/api/core/workflow/runtime/read_only_wrappers.py @@ -1,77 +1,82 @@ +from __future__ import annotations + from collections.abc import Mapping from copy import deepcopy from typing import Any from core.model_runtime.entities.llm_entities import LLMUsage from core.variables.segments import Segment -from core.workflow.entities.graph_runtime_state import GraphRuntimeState -from core.workflow.entities.variable_pool import VariablePool + +from .graph_runtime_state import GraphRuntimeState +from .variable_pool import VariablePool class ReadOnlyVariablePoolWrapper: - """Wrapper that provides read-only access to VariablePool.""" + """Provide defensive, read-only access to ``VariablePool``.""" - def __init__(self, variable_pool: VariablePool): + def __init__(self, variable_pool: VariablePool) -> None: self._variable_pool = variable_pool def get(self, node_id: str, variable_key: str) -> Segment | None: - """Get a variable value (returns a defensive copy).""" + """Return a copy of a variable value if present.""" value = self._variable_pool.get([node_id, variable_key]) return deepcopy(value) if value is not None else None def get_all_by_node(self, node_id: str) -> Mapping[str, object]: - """Get all variables for a node (returns defensive copies).""" + """Return a copy of all variables for the specified node.""" variables: dict[str, object] = {} if node_id in self._variable_pool.variable_dictionary: - for key, var in self._variable_pool.variable_dictionary[node_id].items(): - # Variables have a value property that contains the actual data - variables[key] = deepcopy(var.value) + for key, variable in self._variable_pool.variable_dictionary[node_id].items(): + 
variables[key] = deepcopy(variable.value) return variables + def get_by_prefix(self, prefix: str) -> Mapping[str, object]: + """Return a copy of all variables stored under the given prefix.""" + return self._variable_pool.get_by_prefix(prefix) + class ReadOnlyGraphRuntimeStateWrapper: - """ - Wrapper that provides read-only access to GraphRuntimeState. + """Expose a defensive, read-only view of ``GraphRuntimeState``.""" - This wrapper ensures that layers can observe the state without - modifying it. All returned values are defensive copies. - """ - - def __init__(self, state: GraphRuntimeState): + def __init__(self, state: GraphRuntimeState) -> None: self._state = state self._variable_pool_wrapper = ReadOnlyVariablePoolWrapper(state.variable_pool) @property def variable_pool(self) -> ReadOnlyVariablePoolWrapper: - """Get read-only access to the variable pool.""" return self._variable_pool_wrapper @property def start_at(self) -> float: - """Get the start time (read-only).""" return self._state.start_at @property def total_tokens(self) -> int: - """Get the total tokens count (read-only).""" return self._state.total_tokens @property def llm_usage(self) -> LLMUsage: - """Get a copy of LLM usage info (read-only).""" - # Return a copy to prevent modification return self._state.llm_usage.model_copy() @property def outputs(self) -> dict[str, Any]: - """Get a defensive copy of outputs (read-only).""" return deepcopy(self._state.outputs) @property def node_run_steps(self) -> int: - """Get the node run steps count (read-only).""" return self._state.node_run_steps + @property + def ready_queue_size(self) -> int: + return self._state.ready_queue.qsize() + + @property + def exceptions_count(self) -> int: + return self._state.graph_execution.exceptions_count + def get_output(self, key: str, default: Any = None) -> Any: - """Get a single output value (returns a copy).""" return self._state.get_output(key, default) + + def dumps(self) -> str: + """Serialize the underlying runtime state for external persistence.""" + return self._state.dumps() diff --git a/api/core/workflow/entities/variable_pool.py b/api/core/workflow/runtime/variable_pool.py similarity index 95% rename from api/core/workflow/entities/variable_pool.py rename to api/core/workflow/runtime/variable_pool.py index 2dc00fd70b..5fd6e894f1 100644 --- a/api/core/workflow/entities/variable_pool.py +++ b/api/core/workflow/runtime/variable_pool.py @@ -1,6 +1,7 @@ import re from collections import defaultdict from collections.abc import Mapping, Sequence +from copy import deepcopy from typing import Annotated, Any, Union, cast from pydantic import BaseModel, Field @@ -235,6 +236,20 @@ class VariablePool(BaseModel): return segment return None + def get_by_prefix(self, prefix: str, /) -> Mapping[str, object]: + """Return a copy of all variables stored under the given node prefix.""" + + nodes = self.variable_dictionary.get(prefix) + if not nodes: + return {} + + result: dict[str, object] = {} + for key, variable in nodes.items(): + value = variable.value + result[key] = deepcopy(value) + + return result + def _add_system_variables(self, system_variable: SystemVariable): sys_var_mapping = system_variable.to_dict() for key, value in sys_var_mapping.items(): diff --git a/api/core/workflow/utils/condition/processor.py b/api/core/workflow/utils/condition/processor.py index f4bbe9c3c3..650a44c681 100644 --- a/api/core/workflow/utils/condition/processor.py +++ b/api/core/workflow/utils/condition/processor.py @@ -5,7 +5,7 @@ from typing import Literal, 
NamedTuple from core.file import FileAttribute, file_manager from core.variables import ArrayFileSegment from core.variables.segments import ArrayBooleanSegment, BooleanSegment -from core.workflow.entities import VariablePool +from core.workflow.runtime import VariablePool from .entities import Condition, SubCondition, SupportedComparisonOperator diff --git a/api/core/workflow/variable_loader.py b/api/core/workflow/variable_loader.py index 1b31022495..ea0bdc3537 100644 --- a/api/core/workflow/variable_loader.py +++ b/api/core/workflow/variable_loader.py @@ -4,7 +4,7 @@ from typing import Any, Protocol from core.variables import Variable from core.variables.consts import SELECTORS_LENGTH -from core.workflow.entities.variable_pool import VariablePool +from core.workflow.runtime import VariablePool class VariableLoader(Protocol): diff --git a/api/core/workflow/workflow_cycle_manager.py b/api/core/workflow/workflow_cycle_manager.py deleted file mode 100644 index a88f350a9e..0000000000 --- a/api/core/workflow/workflow_cycle_manager.py +++ /dev/null @@ -1,459 +0,0 @@ -from collections.abc import Mapping -from dataclasses import dataclass -from datetime import datetime -from typing import Any, Union - -from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity -from core.app.entities.queue_entities import ( - QueueNodeExceptionEvent, - QueueNodeFailedEvent, - QueueNodeRetryEvent, - QueueNodeStartedEvent, - QueueNodeSucceededEvent, -) -from core.app.task_pipeline.exc import WorkflowRunNotFoundError -from core.ops.entities.trace_entity import TraceTaskName -from core.ops.ops_trace_manager import TraceQueueManager, TraceTask -from core.workflow.entities import ( - WorkflowExecution, - WorkflowNodeExecution, -) -from core.workflow.enums import ( - SystemVariableKey, - WorkflowExecutionStatus, - WorkflowNodeExecutionMetadataKey, - WorkflowNodeExecutionStatus, - WorkflowType, -) -from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository -from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository -from core.workflow.system_variable import SystemVariable -from core.workflow.workflow_entry import WorkflowEntry -from libs.datetime_utils import naive_utc_now -from libs.uuid_utils import uuidv7 - - -@dataclass -class CycleManagerWorkflowInfo: - workflow_id: str - workflow_type: WorkflowType - version: str - graph_data: Mapping[str, Any] - - -class WorkflowCycleManager: - def __init__( - self, - *, - application_generate_entity: Union[AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity], - workflow_system_variables: SystemVariable, - workflow_info: CycleManagerWorkflowInfo, - workflow_execution_repository: WorkflowExecutionRepository, - workflow_node_execution_repository: WorkflowNodeExecutionRepository, - ): - self._application_generate_entity = application_generate_entity - self._workflow_system_variables = workflow_system_variables - self._workflow_info = workflow_info - self._workflow_execution_repository = workflow_execution_repository - self._workflow_node_execution_repository = workflow_node_execution_repository - - # Initialize caches for workflow execution cycle - # These caches avoid redundant repository calls during a single workflow execution - self._workflow_execution_cache: dict[str, WorkflowExecution] = {} - self._node_execution_cache: dict[str, WorkflowNodeExecution] = {} - - def handle_workflow_run_start(self) -> WorkflowExecution: - inputs = 
self._prepare_workflow_inputs() - execution_id = self._get_or_generate_execution_id() - - execution = WorkflowExecution.new( - id_=execution_id, - workflow_id=self._workflow_info.workflow_id, - workflow_type=self._workflow_info.workflow_type, - workflow_version=self._workflow_info.version, - graph=self._workflow_info.graph_data, - inputs=inputs, - started_at=naive_utc_now(), - ) - - return self._save_and_cache_workflow_execution(execution) - - def handle_workflow_run_success( - self, - *, - workflow_run_id: str, - total_tokens: int, - total_steps: int, - outputs: Mapping[str, Any] | None = None, - conversation_id: str | None = None, - trace_manager: TraceQueueManager | None = None, - external_trace_id: str | None = None, - ) -> WorkflowExecution: - workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id) - - self._update_workflow_execution_completion( - workflow_execution, - status=WorkflowExecutionStatus.SUCCEEDED, - outputs=outputs, - total_tokens=total_tokens, - total_steps=total_steps, - ) - - self._add_trace_task_if_needed(trace_manager, workflow_execution, conversation_id, external_trace_id) - - self._workflow_execution_repository.save(workflow_execution) - return workflow_execution - - def handle_workflow_run_partial_success( - self, - *, - workflow_run_id: str, - total_tokens: int, - total_steps: int, - outputs: Mapping[str, Any] | None = None, - exceptions_count: int = 0, - conversation_id: str | None = None, - trace_manager: TraceQueueManager | None = None, - external_trace_id: str | None = None, - ) -> WorkflowExecution: - execution = self._get_workflow_execution_or_raise_error(workflow_run_id) - - self._update_workflow_execution_completion( - execution, - status=WorkflowExecutionStatus.PARTIAL_SUCCEEDED, - outputs=outputs, - total_tokens=total_tokens, - total_steps=total_steps, - exceptions_count=exceptions_count, - ) - - self._add_trace_task_if_needed(trace_manager, execution, conversation_id, external_trace_id) - - self._workflow_execution_repository.save(execution) - return execution - - def handle_workflow_run_failed( - self, - *, - workflow_run_id: str, - total_tokens: int, - total_steps: int, - status: WorkflowExecutionStatus, - error_message: str, - conversation_id: str | None = None, - trace_manager: TraceQueueManager | None = None, - exceptions_count: int = 0, - external_trace_id: str | None = None, - ) -> WorkflowExecution: - workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id) - now = naive_utc_now() - - self._update_workflow_execution_completion( - workflow_execution, - status=status, - total_tokens=total_tokens, - total_steps=total_steps, - error_message=error_message, - exceptions_count=exceptions_count, - finished_at=now, - ) - - self._fail_running_node_executions(workflow_execution.id_, error_message, now) - self._add_trace_task_if_needed(trace_manager, workflow_execution, conversation_id, external_trace_id) - - self._workflow_execution_repository.save(workflow_execution) - return workflow_execution - - def handle_node_execution_start( - self, - *, - workflow_execution_id: str, - event: QueueNodeStartedEvent, - ) -> WorkflowNodeExecution: - workflow_execution = self._get_workflow_execution_or_raise_error(workflow_execution_id) - - domain_execution = self._create_node_execution_from_event( - workflow_execution=workflow_execution, - event=event, - status=WorkflowNodeExecutionStatus.RUNNING, - ) - - return self._save_and_cache_node_execution(domain_execution) - - def handle_workflow_node_execution_success(self, 
*, event: QueueNodeSucceededEvent) -> WorkflowNodeExecution: - domain_execution = self._get_node_execution_from_cache(event.node_execution_id) - - self._update_node_execution_completion( - domain_execution, - event=event, - status=WorkflowNodeExecutionStatus.SUCCEEDED, - ) - - self._workflow_node_execution_repository.save(domain_execution) - self._workflow_node_execution_repository.save_execution_data(domain_execution) - return domain_execution - - def handle_workflow_node_execution_failed( - self, - *, - event: QueueNodeFailedEvent | QueueNodeExceptionEvent, - ) -> WorkflowNodeExecution: - """ - Workflow node execution failed - :param event: queue node failed event - :return: - """ - domain_execution = self._get_node_execution_from_cache(event.node_execution_id) - - status = ( - WorkflowNodeExecutionStatus.EXCEPTION - if isinstance(event, QueueNodeExceptionEvent) - else WorkflowNodeExecutionStatus.FAILED - ) - - self._update_node_execution_completion( - domain_execution, - event=event, - status=status, - error=event.error, - handle_special_values=True, - ) - - self._workflow_node_execution_repository.save(domain_execution) - self._workflow_node_execution_repository.save_execution_data(domain_execution) - return domain_execution - - def handle_workflow_node_execution_retried( - self, *, workflow_execution_id: str, event: QueueNodeRetryEvent - ) -> WorkflowNodeExecution: - workflow_execution = self._get_workflow_execution_or_raise_error(workflow_execution_id) - - domain_execution = self._create_node_execution_from_event( - workflow_execution=workflow_execution, - event=event, - status=WorkflowNodeExecutionStatus.RETRY, - error=event.error, - created_at=event.start_at, - ) - - # Handle inputs and outputs - inputs = WorkflowEntry.handle_special_values(event.inputs) - outputs = event.outputs - metadata = self._merge_event_metadata(event) - - domain_execution.update_from_mapping(inputs=inputs, outputs=outputs, metadata=metadata) - - execution = self._save_and_cache_node_execution(domain_execution) - self._workflow_node_execution_repository.save_execution_data(execution) - return execution - - def _get_workflow_execution_or_raise_error(self, id: str, /) -> WorkflowExecution: - # Check cache first - if id in self._workflow_execution_cache: - return self._workflow_execution_cache[id] - - raise WorkflowRunNotFoundError(id) - - def _prepare_workflow_inputs(self) -> dict[str, Any]: - """Prepare workflow inputs by merging application inputs with system variables.""" - inputs = {**self._application_generate_entity.inputs} - - if self._workflow_system_variables: - for field_name, value in self._workflow_system_variables.to_dict().items(): - if field_name != SystemVariableKey.CONVERSATION_ID: - inputs[f"sys.{field_name}"] = value - - return dict(WorkflowEntry.handle_special_values(inputs) or {}) - - def _get_or_generate_execution_id(self) -> str: - """Get execution ID from system variables or generate a new one.""" - if self._workflow_system_variables and self._workflow_system_variables.workflow_execution_id: - return str(self._workflow_system_variables.workflow_execution_id) - return str(uuidv7()) - - def _save_and_cache_workflow_execution(self, execution: WorkflowExecution) -> WorkflowExecution: - """Save workflow execution to repository and cache it.""" - self._workflow_execution_repository.save(execution) - self._workflow_execution_cache[execution.id_] = execution - return execution - - def _save_and_cache_node_execution(self, execution: WorkflowNodeExecution) -> WorkflowNodeExecution: - """Save node 
execution to repository and cache it if it has an ID. - - This does not persist the `inputs` / `process_data` / `outputs` fields of the execution model. - """ - self._workflow_node_execution_repository.save(execution) - if execution.node_execution_id: - self._node_execution_cache[execution.node_execution_id] = execution - return execution - - def _get_node_execution_from_cache(self, node_execution_id: str) -> WorkflowNodeExecution: - """Get node execution from cache or raise error if not found.""" - domain_execution = self._node_execution_cache.get(node_execution_id) - if not domain_execution: - raise ValueError(f"Domain node execution not found: {node_execution_id}") - return domain_execution - - def _update_workflow_execution_completion( - self, - execution: WorkflowExecution, - *, - status: WorkflowExecutionStatus, - total_tokens: int, - total_steps: int, - outputs: Mapping[str, Any] | None = None, - error_message: str | None = None, - exceptions_count: int = 0, - finished_at: datetime | None = None, - ): - """Update workflow execution with completion data.""" - execution.status = status - execution.outputs = outputs or {} - execution.total_tokens = total_tokens - execution.total_steps = total_steps - execution.finished_at = finished_at or naive_utc_now() - execution.exceptions_count = exceptions_count - if error_message: - execution.error_message = error_message - - def _add_trace_task_if_needed( - self, - trace_manager: TraceQueueManager | None, - workflow_execution: WorkflowExecution, - conversation_id: str | None, - external_trace_id: str | None, - ): - """Add trace task if trace manager is provided.""" - if trace_manager: - trace_manager.add_trace_task( - TraceTask( - TraceTaskName.WORKFLOW_TRACE, - workflow_execution=workflow_execution, - conversation_id=conversation_id, - user_id=trace_manager.user_id, - external_trace_id=external_trace_id, - ) - ) - - def _fail_running_node_executions( - self, - workflow_execution_id: str, - error_message: str, - now: datetime, - ): - """Fail all running node executions for a workflow.""" - running_node_executions = [ - node_exec - for node_exec in self._node_execution_cache.values() - if node_exec.workflow_execution_id == workflow_execution_id - and node_exec.status == WorkflowNodeExecutionStatus.RUNNING - ] - - for node_execution in running_node_executions: - if node_execution.node_execution_id: - node_execution.status = WorkflowNodeExecutionStatus.FAILED - node_execution.error = error_message - node_execution.finished_at = now - node_execution.elapsed_time = (now - node_execution.created_at).total_seconds() - self._workflow_node_execution_repository.save(node_execution) - - def _create_node_execution_from_event( - self, - *, - workflow_execution: WorkflowExecution, - event: QueueNodeStartedEvent, - status: WorkflowNodeExecutionStatus, - error: str | None = None, - created_at: datetime | None = None, - ) -> WorkflowNodeExecution: - """Create a node execution from an event.""" - now = naive_utc_now() - created_at = created_at or now - - metadata = { - WorkflowNodeExecutionMetadataKey.PARALLEL_MODE_RUN_ID: event.parallel_mode_run_id, - WorkflowNodeExecutionMetadataKey.ITERATION_ID: event.in_iteration_id, - WorkflowNodeExecutionMetadataKey.LOOP_ID: event.in_loop_id, - } - - domain_execution = WorkflowNodeExecution( - id=event.node_execution_id, - workflow_id=workflow_execution.workflow_id, - workflow_execution_id=workflow_execution.id_, - predecessor_node_id=event.predecessor_node_id, - index=event.node_run_index, - 
node_execution_id=event.node_execution_id, - node_id=event.node_id, - node_type=event.node_type, - title=event.node_title, - status=status, - metadata=metadata, - created_at=created_at, - error=error, - ) - - if status == WorkflowNodeExecutionStatus.RETRY: - domain_execution.finished_at = now - domain_execution.elapsed_time = (now - created_at).total_seconds() - - return domain_execution - - def _update_node_execution_completion( - self, - domain_execution: WorkflowNodeExecution, - *, - event: Union[ - QueueNodeSucceededEvent, - QueueNodeFailedEvent, - QueueNodeExceptionEvent, - ], - status: WorkflowNodeExecutionStatus, - error: str | None = None, - handle_special_values: bool = False, - ): - """Update node execution with completion data.""" - finished_at = naive_utc_now() - elapsed_time = (finished_at - event.start_at).total_seconds() - - # Process data - if handle_special_values: - inputs = WorkflowEntry.handle_special_values(event.inputs) - process_data = WorkflowEntry.handle_special_values(event.process_data) - else: - inputs = event.inputs - process_data = event.process_data - - outputs = event.outputs - - # Convert metadata - execution_metadata_dict: dict[WorkflowNodeExecutionMetadataKey, Any] = {} - if event.execution_metadata: - execution_metadata_dict.update(event.execution_metadata) - - # Update domain model - domain_execution.status = status - domain_execution.update_from_mapping( - inputs=inputs, - process_data=process_data, - outputs=outputs, - metadata=execution_metadata_dict, - ) - domain_execution.finished_at = finished_at - domain_execution.elapsed_time = elapsed_time - - if error: - domain_execution.error = error - - def _merge_event_metadata(self, event: QueueNodeRetryEvent) -> dict[WorkflowNodeExecutionMetadataKey, str | None]: - """Merge event metadata with origin metadata.""" - origin_metadata = { - WorkflowNodeExecutionMetadataKey.ITERATION_ID: event.in_iteration_id, - WorkflowNodeExecutionMetadataKey.PARALLEL_MODE_RUN_ID: event.parallel_mode_run_id, - WorkflowNodeExecutionMetadataKey.LOOP_ID: event.in_loop_id, - } - - execution_metadata_dict: dict[WorkflowNodeExecutionMetadataKey, str | None] = {} - if event.execution_metadata: - execution_metadata_dict.update(event.execution_metadata) - - return {**execution_metadata_dict, **origin_metadata} if execution_metadata_dict else origin_metadata diff --git a/api/core/workflow/workflow_entry.py b/api/core/workflow/workflow_entry.py index 4cd885cfa5..742c42ec2b 100644 --- a/api/core/workflow/workflow_entry.py +++ b/api/core/workflow/workflow_entry.py @@ -9,7 +9,7 @@ from core.app.apps.exc import GenerateTaskStoppedError from core.app.entities.app_invoke_entities import InvokeFrom from core.file.models import File from core.workflow.constants import ENVIRONMENT_VARIABLE_NODE_ID -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.errors import WorkflowNodeRunFailedError from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine @@ -20,6 +20,7 @@ from core.workflow.graph_events import GraphEngineEvent, GraphNodeEventBase, Gra from core.workflow.nodes import NodeType from core.workflow.nodes.base.node import Node from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader, 
load_into_variable_pool from factories import file_factory diff --git a/api/events/event_handlers/clean_when_dataset_deleted.py b/api/events/event_handlers/clean_when_dataset_deleted.py index 7caa2d1cc9..1666e2e29f 100644 --- a/api/events/event_handlers/clean_when_dataset_deleted.py +++ b/api/events/event_handlers/clean_when_dataset_deleted.py @@ -1,10 +1,13 @@ from events.dataset_event import dataset_was_deleted +from models import Dataset from tasks.clean_dataset_task import clean_dataset_task @dataset_was_deleted.connect -def handle(sender, **kwargs): +def handle(sender: Dataset, **kwargs): dataset = sender + if not dataset.doc_form or not dataset.indexing_technique: + return clean_dataset_task.delay( dataset.id, dataset.tenant_id, diff --git a/api/events/event_handlers/clean_when_document_deleted.py b/api/events/event_handlers/clean_when_document_deleted.py index bbc913b7cf..0add109b06 100644 --- a/api/events/event_handlers/clean_when_document_deleted.py +++ b/api/events/event_handlers/clean_when_document_deleted.py @@ -8,6 +8,6 @@ def handle(sender, **kwargs): dataset_id = kwargs.get("dataset_id") doc_form = kwargs.get("doc_form") file_id = kwargs.get("file_id") - assert dataset_id is not None - assert doc_form is not None + if not dataset_id or not doc_form: + return clean_document_task.delay(document_id, dataset_id, doc_form, file_id) diff --git a/api/events/event_handlers/update_provider_when_message_created.py b/api/events/event_handlers/update_provider_when_message_created.py index c0694d4efe..e1c96fb050 100644 --- a/api/events/event_handlers/update_provider_when_message_created.py +++ b/api/events/event_handlers/update_provider_when_message_created.py @@ -1,10 +1,11 @@ import logging import time as time_module from datetime import datetime -from typing import Any +from typing import Any, cast from pydantic import BaseModel from sqlalchemy import update +from sqlalchemy.engine import CursorResult from sqlalchemy.orm import Session from configs import dify_config @@ -267,7 +268,7 @@ def _execute_provider_updates(updates_to_perform: list[_ProviderUpdateOperation] # Build and execute the update statement stmt = update(Provider).where(*where_conditions).values(**update_values) - result = session.execute(stmt) + result = cast(CursorResult, session.execute(stmt)) rows_affected = result.rowcount logger.debug( diff --git a/api/extensions/ext_blueprints.py b/api/extensions/ext_blueprints.py index 9c08a08c45..52fef4929f 100644 --- a/api/extensions/ext_blueprints.py +++ b/api/extensions/ext_blueprints.py @@ -1,4 +1,5 @@ from configs import dify_config +from constants import HEADER_NAME_APP_CODE, HEADER_NAME_CSRF_TOKEN from dify_app import DifyApp @@ -16,7 +17,7 @@ def init_app(app: DifyApp): CORS( service_api_bp, - allow_headers=["Content-Type", "Authorization", "X-App-Code"], + allow_headers=["Content-Type", "Authorization", HEADER_NAME_APP_CODE], methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"], ) app.register_blueprint(service_api_bp) @@ -25,7 +26,7 @@ def init_app(app: DifyApp): web_bp, resources={r"/*": {"origins": dify_config.WEB_API_CORS_ALLOW_ORIGINS}}, supports_credentials=True, - allow_headers=["Content-Type", "Authorization", "X-App-Code"], + allow_headers=["Content-Type", "Authorization", HEADER_NAME_APP_CODE, HEADER_NAME_CSRF_TOKEN], methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"], expose_headers=["X-Version", "X-Env"], ) @@ -35,7 +36,7 @@ def init_app(app: DifyApp): console_app_bp, resources={r"/*": {"origins": dify_config.CONSOLE_CORS_ALLOW_ORIGINS}}, 
supports_credentials=True, - allow_headers=["Content-Type", "Authorization"], + allow_headers=["Content-Type", "Authorization", HEADER_NAME_CSRF_TOKEN], methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"], expose_headers=["X-Version", "X-Env"], ) @@ -43,7 +44,7 @@ def init_app(app: DifyApp): CORS( files_bp, - allow_headers=["Content-Type"], + allow_headers=["Content-Type", HEADER_NAME_CSRF_TOKEN], methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"], ) app.register_blueprint(files_bp) diff --git a/api/extensions/ext_login.py b/api/extensions/ext_login.py index 5571c0d9ba..e7816a2e88 100644 --- a/api/extensions/ext_login.py +++ b/api/extensions/ext_login.py @@ -9,7 +9,8 @@ from configs import dify_config from dify_app import DifyApp from extensions.ext_database import db from libs.passport import PassportService -from models.account import Account, Tenant, TenantAccountJoin +from libs.token import extract_access_token +from models import Account, Tenant, TenantAccountJoin from models.model import AppMCPServer, EndUser from services.account_service import AccountService @@ -24,20 +25,10 @@ def load_user_from_request(request_from_flask_login): if dify_config.SWAGGER_UI_ENABLED and request.path.endswith((dify_config.SWAGGER_UI_PATH, "/swagger.json")): return None - auth_header = request.headers.get("Authorization", "") - auth_token: str | None = None - if auth_header: - if " " not in auth_header: - raise Unauthorized("Invalid Authorization header format. Expected 'Bearer ' format.") - auth_scheme, auth_token = auth_header.split(maxsplit=1) - auth_scheme = auth_scheme.lower() - if auth_scheme != "bearer": - raise Unauthorized("Invalid Authorization header format. Expected 'Bearer ' format.") - else: - auth_token = request.args.get("_token") + auth_token = extract_access_token(request) # Check for admin API key authentication first - if dify_config.ADMIN_API_KEY_ENABLE and auth_header: + if dify_config.ADMIN_API_KEY_ENABLE and auth_token: admin_api_key = dify_config.ADMIN_API_KEY if admin_api_key and admin_api_key == auth_token: workspace_id = request.headers.get("X-WORKSPACE-ID") diff --git a/api/factories/file_factory.py b/api/factories/file_factory.py index 69fd1a6da3..2316e45179 100644 --- a/api/factories/file_factory.py +++ b/api/factories/file_factory.py @@ -21,7 +21,7 @@ def build_from_message_files( *, message_files: Sequence["MessageFile"], tenant_id: str, - config: FileUploadConfig, + config: FileUploadConfig | None = None, ) -> Sequence[File]: results = [ build_from_message_file(message_file=file, tenant_id=tenant_id, config=config) @@ -35,15 +35,18 @@ def build_from_message_file( *, message_file: "MessageFile", tenant_id: str, - config: FileUploadConfig, + config: FileUploadConfig | None, ): mapping = { "transfer_method": message_file.transfer_method, "url": message_file.url, - "id": message_file.id, "type": message_file.type, } + # Only include id if it exists (message_file has been committed to DB) + if message_file.id: + mapping["id"] = message_file.id + # Set the correct ID field based on transfer method if message_file.transfer_method == FileTransferMethod.TOOL_FILE: mapping["tool_file_id"] = message_file.upload_file_id @@ -64,7 +67,10 @@ def build_from_mapping( config: FileUploadConfig | None = None, strict_type_validation: bool = False, ) -> File: - transfer_method = FileTransferMethod.value_of(mapping.get("transfer_method")) + transfer_method_value = mapping.get("transfer_method") + if not transfer_method_value: + raise ValueError("transfer_method is required 
in file mapping") + transfer_method = FileTransferMethod.value_of(transfer_method_value) build_functions: dict[FileTransferMethod, Callable] = { FileTransferMethod.LOCAL_FILE: _build_from_local_file, @@ -104,6 +110,8 @@ def build_from_mappings( ) -> Sequence[File]: # TODO(QuantumGhost): Performance concern - each mapping triggers a separate database query. # Implement batch processing to reduce database load when handling multiple files. + # Filter out None/empty mappings to avoid errors + valid_mappings = [m for m in mappings if m and m.get("transfer_method")] files = [ build_from_mapping( mapping=mapping, @@ -111,7 +119,7 @@ def build_from_mappings( config=config, strict_type_validation=strict_type_validation, ) - for mapping in mappings + for mapping in valid_mappings ] if ( @@ -158,7 +166,10 @@ def _build_from_local_file( if strict_type_validation and detected_file_type.value != specified_type: raise ValueError("Detected file type does not match the specified type. Please verify the file.") - file_type = FileType(specified_type) if specified_type and specified_type != FileType.CUSTOM else detected_file_type + if specified_type and specified_type != "custom": + file_type = FileType(specified_type) + else: + file_type = detected_file_type return File( id=mapping.get("id"), @@ -206,9 +217,10 @@ def _build_from_remote_url( if strict_type_validation and specified_type and detected_file_type.value != specified_type: raise ValueError("Detected file type does not match the specified type. Please verify the file.") - file_type = ( - FileType(specified_type) if specified_type and specified_type != FileType.CUSTOM else detected_file_type - ) + if specified_type and specified_type != "custom": + file_type = FileType(specified_type) + else: + file_type = detected_file_type return File( id=mapping.get("id"), @@ -230,10 +242,17 @@ def _build_from_remote_url( mime_type, filename, file_size = _get_remote_file_info(url) extension = mimetypes.guess_extension(mime_type) or ("." + filename.split(".")[-1] if "." in filename else ".bin") - file_type = _standardize_file_type(extension=extension, mime_type=mime_type) - if file_type.value != mapping.get("type", "custom"): + detected_file_type = _standardize_file_type(extension=extension, mime_type=mime_type) + specified_type = mapping.get("type") + + if strict_type_validation and specified_type and detected_file_type.value != specified_type: raise ValueError("Detected file type does not match the specified type. Please verify the file.") + if specified_type and specified_type != "custom": + file_type = FileType(specified_type) + else: + file_type = detected_file_type + return File( id=mapping.get("id"), filename=filename, @@ -323,7 +342,10 @@ def _build_from_tool_file( if strict_type_validation and specified_type and detected_file_type.value != specified_type: raise ValueError("Detected file type does not match the specified type. Please verify the file.") - file_type = FileType(specified_type) if specified_type and specified_type != FileType.CUSTOM else detected_file_type + if specified_type and specified_type != "custom": + file_type = FileType(specified_type) + else: + file_type = detected_file_type return File( id=mapping.get("id"), @@ -368,7 +390,10 @@ def _build_from_datasource_file( if strict_type_validation and specified_type and detected_file_type.value != specified_type: raise ValueError("Detected file type does not match the specified type. 
Please verify the file.") - file_type = FileType(specified_type) if specified_type and specified_type != FileType.CUSTOM else detected_file_type + if specified_type and specified_type != "custom": + file_type = FileType(specified_type) + else: + file_type = detected_file_type return File( id=mapping.get("datasource_file_id"), diff --git a/api/fields/workflow_run_fields.py b/api/fields/workflow_run_fields.py index 649e881848..79594beeed 100644 --- a/api/fields/workflow_run_fields.py +++ b/api/fields/workflow_run_fields.py @@ -64,6 +64,15 @@ workflow_run_pagination_fields = { "data": fields.List(fields.Nested(workflow_run_for_list_fields), attribute="data"), } +workflow_run_count_fields = { + "total": fields.Integer, + "running": fields.Integer, + "succeeded": fields.Integer, + "failed": fields.Integer, + "stopped": fields.Integer, + "partial_succeeded": fields.Integer(attribute="partial-succeeded"), +} + workflow_run_detail_fields = { "id": fields.String, "version": fields.String, diff --git a/api/libs/custom_inputs.py b/api/libs/custom_inputs.py new file mode 100644 index 0000000000..10d550ed65 --- /dev/null +++ b/api/libs/custom_inputs.py @@ -0,0 +1,32 @@ +"""Custom input types for Flask-RESTX request parsing.""" + +import re + + +def time_duration(value: str) -> str: + """ + Validate and return time duration string. + + Accepts formats: d (days), h (hours), m (minutes), s (seconds) + Examples: 7d, 4h, 30m, 30s + + Args: + value: The time duration string + + Returns: + The validated time duration string + + Raises: + ValueError: If the format is invalid + """ + if not value: + raise ValueError("Time duration cannot be empty") + + pattern = r"^(\d+)([dhms])$" + if not re.match(pattern, value.lower()): + raise ValueError( + "Invalid time duration format. Use: d (days), h (hours), " + "m (minutes), or s (seconds). Examples: 7d, 4h, 30m, 30s" + ) + + return value.lower() diff --git a/api/libs/external_api.py b/api/libs/external_api.py index 25a82f8a96..f3ebcc4306 100644 --- a/api/libs/external_api.py +++ b/api/libs/external_api.py @@ -9,7 +9,9 @@ from werkzeug.exceptions import HTTPException from werkzeug.http import HTTP_STATUS_CODES from configs import dify_config +from constants import COOKIE_NAME_ACCESS_TOKEN, COOKIE_NAME_CSRF_TOKEN, COOKIE_NAME_REFRESH_TOKEN from core.errors.error import AppInvokeQuotaExceededError +from libs.token import is_secure def http_status_message(code): @@ -22,7 +24,7 @@ def register_external_error_handlers(api: Api): got_request_exception.send(current_app, exception=e) # If Werkzeug already prepared a Response, just use it. 
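# A minimal usage sketch for the time_duration validator introduced in libs/custom_inputs.py
# above, assuming a Flask-RESTX request parser; the parser and argument names here are
# illustrative assumptions, not part of this diff.
from flask_restx import reqparse

from libs.custom_inputs import time_duration

count_parser = reqparse.RequestParser()
# Accepts "7d", "4h", "30m", "30s"; any other value raises ValueError, which
# Flask-RESTX converts into a 400 response.
count_parser.add_argument("time_range", type=time_duration, location="args", required=False)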
- if getattr(e, "response", None) is not None: + if e.response is not None: return e.response status_code = getattr(e, "code", 500) or 500 @@ -67,6 +69,19 @@ def register_external_error_handlers(api: Api): # If you need WWW-Authenticate for 401, add it to headers if status_code == 401: headers["WWW-Authenticate"] = 'Bearer realm="api"' + # Check if this is a forced logout error - clear cookies + error_code = getattr(e, "error_code", None) + if error_code == "unauthorized_and_force_logout": + # Add Set-Cookie headers to clear auth cookies + + secure = is_secure() + # the Response object is not accessible here, so we have to build the Set-Cookie headers by hand + common_part = "Path=/; Expires=Thu, 01 Jan 1970 00:00:00 GMT; HttpOnly" + headers["Set-Cookie"] = [ + f'{COOKIE_NAME_ACCESS_TOKEN}=""; {common_part}{"; Secure" if secure else ""}; SameSite=Lax', + f'{COOKIE_NAME_CSRF_TOKEN}=""; {common_part}{"; Secure" if secure else ""}; SameSite=Lax', + f'{COOKIE_NAME_REFRESH_TOKEN}=""; {common_part}{"; Secure" if secure else ""}; SameSite=Lax', + ] return data, status_code, headers _ = handle_http_exception @@ -106,7 +121,7 @@ def register_external_error_handlers(api: Api): # Log stack exc_info: Any = sys.exc_info() if exc_info[1] is None: - exc_info = None + exc_info = (None, None, None) current_app.log_exception(exc_info) return data, status_code diff --git a/api/libs/helper.py b/api/libs/helper.py index 0551470f65..b878141d8e 100644 --- a/api/libs/helper.py +++ b/api/libs/helper.py @@ -24,7 +24,7 @@ from core.model_runtime.utils.encoders import jsonable_encoder from extensions.ext_redis import redis_client if TYPE_CHECKING: - from models.account import Account + from models import Account from models.model import EndUser logger = logging.getLogger(__name__) @@ -43,7 +43,7 @@ def extract_tenant_id(user: Union["Account", "EndUser"]) -> str | None: Raises: ValueError: If user is neither Account nor EndUser """ - from models.account import Account + from models import Account from models.model import EndUser if isinstance(user, Account): @@ -78,7 +78,7 @@ class AvatarUrlField(fields.Raw): if obj is None: return None - from models.account import Account + from models import Account if isinstance(obj, Account) and obj.avatar is not None: return file_helpers.get_signed_file_url(obj.avatar) diff --git a/api/libs/json_in_md_parser.py b/api/libs/json_in_md_parser.py index 0c642041bf..310e677747 100644 --- a/api/libs/json_in_md_parser.py +++ b/api/libs/json_in_md_parser.py @@ -6,22 +6,22 @@ from core.llm_generator.output_parser.errors import OutputParserError def parse_json_markdown(json_string: str): # Get json from the backticks/braces json_string = json_string.strip() - starts = ["```json", "```", "``", "`", "{"] - ends = ["```", "``", "`", "}"] + starts = ["```json", "```", "``", "`", "{", "["] + ends = ["```", "``", "`", "}", "]"] end_index = -1 start_index = 0 parsed: dict = {} for s in starts: start_index = json_string.find(s) if start_index != -1: - if json_string[start_index] != "{": + if json_string[start_index] not in ("{", "["): start_index += len(s) break if start_index != -1: for e in ends: end_index = json_string.rfind(e, start_index) if end_index != -1: - if json_string[end_index] == "}": + if json_string[end_index] in ("}", "]"): end_index += 1 break if start_index != -1 and end_index != -1 and start_index < end_index: @@ -38,6 +38,12 @@ def parse_and_check_json_markdown(text: str, expected_keys: list[str]): json_obj = parse_json_markdown(text) except json.JSONDecodeError as e: raise OutputParserError(f"got invalid json object. 
error: {e}") + + if isinstance(json_obj, list): + if len(json_obj) == 1 and isinstance(json_obj[0], dict): + json_obj = json_obj[0] + else: + raise OutputParserError(f"got invalid return object. obj:{json_obj}") for key in expected_keys: if key not in json_obj: raise OutputParserError( diff --git a/api/libs/login.py b/api/libs/login.py index 0535f52ea1..5ed4bfae8f 100644 --- a/api/libs/login.py +++ b/api/libs/login.py @@ -1,18 +1,33 @@ from collections.abc import Callable from functools import wraps -from typing import Union, cast +from typing import Any from flask import current_app, g, has_request_context, request from flask_login.config import EXEMPT_METHODS # type: ignore from werkzeug.local import LocalProxy from configs import dify_config -from models.account import Account +from libs.token import check_csrf_token +from models import Account from models.model import EndUser -#: A proxy for the current user. If no user is logged in, this will be an -#: anonymous user -current_user = cast(Union[Account, EndUser, None], LocalProxy(lambda: _get_user())) + +def current_account_with_tenant(): + """ + Resolve the underlying account for the current user proxy and ensure tenant context exists. + Allows tests to supply plain Account mocks without the LocalProxy helper. + """ + user_proxy = current_user + + get_current_object = getattr(user_proxy, "_get_current_object", None) + user = get_current_object() if callable(get_current_object) else user_proxy # type: ignore + + if not isinstance(user, Account): + raise ValueError("current_user must be an Account instance") + assert user.current_tenant_id is not None, "The tenant information should be loaded." + return user, user.current_tenant_id + + from typing import ParamSpec, TypeVar P = ParamSpec("P") @@ -59,6 +74,9 @@ def login_required(func: Callable[P, R]): pass elif current_user is not None and not current_user.is_authenticated: return current_app.login_manager.unauthorized() # type: ignore + # we put csrf validation here for less conflicts + # TODO: maybe find a better place for it. + check_csrf_token(request, current_user.id) return current_app.ensure_sync(func)(*args, **kwargs) return decorated_view @@ -72,3 +90,9 @@ def _get_user() -> EndUser | Account | None: return g._login_user # type: ignore return None + + +#: A proxy for the current user. If no user is logged in, this will be an +#: anonymous user +# NOTE: Any here, but use _get_current_object to check the fields +current_user: Any = LocalProxy(lambda: _get_user()) diff --git a/api/libs/time_parser.py b/api/libs/time_parser.py new file mode 100644 index 0000000000..1d9dd92a08 --- /dev/null +++ b/api/libs/time_parser.py @@ -0,0 +1,67 @@ +"""Time duration parser utility.""" + +import re +from datetime import UTC, datetime, timedelta + + +def parse_time_duration(duration_str: str) -> timedelta | None: + """ + Parse time duration string to timedelta. 
+ + Supported formats: + - 7d: 7 days + - 4h: 4 hours + - 30m: 30 minutes + - 30s: 30 seconds + + Args: + duration_str: Duration string (e.g., "7d", "4h", "30m", "30s") + + Returns: + timedelta object or None if invalid format + """ + if not duration_str: + return None + + # Pattern: number followed by unit (d, h, m, s) + pattern = r"^(\d+)([dhms])$" + match = re.match(pattern, duration_str.lower()) + + if not match: + return None + + value = int(match.group(1)) + unit = match.group(2) + + if unit == "d": + return timedelta(days=value) + elif unit == "h": + return timedelta(hours=value) + elif unit == "m": + return timedelta(minutes=value) + elif unit == "s": + return timedelta(seconds=value) + + return None + + +def get_time_threshold(duration_str: str | None) -> datetime | None: + """ + Get datetime threshold from duration string. + + Calculates the datetime that is duration_str ago from now. + + Args: + duration_str: Duration string (e.g., "7d", "4h", "30m", "30s") + + Returns: + datetime object representing the threshold time, or None if no duration + """ + if not duration_str: + return None + + duration = parse_time_duration(duration_str) + if duration is None: + return None + + return datetime.now(UTC) - duration diff --git a/api/libs/token.py b/api/libs/token.py new file mode 100644 index 0000000000..4be25696e7 --- /dev/null +++ b/api/libs/token.py @@ -0,0 +1,208 @@ +import logging +import re +from datetime import UTC, datetime, timedelta + +from flask import Request +from werkzeug.exceptions import Unauthorized +from werkzeug.wrappers import Response + +from configs import dify_config +from constants import ( + COOKIE_NAME_ACCESS_TOKEN, + COOKIE_NAME_CSRF_TOKEN, + COOKIE_NAME_PASSPORT, + COOKIE_NAME_REFRESH_TOKEN, + HEADER_NAME_CSRF_TOKEN, + HEADER_NAME_PASSPORT, +) +from libs.passport import PassportService + +logger = logging.getLogger(__name__) + +CSRF_WHITE_LIST = [ + re.compile(r"/console/api/apps/[a-f0-9-]+/workflows/draft"), +] + + +# the server may sit behind a reverse proxy, so we check the configured URLs rather than the request scheme +def is_secure() -> bool: + return dify_config.CONSOLE_WEB_URL.startswith("https") and dify_config.CONSOLE_API_URL.startswith("https") + + +def _real_cookie_name(cookie_name: str) -> str: + if is_secure(): + return "__Host-" + cookie_name + else: + return cookie_name + + +def _try_extract_from_header(request: Request) -> str | None: + """ + Try to extract the access token from the Authorization header + """ + auth_header = request.headers.get("Authorization") + if auth_header: + if " " not in auth_header: + return None + else: + auth_scheme, auth_token = auth_header.split(None, 1) + auth_scheme = auth_scheme.lower() + if auth_scheme != "bearer": + return None + else: + return auth_token + return None + + +def extract_csrf_token(request: Request) -> str | None: + """ + Try to extract the CSRF token from the request header. + """ + return request.headers.get(HEADER_NAME_CSRF_TOKEN) + + +def extract_csrf_token_from_cookie(request: Request) -> str | None: + """ + Try to extract the CSRF token from the cookie. + """ + return request.cookies.get(_real_cookie_name(COOKIE_NAME_CSRF_TOKEN)) + + +def extract_access_token(request: Request) -> str | None: + """ + Try to extract the access token from the cookie or the Authorization header. + + The access token is used either for a console session or for the webapp passport exchange. 
+ """ + + def _try_extract_from_cookie(request: Request) -> str | None: + return request.cookies.get(_real_cookie_name(COOKIE_NAME_ACCESS_TOKEN)) + + return _try_extract_from_cookie(request) or _try_extract_from_header(request) + + +def extract_webapp_passport(app_code: str, request: Request) -> str | None: + """ + Try to extract app token from header or params. + + Webapp access token (part of passport) is only used for webapp session. + """ + + def _try_extract_passport_token_from_cookie(request: Request) -> str | None: + return request.cookies.get(_real_cookie_name(COOKIE_NAME_PASSPORT + "-" + app_code)) + + def _try_extract_passport_token_from_header(request: Request) -> str | None: + return request.headers.get(HEADER_NAME_PASSPORT) + + ret = _try_extract_passport_token_from_cookie(request) or _try_extract_passport_token_from_header(request) + return ret + + +def set_access_token_to_cookie(request: Request, response: Response, token: str, samesite: str = "Lax"): + response.set_cookie( + _real_cookie_name(COOKIE_NAME_ACCESS_TOKEN), + value=token, + httponly=True, + secure=is_secure(), + samesite=samesite, + max_age=int(dify_config.ACCESS_TOKEN_EXPIRE_MINUTES * 60), + path="/", + ) + + +def set_refresh_token_to_cookie(request: Request, response: Response, token: str): + response.set_cookie( + _real_cookie_name(COOKIE_NAME_REFRESH_TOKEN), + value=token, + httponly=True, + secure=is_secure(), + samesite="Lax", + max_age=int(60 * 60 * 24 * dify_config.REFRESH_TOKEN_EXPIRE_DAYS), + path="/", + ) + + +def set_csrf_token_to_cookie(request: Request, response: Response, token: str): + response.set_cookie( + _real_cookie_name(COOKIE_NAME_CSRF_TOKEN), + value=token, + httponly=False, + secure=is_secure(), + samesite="Lax", + max_age=int(60 * dify_config.ACCESS_TOKEN_EXPIRE_MINUTES), + path="/", + ) + + +def _clear_cookie( + response: Response, + cookie_name: str, + samesite: str = "Lax", + http_only: bool = True, +): + response.set_cookie( + _real_cookie_name(cookie_name), + "", + expires=0, + path="/", + secure=is_secure(), + httponly=http_only, + samesite=samesite, + ) + + +def clear_access_token_from_cookie(response: Response, samesite: str = "Lax"): + _clear_cookie(response, COOKIE_NAME_ACCESS_TOKEN, samesite) + + +def clear_refresh_token_from_cookie(response: Response): + _clear_cookie(response, COOKIE_NAME_REFRESH_TOKEN) + + +def clear_csrf_token_from_cookie(response: Response): + _clear_cookie(response, COOKIE_NAME_CSRF_TOKEN, http_only=False) + + +def check_csrf_token(request: Request, user_id: str): + # some apis are sent by beacon, so we need to bypass csrf token check + # since these APIs are post, they are already protected by SameSite: Lax, so csrf is not required. 
+ def _unauthorized(): + raise Unauthorized("CSRF token is missing or invalid.") + + for pattern in CSRF_WHITE_LIST: + if pattern.match(request.path): + return + + csrf_token = extract_csrf_token(request) + csrf_token_from_cookie = extract_csrf_token_from_cookie(request) + + if csrf_token != csrf_token_from_cookie: + _unauthorized() + + if not csrf_token: + _unauthorized() + verified = {} + try: + verified = PassportService().verify(csrf_token) + except Exception: + _unauthorized() + + if verified.get("sub") != user_id: + _unauthorized() + + exp: int | None = verified.get("exp") + if not exp: + _unauthorized() + else: + time_now = int(datetime.now().timestamp()) + if exp < time_now: + _unauthorized() + + +def generate_csrf_token(user_id: str) -> str: + exp_dt = datetime.now(UTC) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES) + payload = { + "exp": int(exp_dt.timestamp()), + "sub": user_id, + } + return PassportService().issue(payload) diff --git a/api/migrations/versions/2025_10_14_1618-d98acf217d43_add_app_mode_for_messsage.py b/api/migrations/versions/2025_10_14_1618-d98acf217d43_add_app_mode_for_messsage.py new file mode 100644 index 0000000000..910cf75838 --- /dev/null +++ b/api/migrations/versions/2025_10_14_1618-d98acf217d43_add_app_mode_for_messsage.py @@ -0,0 +1,35 @@ +"""add app_mode for messsage + +Revision ID: d98acf217d43 +Revises: 68519ad5cd18 +Create Date: 2025-10-14 16:18:08.568011 + +""" +from alembic import op +import models as models +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = 'd98acf217d43' +down_revision = '68519ad5cd18' +branch_labels = None +depends_on = None + + +def upgrade(): + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.add_column(sa.Column('app_mode', sa.String(length=255), nullable=True)) + batch_op.create_index('message_app_mode_idx', ['app_mode'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + + with op.batch_alter_table('messages', schema=None) as batch_op: + batch_op.drop_index('message_app_mode_idx') + batch_op.drop_column('app_mode') + + # ### end Alembic commands ### diff --git a/api/models/model.py b/api/models/model.py index 18958c8253..af22ab9538 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -910,6 +910,7 @@ class Message(Base): Index("message_account_idx", "app_id", "from_source", "from_account_id"), Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"), Index("message_created_at_idx", "created_at"), + Index("message_app_mode_idx", "app_mode"), ) id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) @@ -943,6 +944,7 @@ class Message(Base): updated_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) agent_based: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) workflow_run_id: Mapped[str | None] = mapped_column(StringUUID) + app_mode: Mapped[str | None] = mapped_column(String(255), nullable=True) @property def inputs(self) -> dict[str, Any]: @@ -1477,7 +1479,7 @@ class EndUser(Base, UserMixin): sa.Index("end_user_tenant_session_id_idx", "tenant_id", "session_id", "type"), ) - id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) + id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()")) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) app_id = mapped_column(StringUUID, nullable=True) type: Mapped[str] = mapped_column(String(255), nullable=False) diff --git a/api/pyproject.toml b/api/pyproject.toml index 7e9aeeaa97..040d9658b3 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -13,7 +13,7 @@ dependencies = [ "celery~=5.5.2", "chardet~=5.1.0", "flask~=3.1.2", - "flask-compress~=1.17", + "flask-compress>=1.17,<1.18", "flask-cors~=6.0.0", "flask-login~=0.6.3", "flask-migrate~=4.0.7", @@ -63,7 +63,7 @@ dependencies = [ "pycryptodome==3.19.1", "pydantic~=2.11.4", "pydantic-extra-types~=2.10.3", - "pydantic-settings~=2.9.1", + "pydantic-settings~=2.11.0", "pyjwt~=2.10.1", "pypdfium2==4.30.0", "python-docx~=1.1.0", @@ -86,6 +86,7 @@ dependencies = [ "sendgrid~=6.12.3", "flask-restx~=1.3.0", "packaging~=23.2", + "weaviate-client==4.17.0", ] # Before adding new dependency, consider place it in # alphabet order (a-z) and suitable group. @@ -166,6 +167,7 @@ dev = [ "mypy~=1.17.1", # "locust>=2.40.4", # Temporarily removed due to compatibility issues. Uncomment when resolved. "sseclient-py>=1.8.0", + "pytest-timeout>=2.4.0", ] ############################################################ @@ -214,7 +216,7 @@ vdb = [ "tidb-vector==0.0.9", "upstash-vector==0.6.0", "volcengine-compat~=1.0.0", - "weaviate-client~=3.24.0", + "weaviate-client==4.17.0", "xinference-client~=1.2.2", "mo-vector~=0.1.13", "mysql-connector-python>=9.3.0", diff --git a/api/repositories/api_workflow_run_repository.py b/api/repositories/api_workflow_run_repository.py index 3ac28fad75..72de9fed31 100644 --- a/api/repositories/api_workflow_run_repository.py +++ b/api/repositories/api_workflow_run_repository.py @@ -59,6 +59,7 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol): triggered_from: str, limit: int = 20, last_id: str | None = None, + status: str | None = None, ) -> InfiniteScrollPagination: """ Get paginated workflow runs with filtering. 
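# A sketch of how a caller might use the new status filter added below; the method
# name get_paginated_workflow_runs and the "repo" parameter are assumptions for
# illustration and may not match the real protocol member exactly.
def list_failed_debug_runs(repo, tenant_id: str, app_id: str, last_id: str | None = None):
    # Cursor-based pagination: pass the last id of the previous page to fetch the next one.
    return repo.get_paginated_workflow_runs(
        tenant_id=tenant_id,
        app_id=app_id,
        triggered_from="debugging",
        limit=20,
        last_id=last_id,
        status="failed",
    )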
@@ -73,6 +74,7 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol): triggered_from: Filter by trigger source (e.g., "debugging", "app-run") limit: Maximum number of records to return (default: 20) last_id: Cursor for pagination - ID of the last record from previous page + status: Optional filter by status (e.g., "running", "succeeded", "failed") Returns: InfiniteScrollPagination object containing: @@ -107,6 +109,43 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol): """ ... + def get_workflow_runs_count( + self, + tenant_id: str, + app_id: str, + triggered_from: str, + status: str | None = None, + time_range: str | None = None, + ) -> dict[str, int]: + """ + Get workflow runs count statistics. + + Retrieves total count and count by status for workflow runs + matching the specified filters. + + Args: + tenant_id: Tenant identifier for multi-tenant isolation + app_id: Application identifier + triggered_from: Filter by trigger source (e.g., "debugging", "app-run") + status: Optional filter by specific status + time_range: Optional time range filter (e.g., "7d", "4h", "30m", "30s") + Filters records based on created_at field + + Returns: + Dictionary containing: + - total: Total count of all workflow runs (or filtered by status) + - running: Count of workflow runs with status "running" + - succeeded: Count of workflow runs with status "succeeded" + - failed: Count of workflow runs with status "failed" + - stopped: Count of workflow runs with status "stopped" + - partial_succeeded: Count of workflow runs with status "partial-succeeded" + + Note: If a status is provided, 'total' will be the count for that status, + and the specific status count will also be set to this value, with all + other status counts being 0. + """ + ... + def get_expired_runs_batch( self, tenant_id: str, diff --git a/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py b/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py index 9bc6acc41f..7e2173acdd 100644 --- a/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py +++ b/api/repositories/sqlalchemy_api_workflow_node_execution_repository.py @@ -7,8 +7,10 @@ using SQLAlchemy 2.0 style queries for WorkflowNodeExecutionModel operations. 
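# Background for the cast(CursorResult, ...) changes below: SQLAlchemy annotates
# Session.execute() as returning Result[Any], which does not expose .rowcount in its
# type stubs, while DELETE statements actually return a CursorResult at runtime.
# A minimal sketch of the pattern, with "model" standing in for any mapped class:
from typing import cast

from sqlalchemy import delete
from sqlalchemy.engine import CursorResult
from sqlalchemy.orm import Session


def delete_by_ids(session: Session, model, ids: list[str]) -> int:
    # cast() is a no-op at runtime; it only narrows the static type for the checker.
    result = cast(CursorResult, session.execute(delete(model).where(model.id.in_(ids))))
    return result.rowcount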
from collections.abc import Sequence from datetime import datetime +from typing import cast from sqlalchemy import asc, delete, desc, select +from sqlalchemy.engine import CursorResult from sqlalchemy.orm import Session, sessionmaker from models.workflow import WorkflowNodeExecutionModel @@ -181,7 +183,7 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut # Delete the batch delete_stmt = delete(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.id.in_(execution_ids)) - result = session.execute(delete_stmt) + result = cast(CursorResult, session.execute(delete_stmt)) session.commit() total_deleted += result.rowcount @@ -228,7 +230,7 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut # Delete the batch delete_stmt = delete(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.id.in_(execution_ids)) - result = session.execute(delete_stmt) + result = cast(CursorResult, session.execute(delete_stmt)) session.commit() total_deleted += result.rowcount @@ -285,6 +287,6 @@ class DifyAPISQLAlchemyWorkflowNodeExecutionRepository(DifyAPIWorkflowNodeExecut with self._session_maker() as session: stmt = delete(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.id.in_(execution_ids)) - result = session.execute(stmt) + result = cast(CursorResult, session.execute(stmt)) session.commit() return result.rowcount diff --git a/api/repositories/sqlalchemy_api_workflow_run_repository.py b/api/repositories/sqlalchemy_api_workflow_run_repository.py index 205f8c87ee..68affb59f3 100644 --- a/api/repositories/sqlalchemy_api_workflow_run_repository.py +++ b/api/repositories/sqlalchemy_api_workflow_run_repository.py @@ -22,11 +22,14 @@ Implementation Notes: import logging from collections.abc import Sequence from datetime import datetime +from typing import cast -from sqlalchemy import delete, select +from sqlalchemy import delete, func, select +from sqlalchemy.engine import CursorResult from sqlalchemy.orm import Session, sessionmaker from libs.infinite_scroll_pagination import InfiniteScrollPagination +from libs.time_parser import get_time_threshold from models.workflow import WorkflowRun from repositories.api_workflow_run_repository import APIWorkflowRunRepository @@ -61,6 +64,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): triggered_from: str, limit: int = 20, last_id: str | None = None, + status: str | None = None, ) -> InfiniteScrollPagination: """ Get paginated workflow runs with filtering. @@ -77,6 +81,10 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): WorkflowRun.triggered_from == triggered_from, ) + # Add optional status filter + if status: + base_stmt = base_stmt.where(WorkflowRun.status == status) + if last_id: # Get the last workflow run for cursor-based pagination last_run_stmt = base_stmt.where(WorkflowRun.id == last_id) @@ -118,6 +126,73 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): ) return session.scalar(stmt) + def get_workflow_runs_count( + self, + tenant_id: str, + app_id: str, + triggered_from: str, + status: str | None = None, + time_range: str | None = None, + ) -> dict[str, int]: + """ + Get workflow runs count statistics grouped by status. 
+ """ + _initial_status_counts = { + "running": 0, + "succeeded": 0, + "failed": 0, + "stopped": 0, + "partial-succeeded": 0, + } + + with self._session_maker() as session: + # Build base where conditions + base_conditions = [ + WorkflowRun.tenant_id == tenant_id, + WorkflowRun.app_id == app_id, + WorkflowRun.triggered_from == triggered_from, + ] + + # Add time range filter if provided + if time_range: + time_threshold = get_time_threshold(time_range) + if time_threshold: + base_conditions.append(WorkflowRun.created_at >= time_threshold) + + # If status filter is provided, return simple count + if status: + count_stmt = select(func.count(WorkflowRun.id)).where(*base_conditions, WorkflowRun.status == status) + total = session.scalar(count_stmt) or 0 + + result = {"total": total} | _initial_status_counts + + # Set the count for the filtered status + if status in result: + result[status] = total + + return result + + # No status filter - get counts grouped by status + base_stmt = ( + select(WorkflowRun.status, func.count(WorkflowRun.id).label("count")) + .where(*base_conditions) + .group_by(WorkflowRun.status) + ) + + # Execute query + results = session.execute(base_stmt).all() + + # Build response dictionary + status_counts = _initial_status_counts.copy() + + total = 0 + for status_val, count in results: + total += count + if status_val in status_counts: + status_counts[status_val] = count + + return {"total": total} | status_counts + def get_expired_runs_batch( self, tenant_id: str, @@ -150,7 +225,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): with self._session_maker() as session: stmt = delete(WorkflowRun).where(WorkflowRun.id.in_(run_ids)) - result = session.execute(stmt) + result = cast(CursorResult, session.execute(stmt)) session.commit() deleted_count = result.rowcount @@ -186,7 +261,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): # Delete the batch delete_stmt = delete(WorkflowRun).where(WorkflowRun.id.in_(run_ids)) - result = session.execute(delete_stmt) + result = cast(CursorResult, session.execute(delete_stmt)) session.commit() batch_deleted = result.rowcount diff --git a/api/schedule/clean_workflow_runlogs_precise.py b/api/schedule/clean_workflow_runlogs_precise.py index 485a79782c..db4198720d 100644 --- a/api/schedule/clean_workflow_runlogs_precise.py +++ b/api/schedule/clean_workflow_runlogs_precise.py @@ -1,8 +1,11 @@ import datetime import logging import time +from collections.abc import Sequence import click +from sqlalchemy import select +from sqlalchemy.orm import Session, sessionmaker import app from configs import dify_config @@ -35,50 +38,53 @@ def clean_workflow_runlogs_precise(): retention_days = dify_config.WORKFLOW_LOG_RETENTION_DAYS cutoff_date = datetime.datetime.now() - datetime.timedelta(days=retention_days) + session_factory = sessionmaker(db.engine, expire_on_commit=False) try: - total_workflow_runs = db.session.query(WorkflowRun).where(WorkflowRun.created_at < cutoff_date).count() - if total_workflow_runs == 0: - logger.info("No expired workflow run logs found") - return - logger.info("Found %s expired workflow run logs to clean", total_workflow_runs) + with session_factory.begin() as session: + total_workflow_runs = session.query(WorkflowRun).where(WorkflowRun.created_at < cutoff_date).count() + if total_workflow_runs == 0: + logger.info("No expired workflow run logs found") + return + logger.info("Found %s expired workflow run logs to clean", total_workflow_runs) total_deleted = 0 failed_batches = 0 
batch_count = 0 - while True: - workflow_runs = ( - db.session.query(WorkflowRun.id).where(WorkflowRun.created_at < cutoff_date).limit(BATCH_SIZE).all() - ) + with session_factory.begin() as session: + workflow_run_ids = session.scalars( + select(WorkflowRun.id) + .where(WorkflowRun.created_at < cutoff_date) + .order_by(WorkflowRun.created_at, WorkflowRun.id) + .limit(BATCH_SIZE) + ).all() - if not workflow_runs: - break - - workflow_run_ids = [run.id for run in workflow_runs] - batch_count += 1 - - success = _delete_batch_with_retry(workflow_run_ids, failed_batches) - - if success: - total_deleted += len(workflow_run_ids) - failed_batches = 0 - else: - failed_batches += 1 - if failed_batches >= MAX_RETRIES: - logger.error("Failed to delete batch after %s retries, aborting cleanup for today", MAX_RETRIES) + if not workflow_run_ids: break + + batch_count += 1 + + success = _delete_batch(session, workflow_run_ids, failed_batches) + + if success: + total_deleted += len(workflow_run_ids) + failed_batches = 0 else: - # Calculate incremental delay times: 5, 10, 15 minutes - retry_delay_minutes = failed_batches * 5 - logger.warning("Batch deletion failed, retrying in %s minutes...", retry_delay_minutes) - time.sleep(retry_delay_minutes * 60) - continue + failed_batches += 1 + if failed_batches >= MAX_RETRIES: + logger.error("Failed to delete batch after %s retries, aborting cleanup for today", MAX_RETRIES) + break + else: + # Calculate incremental delay times: 5, 10, 15 minutes + retry_delay_minutes = failed_batches * 5 + logger.warning("Batch deletion failed, retrying in %s minutes...", retry_delay_minutes) + time.sleep(retry_delay_minutes * 60) + continue logger.info("Cleanup completed: %s expired workflow run logs deleted", total_deleted) except Exception: - db.session.rollback() logger.exception("Unexpected error in workflow log cleanup") raise @@ -87,69 +93,56 @@ def clean_workflow_runlogs_precise(): click.echo(click.style(f"Cleaned workflow run logs from db success latency: {execution_time:.2f}s", fg="green")) -def _delete_batch_with_retry(workflow_run_ids: list[str], attempt_count: int) -> bool: - """Delete a single batch with a retry mechanism and complete cascading deletion""" +def _delete_batch(session: Session, workflow_run_ids: Sequence[str], attempt_count: int) -> bool: + """Delete a single batch of workflow runs and all related data within a nested transaction.""" try: - with db.session.begin_nested(): + with session.begin_nested(): message_data = ( - db.session.query(Message.id, Message.conversation_id) + session.query(Message.id, Message.conversation_id) .where(Message.workflow_run_id.in_(workflow_run_ids)) .all() ) message_id_list = [msg.id for msg in message_data] conversation_id_list = list({msg.conversation_id for msg in message_data if msg.conversation_id}) if message_id_list: - db.session.query(AppAnnotationHitHistory).where( - AppAnnotationHitHistory.message_id.in_(message_id_list) - ).delete(synchronize_session=False) + message_related_models = [ + AppAnnotationHitHistory, + MessageAgentThought, + MessageChain, + MessageFile, + MessageAnnotation, + MessageFeedback, + ] + for model in message_related_models: + session.query(model).where(model.message_id.in_(message_id_list)).delete(synchronize_session=False) # type: ignore + # mypy error: "DeclarativeAttributeIntercept" has no attribute "message_id"; that type comes from the + # library stubs, and all six of these models define a message_id column. 
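+ # Deletion order matters here: message-scoped rows are removed first, then the messages
+ # themselves, then workflow-scoped rows (app logs, node executions), then conversation
+ # data, and finally the workflow runs, all inside a single SAVEPOINT (begin_nested).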
- db.session.query(MessageAgentThought).where(MessageAgentThought.message_id.in_(message_id_list)).delete( + session.query(Message).where(Message.workflow_run_id.in_(workflow_run_ids)).delete( synchronize_session=False ) - db.session.query(MessageChain).where(MessageChain.message_id.in_(message_id_list)).delete( - synchronize_session=False - ) - - db.session.query(MessageFile).where(MessageFile.message_id.in_(message_id_list)).delete( - synchronize_session=False - ) - - db.session.query(MessageAnnotation).where(MessageAnnotation.message_id.in_(message_id_list)).delete( - synchronize_session=False - ) - - db.session.query(MessageFeedback).where(MessageFeedback.message_id.in_(message_id_list)).delete( - synchronize_session=False - ) - - db.session.query(Message).where(Message.workflow_run_id.in_(workflow_run_ids)).delete( - synchronize_session=False - ) - - db.session.query(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(workflow_run_ids)).delete( + session.query(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(workflow_run_ids)).delete( synchronize_session=False ) - db.session.query(WorkflowNodeExecutionModel).where( + session.query(WorkflowNodeExecutionModel).where( WorkflowNodeExecutionModel.workflow_run_id.in_(workflow_run_ids) ).delete(synchronize_session=False) if conversation_id_list: - db.session.query(ConversationVariable).where( + session.query(ConversationVariable).where( ConversationVariable.conversation_id.in_(conversation_id_list) ).delete(synchronize_session=False) - db.session.query(Conversation).where(Conversation.id.in_(conversation_id_list)).delete( + session.query(Conversation).where(Conversation.id.in_(conversation_id_list)).delete( synchronize_session=False ) - db.session.query(WorkflowRun).where(WorkflowRun.id.in_(workflow_run_ids)).delete(synchronize_session=False) + session.query(WorkflowRun).where(WorkflowRun.id.in_(workflow_run_ids)).delete(synchronize_session=False) - db.session.commit() - return True + return True except Exception: - db.session.rollback() logger.exception("Batch deletion failed (attempt %s)", attempt_count + 1) return False diff --git a/api/schedule/mail_clean_document_notify_task.py b/api/schedule/mail_clean_document_notify_task.py index ef6edd6709..b70707b17e 100644 --- a/api/schedule/mail_clean_document_notify_task.py +++ b/api/schedule/mail_clean_document_notify_task.py @@ -10,7 +10,7 @@ from configs import dify_config from extensions.ext_database import db from extensions.ext_mail import mail from libs.email_i18n import EmailType, get_email_i18n_service -from models.account import Account, Tenant, TenantAccountJoin +from models import Account, Tenant, TenantAccountJoin from models.dataset import Dataset, DatasetAutoDisableLog from services.feature_service import FeatureService diff --git a/api/services/account_service.py b/api/services/account_service.py index 106bc0e77e..cb0eb7a9dd 100644 --- a/api/services/account_service.py +++ b/api/services/account_service.py @@ -22,6 +22,7 @@ from libs.helper import RateLimiter, TokenManager from libs.passport import PassportService from libs.password import compare_password, hash_password, valid_password from libs.rsa import generate_key_pair +from libs.token import generate_csrf_token from models.account import ( Account, AccountIntegrate, @@ -76,6 +77,7 @@ logger = logging.getLogger(__name__) class TokenPair(BaseModel): access_token: str refresh_token: str + csrf_token: str REFRESH_TOKEN_PREFIX = "refresh_token:" @@ -403,10 +405,11 @@ class AccountService: access_token = 
AccountService.get_account_jwt_token(account=account) refresh_token = _generate_refresh_token() + csrf_token = generate_csrf_token(account.id) AccountService._store_refresh_token(refresh_token, account.id) - return TokenPair(access_token=access_token, refresh_token=refresh_token) + return TokenPair(access_token=access_token, refresh_token=refresh_token, csrf_token=csrf_token) @staticmethod def logout(*, account: Account): @@ -431,8 +434,9 @@ class AccountService: AccountService._delete_refresh_token(refresh_token, account.id) AccountService._store_refresh_token(new_refresh_token, account.id) + csrf_token = generate_csrf_token(account.id) - return TokenPair(access_token=new_access_token, refresh_token=new_refresh_token) + return TokenPair(access_token=new_access_token, refresh_token=new_refresh_token, csrf_token=csrf_token) @staticmethod def load_logged_in_account(*, account_id: str): diff --git a/api/services/agent_service.py b/api/services/agent_service.py index d631ce812f..b2db895a5a 100644 --- a/api/services/agent_service.py +++ b/api/services/agent_service.py @@ -10,7 +10,7 @@ from core.plugin.impl.exc import PluginDaemonClientSideError from core.tools.tool_manager import ToolManager from extensions.ext_database import db from libs.login import current_user -from models.account import Account +from models import Account from models.model import App, Conversation, EndUser, Message, MessageAgentThought diff --git a/api/services/annotation_service.py b/api/services/annotation_service.py index 9feca7337f..c0d26cdd27 100644 --- a/api/services/annotation_service.py +++ b/api/services/annotation_service.py @@ -8,8 +8,7 @@ from werkzeug.exceptions import NotFound from extensions.ext_database import db from extensions.ext_redis import redis_client from libs.datetime_utils import naive_utc_now -from libs.login import current_user -from models.account import Account +from libs.login import current_account_with_tenant from models.model import App, AppAnnotationHitHistory, AppAnnotationSetting, Message, MessageAnnotation from services.feature_service import FeatureService from tasks.annotation.add_annotation_to_index_task import add_annotation_to_index_task @@ -24,10 +23,10 @@ class AppAnnotationService: @classmethod def up_insert_app_annotation_from_message(cls, args: dict, app_id: str) -> MessageAnnotation: # get app info - assert isinstance(current_user, Account) + current_user, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -63,12 +62,12 @@ class AppAnnotationService: db.session.commit() # if annotation reply is enabled , add annotation to index annotation_setting = db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first() - assert current_user.current_tenant_id is not None + assert current_tenant_id is not None if annotation_setting: add_annotation_to_index_task.delay( annotation.id, args["question"], - current_user.current_tenant_id, + current_tenant_id, app_id, annotation_setting.collection_binding_id, ) @@ -86,13 +85,12 @@ class AppAnnotationService: enable_app_annotation_job_key = f"enable_app_annotation_job_{str(job_id)}" # send batch add segments task redis_client.setnx(enable_app_annotation_job_key, "waiting") - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + current_user, 
current_tenant_id = current_account_with_tenant() enable_annotation_reply_task.delay( str(job_id), app_id, current_user.id, - current_user.current_tenant_id, + current_tenant_id, args["score_threshold"], args["embedding_provider_name"], args["embedding_model_name"], @@ -101,8 +99,7 @@ class AppAnnotationService: @classmethod def disable_app_annotation(cls, app_id: str): - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() disable_app_annotation_key = f"disable_app_annotation_{str(app_id)}" cache_result = redis_client.get(disable_app_annotation_key) if cache_result is not None: @@ -113,17 +110,16 @@ class AppAnnotationService: disable_app_annotation_job_key = f"disable_app_annotation_job_{str(job_id)}" # send batch add segments task redis_client.setnx(disable_app_annotation_job_key, "waiting") - disable_annotation_reply_task.delay(str(job_id), app_id, current_user.current_tenant_id) + disable_annotation_reply_task.delay(str(job_id), app_id, current_tenant_id) return {"job_id": job_id, "job_status": "waiting"} @classmethod def get_annotation_list_by_app_id(cls, app_id: str, page: int, limit: int, keyword: str): # get app info - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -153,11 +149,10 @@ class AppAnnotationService: @classmethod def export_annotation_list_by_app_id(cls, app_id: str): # get app info - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -174,11 +169,10 @@ class AppAnnotationService: @classmethod def insert_app_annotation_directly(cls, args: dict, app_id: str) -> MessageAnnotation: # get app info - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + current_user, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -196,7 +190,7 @@ class AppAnnotationService: add_annotation_to_index_task.delay( annotation.id, args["question"], - current_user.current_tenant_id, + current_tenant_id, app_id, annotation_setting.collection_binding_id, ) @@ -205,11 +199,10 @@ class AppAnnotationService: @classmethod def update_app_annotation_directly(cls, args: dict, app_id: str, annotation_id: str): # get app info - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -234,7 +227,7 @@ class AppAnnotationService: update_annotation_to_index_task.delay( annotation.id, 
annotation.question, - current_user.current_tenant_id, + current_tenant_id, app_id, app_annotation_setting.collection_binding_id, ) @@ -244,11 +237,10 @@ class AppAnnotationService: @classmethod def delete_app_annotation(cls, app_id: str, annotation_id: str): # get app info - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -277,17 +269,16 @@ class AppAnnotationService: if app_annotation_setting: delete_annotation_index_task.delay( - annotation.id, app_id, current_user.current_tenant_id, app_annotation_setting.collection_binding_id + annotation.id, app_id, current_tenant_id, app_annotation_setting.collection_binding_id ) @classmethod def delete_app_annotations_in_batch(cls, app_id: str, annotation_ids: list[str]): # get app info - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -317,7 +308,7 @@ class AppAnnotationService: for annotation, annotation_setting in annotations_to_delete: if annotation_setting: delete_annotation_index_task.delay( - annotation.id, app_id, current_user.current_tenant_id, annotation_setting.collection_binding_id + annotation.id, app_id, current_tenant_id, annotation_setting.collection_binding_id ) # Step 4: Bulk delete annotations in a single query @@ -333,11 +324,10 @@ class AppAnnotationService: @classmethod def batch_import_app_annotations(cls, app_id, file: FileStorage): # get app info - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + current_user, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -354,7 +344,7 @@ class AppAnnotationService: if len(result) == 0: raise ValueError("The CSV file is empty.") # check annotation limit - features = FeatureService.get_features(current_user.current_tenant_id) + features = FeatureService.get_features(current_tenant_id) if features.billing.enabled: annotation_quota_limit = features.annotation_quota_limit if annotation_quota_limit.limit < len(result) + annotation_quota_limit.size: @@ -364,21 +354,18 @@ class AppAnnotationService: indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}" # send batch add segments task redis_client.setnx(indexing_cache_key, "waiting") - batch_import_annotations_task.delay( - str(job_id), result, app_id, current_user.current_tenant_id, current_user.id - ) + batch_import_annotations_task.delay(str(job_id), result, app_id, current_tenant_id, current_user.id) except Exception as e: return {"error_msg": str(e)} return {"job_id": job_id, "job_status": "waiting"} @classmethod def get_annotation_hit_histories(cls, app_id: str, annotation_id: str, page, limit): - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, 
current_tenant_id = current_account_with_tenant() # get app info app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -445,12 +432,11 @@ class AppAnnotationService: @classmethod def get_app_annotation_setting_by_app_id(cls, app_id: str): - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() # get app info app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -481,12 +467,11 @@ class AppAnnotationService: @classmethod def update_app_annotation_setting(cls, app_id: str, annotation_setting_id: str, args: dict): - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + current_user, current_tenant_id = current_account_with_tenant() # get app info app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -531,11 +516,10 @@ class AppAnnotationService: @classmethod def clear_all_annotations(cls, app_id: str): - assert isinstance(current_user, Account) - assert current_user.current_tenant_id is not None + _, current_tenant_id = current_account_with_tenant() app = ( db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") .first() ) @@ -558,7 +542,7 @@ class AppAnnotationService: # if annotation reply is enabled, delete annotation index if app_annotation_setting: delete_annotation_index_task.delay( - annotation.id, app_id, current_user.current_tenant_id, app_annotation_setting.collection_binding_id + annotation.id, app_id, current_tenant_id, app_annotation_setting.collection_binding_id ) db.session.delete(annotation) diff --git a/api/services/app_service.py b/api/services/app_service.py index 4fc6cf2494..5f8c5089c9 100644 --- a/api/services/app_service.py +++ b/api/services/app_service.py @@ -18,7 +18,7 @@ from events.app_event import app_was_created from extensions.ext_database import db from libs.datetime_utils import naive_utc_now from libs.login import current_user -from models.account import Account +from models import Account from models.model import App, AppMode, AppModelConfig, Site from models.tools import ApiToolProvider from services.billing_service import BillingService diff --git a/api/services/billing_service.py b/api/services/billing_service.py index 9d6c5b4b31..a6851d2638 100644 --- a/api/services/billing_service.py +++ b/api/services/billing_service.py @@ -7,7 +7,7 @@ from tenacity import retry, retry_if_exception_type, stop_before_delay, wait_fix from extensions.ext_database import db from extensions.ext_redis import redis_client from libs.helper import RateLimiter -from models.account import Account, TenantAccountJoin, TenantAccountRole +from models import Account, TenantAccountJoin, TenantAccountRole class BillingService: diff --git a/api/services/conversation_service.py b/api/services/conversation_service.py index a8e51a426d..39d6c81621 100644 --- 
a/api/services/conversation_service.py +++ b/api/services/conversation_service.py @@ -14,8 +14,7 @@ from extensions.ext_database import db from factories import variable_factory from libs.datetime_utils import naive_utc_now from libs.infinite_scroll_pagination import InfiniteScrollPagination -from models import ConversationVariable -from models.account import Account +from models import Account, ConversationVariable from models.model import App, Conversation, EndUser, Message from services.errors.conversation import ( ConversationNotExistsError, diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index 53216e4fdd..f4047da6b8 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -29,7 +29,7 @@ from extensions.ext_redis import redis_client from libs import helper from libs.datetime_utils import naive_utc_now from libs.login import current_user -from models.account import Account, TenantAccountRole +from models import Account, TenantAccountRole from models.dataset import ( AppDatasetJoin, ChildChunk, diff --git a/api/services/datasource_provider_service.py b/api/services/datasource_provider_service.py index 36b7084973..1b690e2266 100644 --- a/api/services/datasource_provider_service.py +++ b/api/services/datasource_provider_service.py @@ -3,7 +3,6 @@ import time from collections.abc import Mapping from typing import Any -from flask_login import current_user from sqlalchemy.orm import Session from configs import dify_config @@ -25,6 +24,16 @@ from services.plugin.plugin_service import PluginService logger = logging.getLogger(__name__) +def get_current_user(): + from libs.login import current_user + from models.account import Account + from models.model import EndUser + + if not isinstance(current_user._get_current_object(), (Account, EndUser)): # type: ignore + raise TypeError(f"current_user must be Account or EndUser, got {type(current_user).__name__}") + return current_user + + class DatasourceProviderService: """ Model Provider Service @@ -109,6 +118,7 @@ class DatasourceProviderService: return {} # refresh the credentials if datasource_provider.expires_at != -1 and (datasource_provider.expires_at - 60) < int(time.time()): + current_user = get_current_user() decrypted_credentials = self.decrypt_datasource_provider_credentials( tenant_id=tenant_id, datasource_provider=datasource_provider, @@ -166,6 +176,7 @@ class DatasourceProviderService: ) if not datasource_providers: return [] + current_user = get_current_user() # refresh the credentials real_credentials_list = [] for datasource_provider in datasource_providers: @@ -604,6 +615,7 @@ class DatasourceProviderService: """ provider_name = provider_id.provider_name plugin_id = provider_id.plugin_id + with Session(db.engine) as session: lock = f"datasource_provider_create_lock:{tenant_id}_{provider_id}_{CredentialType.API_KEY}" with redis_client.lock(lock, timeout=20): @@ -624,6 +636,7 @@ class DatasourceProviderService: raise ValueError("Authorization name is already exists") try: + current_user = get_current_user() self.provider_manager.validate_provider_credentials( tenant_id=tenant_id, user_id=current_user.id, @@ -901,6 +914,7 @@ class DatasourceProviderService: """ update datasource credentials. 
""" + with Session(db.engine) as session: datasource_provider = ( session.query(DatasourceProvider) @@ -936,6 +950,7 @@ class DatasourceProviderService: for key, value in credentials.items() } try: + current_user = get_current_user() self.provider_manager.validate_provider_credentials( tenant_id=tenant_id, user_id=current_user.id, diff --git a/api/services/enterprise/enterprise_service.py b/api/services/enterprise/enterprise_service.py index 4fbf33fd6f..974aa849db 100644 --- a/api/services/enterprise/enterprise_service.py +++ b/api/services/enterprise/enterprise_service.py @@ -46,17 +46,17 @@ class EnterpriseService: class WebAppAuth: @classmethod - def is_user_allowed_to_access_webapp(cls, user_id: str, app_code: str): - params = {"userId": user_id, "appCode": app_code} + def is_user_allowed_to_access_webapp(cls, user_id: str, app_id: str): + params = {"userId": user_id, "appId": app_id} data = EnterpriseRequest.send_request("GET", "/webapp/permission", params=params) return data.get("result", False) @classmethod - def batch_is_user_allowed_to_access_webapps(cls, user_id: str, app_codes: list[str]): - if not app_codes: + def batch_is_user_allowed_to_access_webapps(cls, user_id: str, app_ids: list[str]): + if not app_ids: return {} - body = {"userId": user_id, "appCodes": app_codes} + body = {"userId": user_id, "appIds": app_ids} data = EnterpriseRequest.send_request("POST", "/webapp/permission/batch", json=body) if not data: raise ValueError("No data found.") diff --git a/api/services/file_service.py b/api/services/file_service.py index f0bb68766d..dd6a829ea2 100644 --- a/api/services/file_service.py +++ b/api/services/file_service.py @@ -19,7 +19,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor from extensions.ext_storage import storage from libs.datetime_utils import naive_utc_now from libs.helper import extract_tenant_id -from models.account import Account +from models import Account from models.enums import CreatorUserRole from models.model import EndUser, UploadFile diff --git a/api/services/hit_testing_service.py b/api/services/hit_testing_service.py index c6ea35076e..7fa82c6d22 100644 --- a/api/services/hit_testing_service.py +++ b/api/services/hit_testing_service.py @@ -9,7 +9,7 @@ from core.rag.models.document import Document from core.rag.retrieval.dataset_retrieval import DatasetRetrieval from core.rag.retrieval.retrieval_methods import RetrievalMethod from extensions.ext_database import db -from models.account import Account +from models import Account from models.dataset import Dataset, DatasetQuery logger = logging.getLogger(__name__) diff --git a/api/services/message_service.py b/api/services/message_service.py index 5e356bf925..7ed56d80f2 100644 --- a/api/services/message_service.py +++ b/api/services/message_service.py @@ -12,7 +12,7 @@ from core.ops.ops_trace_manager import TraceQueueManager, TraceTask from core.ops.utils import measure_time from extensions.ext_database import db from libs.infinite_scroll_pagination import InfiniteScrollPagination -from models.account import Account +from models import Account from models.model import App, AppMode, AppModelConfig, EndUser, Message, MessageFeedback from services.conversation_service import ConversationService from services.errors.message import ( @@ -288,9 +288,10 @@ class MessageService: ) with measure_time() as timer: - questions: list[str] = LLMGenerator.generate_suggested_questions_after_answer( + questions_sequence = LLMGenerator.generate_suggested_questions_after_answer( 
tenant_id=app_model.tenant_id, histories=histories ) + questions: list[str] = list(questions_sequence) # get tracing instance trace_manager = TraceQueueManager(app_id=app_model.id) diff --git a/api/services/metadata_service.py b/api/services/metadata_service.py index 6add830813..5f280c9e57 100644 --- a/api/services/metadata_service.py +++ b/api/services/metadata_service.py @@ -1,12 +1,11 @@ import copy import logging -from flask_login import current_user - from core.rag.index_processor.constant.built_in_field import BuiltInField, MetadataDataSource from extensions.ext_database import db from extensions.ext_redis import redis_client from libs.datetime_utils import naive_utc_now +from libs.login import current_account_with_tenant from models.dataset import Dataset, DatasetMetadata, DatasetMetadataBinding from services.dataset_service import DocumentService from services.entities.knowledge_entities.knowledge_entities import ( @@ -23,11 +22,11 @@ class MetadataService: # check if metadata name is too long if len(metadata_args.name) > 255: raise ValueError("Metadata name cannot exceed 255 characters.") - + current_user, current_tenant_id = current_account_with_tenant() # check if metadata name already exists if ( db.session.query(DatasetMetadata) - .filter_by(tenant_id=current_user.current_tenant_id, dataset_id=dataset_id, name=metadata_args.name) + .filter_by(tenant_id=current_tenant_id, dataset_id=dataset_id, name=metadata_args.name) .first() ): raise ValueError("Metadata name already exists.") @@ -35,7 +34,7 @@ class MetadataService: if field.value == metadata_args.name: raise ValueError("Metadata name already exists in Built-in fields.") metadata = DatasetMetadata( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, dataset_id=dataset_id, type=metadata_args.type, name=metadata_args.name, @@ -53,9 +52,10 @@ class MetadataService: lock_key = f"dataset_metadata_lock_{dataset_id}" # check if metadata name already exists + current_user, current_tenant_id = current_account_with_tenant() if ( db.session.query(DatasetMetadata) - .filter_by(tenant_id=current_user.current_tenant_id, dataset_id=dataset_id, name=name) + .filter_by(tenant_id=current_tenant_id, dataset_id=dataset_id, name=name) .first() ): raise ValueError("Metadata name already exists.") @@ -220,9 +220,10 @@ class MetadataService: db.session.commit() # deal metadata binding db.session.query(DatasetMetadataBinding).filter_by(document_id=operation.document_id).delete() + current_user, current_tenant_id = current_account_with_tenant() for metadata_value in operation.metadata_list: dataset_metadata_binding = DatasetMetadataBinding( - tenant_id=current_user.current_tenant_id, + tenant_id=current_tenant_id, dataset_id=dataset.id, document_id=operation.document_id, metadata_id=metadata_value.id, diff --git a/api/services/oauth_server.py b/api/services/oauth_server.py index b722dbee22..b05b43d76e 100644 --- a/api/services/oauth_server.py +++ b/api/services/oauth_server.py @@ -7,7 +7,7 @@ from werkzeug.exceptions import BadRequest from extensions.ext_database import db from extensions.ext_redis import redis_client -from models.account import Account +from models import Account from models.model import OAuthProviderApp from services.account_service import AccountService diff --git a/api/services/ops_service.py b/api/services/ops_service.py index b4b23b8360..e490b7ed3c 100644 --- a/api/services/ops_service.py +++ b/api/services/ops_service.py @@ -102,6 +102,15 @@ class OpsService: except Exception: 
new_decrypt_tracing_config.update({"project_url": "https://arms.console.aliyun.com/"}) + if tracing_provider == "tencent" and ( + "project_url" not in decrypt_tracing_config or not decrypt_tracing_config.get("project_url") + ): + try: + project_url = OpsTraceManager.get_trace_config_project_url(decrypt_tracing_config, tracing_provider) + new_decrypt_tracing_config.update({"project_url": project_url}) + except Exception: + new_decrypt_tracing_config.update({"project_url": "https://console.cloud.tencent.com/apm"}) + trace_config_data.tracing_config = new_decrypt_tracing_config return trace_config_data.to_dict() @@ -144,7 +153,7 @@ class OpsService: project_url = f"{tracing_config.get('host')}/project/{project_key}" except Exception: project_url = None - elif tracing_provider in ("langsmith", "opik"): + elif tracing_provider in ("langsmith", "opik", "tencent"): try: project_url = OpsTraceManager.get_trace_config_project_url(tracing_config, tracing_provider) except Exception: diff --git a/api/services/plugin/plugin_service.py b/api/services/plugin/plugin_service.py index 604adeb7b5..525ccc9417 100644 --- a/api/services/plugin/plugin_service.py +++ b/api/services/plugin/plugin_service.py @@ -336,6 +336,8 @@ class PluginService: pkg, verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only, ) + PluginService._check_plugin_installation_scope(response.verification) + return response @staticmethod @@ -358,6 +360,8 @@ class PluginService: pkg, verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only, ) + PluginService._check_plugin_installation_scope(response.verification) + return response @staticmethod @@ -377,6 +381,10 @@ class PluginService: manager = PluginInstaller() + for plugin_unique_identifier in plugin_unique_identifiers: + resp = manager.decode_plugin_from_identifier(tenant_id, plugin_unique_identifier) + PluginService._check_plugin_installation_scope(resp.verification) + return manager.install_from_identifiers( tenant_id, plugin_unique_identifiers, @@ -393,6 +401,9 @@ class PluginService: PluginService._check_marketplace_only_permission() manager = PluginInstaller() + plugin_decode_response = manager.decode_plugin_from_identifier(tenant_id, plugin_unique_identifier) + PluginService._check_plugin_installation_scope(plugin_decode_response.verification) + return manager.install_from_identifiers( tenant_id, [plugin_unique_identifier], diff --git a/api/services/rag_pipeline/pipeline_template/customized/customized_retrieval.py b/api/services/rag_pipeline/pipeline_template/customized/customized_retrieval.py index ca871bcaa1..4ac2e0792b 100644 --- a/api/services/rag_pipeline/pipeline_template/customized/customized_retrieval.py +++ b/api/services/rag_pipeline/pipeline_template/customized/customized_retrieval.py @@ -1,7 +1,7 @@ import yaml -from flask_login import current_user from extensions.ext_database import db +from libs.login import current_account_with_tenant from models.dataset import PipelineCustomizedTemplate from services.rag_pipeline.pipeline_template.pipeline_template_base import PipelineTemplateRetrievalBase from services.rag_pipeline.pipeline_template.pipeline_template_type import PipelineTemplateType @@ -13,9 +13,8 @@ class CustomizedPipelineTemplateRetrieval(PipelineTemplateRetrievalBase): """ def get_pipeline_templates(self, language: str) -> dict: - result = self.fetch_pipeline_templates_from_customized( - tenant_id=current_user.current_tenant_id, language=language - ) + _, current_tenant_id = 
current_account_with_tenant() + result = self.fetch_pipeline_templates_from_customized(tenant_id=current_tenant_id, language=language) return result def get_pipeline_template_detail(self, template_id: str): diff --git a/api/services/rag_pipeline/rag_pipeline.py b/api/services/rag_pipeline/rag_pipeline.py index 13c0ca7392..f6dddd75a3 100644 --- a/api/services/rag_pipeline/rag_pipeline.py +++ b/api/services/rag_pipeline/rag_pipeline.py @@ -37,7 +37,6 @@ from core.rag.entities.event import ( from core.repositories.factory import DifyCoreRepositoryFactory from core.repositories.sqlalchemy_workflow_node_execution_repository import SQLAlchemyWorkflowNodeExecutionRepository from core.variables.variables import Variable -from core.workflow.entities.variable_pool import VariablePool from core.workflow.entities.workflow_node_execution import ( WorkflowNodeExecution, WorkflowNodeExecutionStatus, @@ -50,11 +49,12 @@ from core.workflow.node_events.base import NodeRunResult from core.workflow.nodes.base.node import Node from core.workflow.nodes.node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING from core.workflow.repositories.workflow_node_execution_repository import OrderConfig +from core.workflow.runtime import VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.workflow_entry import WorkflowEntry from extensions.ext_database import db from libs.infinite_scroll_pagination import InfiniteScrollPagination -from models.account import Account +from models import Account from models.dataset import ( # type: ignore Dataset, Document, diff --git a/api/services/saved_message_service.py b/api/services/saved_message_service.py index 67a0106bbd..4dd6c8107b 100644 --- a/api/services/saved_message_service.py +++ b/api/services/saved_message_service.py @@ -2,7 +2,7 @@ from typing import Union from extensions.ext_database import db from libs.infinite_scroll_pagination import InfiniteScrollPagination -from models.account import Account +from models import Account from models.model import App, EndUser from models.web import SavedMessage from services.message_service import MessageService diff --git a/api/services/variable_truncator.py b/api/services/variable_truncator.py index d02508e4f3..a8f37c31c8 100644 --- a/api/services/variable_truncator.py +++ b/api/services/variable_truncator.py @@ -79,7 +79,7 @@ class VariableTruncator: self, string_length_limit=5000, array_element_limit: int = 20, - max_size_bytes: int = 1024_000, # 100KB + max_size_bytes: int = 1024_000, # 1000 KiB ): if string_length_limit <= 3: raise ValueError("string_length_limit should be greater than 3.") diff --git a/api/services/web_conversation_service.py b/api/services/web_conversation_service.py index 0f54e838f3..560aec2330 100644 --- a/api/services/web_conversation_service.py +++ b/api/services/web_conversation_service.py @@ -6,7 +6,7 @@ from sqlalchemy.orm import Session from core.app.entities.app_invoke_entities import InvokeFrom from extensions.ext_database import db from libs.infinite_scroll_pagination import InfiniteScrollPagination -from models.account import Account +from models import Account from models.model import App, EndUser from models.web import PinnedConversation from services.conversation_service import ConversationService diff --git a/api/services/webapp_auth_service.py b/api/services/webapp_auth_service.py index d30e14f7a1..9bd797a45f 100644 --- a/api/services/webapp_auth_service.py +++ b/api/services/webapp_auth_service.py @@ -10,7 +10,7 @@ from extensions.ext_database import db 
from libs.helper import TokenManager from libs.passport import PassportService from libs.password import compare_password -from models.account import Account, AccountStatus +from models import Account, AccountStatus from models.model import App, EndUser, Site from services.app_service import AppService from services.enterprise.enterprise_service import EnterpriseService @@ -172,7 +172,8 @@ class WebAppAuthService: return WebAppAuthType.EXTERNAL if app_code: - webapp_settings = EnterpriseService.WebAppAuth.get_app_access_mode_by_code(app_code) + app_id = AppService.get_app_id_by_code(app_code) + webapp_settings = EnterpriseService.WebAppAuth.get_app_access_mode_by_id(app_id=app_id) return cls.get_app_auth_type(access_mode=webapp_settings.access_mode) raise ValueError("Could not determine app authentication type.") diff --git a/api/services/website_service.py b/api/services/website_service.py index 37588d6ba5..a23f01ec71 100644 --- a/api/services/website_service.py +++ b/api/services/website_service.py @@ -23,6 +23,7 @@ class CrawlOptions: only_main_content: bool = False includes: str | None = None excludes: str | None = None + prompt: str | None = None max_depth: int | None = None use_sitemap: bool = True @@ -70,6 +71,7 @@ class WebsiteCrawlApiRequest: only_main_content=self.options.get("only_main_content", False), includes=self.options.get("includes"), excludes=self.options.get("excludes"), + prompt=self.options.get("prompt"), max_depth=self.options.get("max_depth"), use_sitemap=self.options.get("use_sitemap", True), ) @@ -174,6 +176,7 @@ class WebsiteService: def _crawl_with_firecrawl(cls, request: CrawlRequest, api_key: str, config: dict) -> dict[str, Any]: firecrawl_app = FirecrawlApp(api_key=api_key, base_url=config.get("base_url")) + params: dict[str, Any] if not request.options.crawl_sub_pages: params = { "includePaths": [], @@ -188,8 +191,10 @@ class WebsiteService: "limit": request.options.limit, "scrapeOptions": {"onlyMainContent": request.options.only_main_content}, } - if request.options.max_depth: - params["maxDepth"] = request.options.max_depth + + # Add optional prompt for Firecrawl v2 crawl-params compatibility + if request.options.prompt: + params["prompt"] = request.options.prompt job_id = firecrawl_app.crawl_url(request.url, params) website_crawl_time_cache_key = f"website_crawl_{job_id}" diff --git a/api/services/workflow/workflow_converter.py b/api/services/workflow/workflow_converter.py index 9c09f54bf5..e70b2b5c95 100644 --- a/api/services/workflow/workflow_converter.py +++ b/api/services/workflow/workflow_converter.py @@ -22,7 +22,7 @@ from core.prompt.utils.prompt_template_parser import PromptTemplateParser from core.workflow.nodes import NodeType from events.app_event import app_was_created from extensions.ext_database import db -from models.account import Account +from models import Account from models.api_based_extension import APIBasedExtension, APIBasedExtensionPoint from models.model import App, AppMode, AppModelConfig from models.workflow import Workflow, WorkflowType diff --git a/api/services/workflow_app_service.py b/api/services/workflow_app_service.py index ced6dca324..23dd436675 100644 --- a/api/services/workflow_app_service.py +++ b/api/services/workflow_app_service.py @@ -86,12 +86,16 @@ class WorkflowAppService: ), ) if created_by_account: + account = session.scalar(select(Account).where(Account.email == created_by_account)) + if not account: + raise ValueError(f"Account not found: {created_by_account}") + stmt = stmt.join( Account, and_( 
WorkflowAppLog.created_by == Account.id, WorkflowAppLog.created_by_role == CreatorUserRole.ACCOUNT, - Account.email == created_by_account, + Account.id == account.id, ), ) diff --git a/api/services/workflow_draft_variable_service.py b/api/services/workflow_draft_variable_service.py index 344b7486ee..5e63a83bb1 100644 --- a/api/services/workflow_draft_variable_service.py +++ b/api/services/workflow_draft_variable_service.py @@ -32,8 +32,7 @@ from factories.file_factory import StorageKeyLoader from factories.variable_factory import build_segment, segment_to_variable from libs.datetime_utils import naive_utc_now from libs.uuid_utils import uuidv7 -from models import App, Conversation -from models.account import Account +from models import Account, App, Conversation from models.enums import DraftVariableType from models.workflow import Workflow, WorkflowDraftVariable, WorkflowDraftVariableFile, is_system_variable_editable from repositories.factory import DifyAPIRepositoryFactory diff --git a/api/services/workflow_run_service.py b/api/services/workflow_run_service.py index 6a2edd912a..5c8719b499 100644 --- a/api/services/workflow_run_service.py +++ b/api/services/workflow_run_service.py @@ -26,13 +26,15 @@ class WorkflowRunService: ) self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker) - def get_paginate_advanced_chat_workflow_runs(self, app_model: App, args: dict) -> InfiniteScrollPagination: + def get_paginate_advanced_chat_workflow_runs( + self, app_model: App, args: dict, triggered_from: WorkflowRunTriggeredFrom = WorkflowRunTriggeredFrom.DEBUGGING + ) -> InfiniteScrollPagination: """ Get advanced chat app workflow run list - Only return triggered_from == advanced_chat :param app_model: app model :param args: request args + :param triggered_from: workflow run triggered from (default: DEBUGGING for preview runs) """ class WorkflowWithMessage: @@ -45,7 +47,7 @@ class WorkflowRunService: def __getattr__(self, item): return getattr(self._workflow_run, item) - pagination = self.get_paginate_workflow_runs(app_model, args) + pagination = self.get_paginate_workflow_runs(app_model, args, triggered_from) with_message_workflow_runs = [] for workflow_run in pagination.data: @@ -60,23 +62,27 @@ class WorkflowRunService: pagination.data = with_message_workflow_runs return pagination - def get_paginate_workflow_runs(self, app_model: App, args: dict) -> InfiniteScrollPagination: + def get_paginate_workflow_runs( + self, app_model: App, args: dict, triggered_from: WorkflowRunTriggeredFrom = WorkflowRunTriggeredFrom.DEBUGGING + ) -> InfiniteScrollPagination: """ - Get debug workflow run list - Only return triggered_from == debugging + Get workflow run list :param app_model: app model :param args: request args + :param triggered_from: workflow run triggered from (default: DEBUGGING) """ limit = int(args.get("limit", 20)) last_id = args.get("last_id") + status = args.get("status") return self._workflow_run_repo.get_paginated_workflow_runs( tenant_id=app_model.tenant_id, app_id=app_model.id, - triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, + triggered_from=triggered_from, limit=limit, last_id=last_id, + status=status, ) def get_workflow_run(self, app_model: App, run_id: str) -> WorkflowRun | None: @@ -92,6 +98,30 @@ class WorkflowRunService: run_id=run_id, ) + def get_workflow_runs_count( + self, + app_model: App, + status: str | None = None, + time_range: str | None = None, + triggered_from: WorkflowRunTriggeredFrom = WorkflowRunTriggeredFrom.DEBUGGING, + ) 
-> dict[str, int]: + """ + Get workflow runs count statistics + + :param app_model: app model + :param status: optional status filter + :param time_range: optional time range filter (e.g., "7d", "4h", "30m", "30s") + :param triggered_from: workflow run triggered from (default: DEBUGGING) + :return: dict with total and status counts + """ + return self._workflow_run_repo.get_workflow_runs_count( + tenant_id=app_model.tenant_id, + app_id=app_model.id, + triggered_from=triggered_from, + status=status, + time_range=time_range, + ) + def get_workflow_run_node_executions( self, app_model: App, diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py index dea6a657a4..2f69e46074 100644 --- a/api/services/workflow_service.py +++ b/api/services/workflow_service.py @@ -14,7 +14,7 @@ from core.file import File from core.repositories import DifyCoreRepositoryFactory from core.variables import Variable from core.variables.variables import VariableUnion -from core.workflow.entities import VariablePool, WorkflowNodeExecution +from core.workflow.entities import WorkflowNodeExecution from core.workflow.enums import ErrorStrategy, WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus from core.workflow.errors import WorkflowNodeRunFailedError from core.workflow.graph_events import GraphNodeEventBase, NodeRunFailedEvent, NodeRunSucceededEvent @@ -23,6 +23,7 @@ from core.workflow.nodes import NodeType from core.workflow.nodes.base.node import Node from core.workflow.nodes.node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING from core.workflow.nodes.start.entities import StartNodeData +from core.workflow.runtime import VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.workflow_entry import WorkflowEntry from events.app_event import app_draft_workflow_was_synced, app_published_workflow_was_updated @@ -30,7 +31,7 @@ from extensions.ext_database import db from extensions.ext_storage import storage from factories.file_factory import build_from_mapping, build_from_mappings from libs.datetime_utils import naive_utc_now -from models.account import Account +from models import Account from models.model import App, AppMode from models.tools import WorkflowToolProvider from models.workflow import Workflow, WorkflowNodeExecutionModel, WorkflowNodeExecutionTriggeredFrom, WorkflowType diff --git a/api/tasks/batch_create_segment_to_index_task.py b/api/tasks/batch_create_segment_to_index_task.py index 951b9e5653..b528728364 100644 --- a/api/tasks/batch_create_segment_to_index_task.py +++ b/api/tasks/batch_create_segment_to_index_task.py @@ -8,7 +8,6 @@ import click import pandas as pd from celery import shared_task from sqlalchemy import func -from sqlalchemy.orm import Session from core.model_manager import ModelManager from core.model_runtime.entities.model_entities import ModelType @@ -50,54 +49,48 @@ def batch_create_segment_to_index_task( indexing_cache_key = f"segment_batch_import_{job_id}" try: - with Session(db.engine) as session: - dataset = session.get(Dataset, dataset_id) - if not dataset: - raise ValueError("Dataset not exist.") + dataset = db.session.get(Dataset, dataset_id) + if not dataset: + raise ValueError("Dataset not exist.") - dataset_document = session.get(Document, document_id) - if not dataset_document: - raise ValueError("Document not exist.") + dataset_document = db.session.get(Document, document_id) + if not dataset_document: + raise ValueError("Document not exist.") - if ( - not dataset_document.enabled - or 
dataset_document.archived - or dataset_document.indexing_status != "completed" - ): - raise ValueError("Document is not available.") + if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed": + raise ValueError("Document is not available.") - upload_file = session.get(UploadFile, upload_file_id) - if not upload_file: - raise ValueError("UploadFile not found.") + upload_file = db.session.get(UploadFile, upload_file_id) + if not upload_file: + raise ValueError("UploadFile not found.") - with tempfile.TemporaryDirectory() as temp_dir: - suffix = Path(upload_file.key).suffix - # FIXME mypy: Cannot determine type of 'tempfile._get_candidate_names' better not use it here - file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore - storage.download(upload_file.key, file_path) + with tempfile.TemporaryDirectory() as temp_dir: + suffix = Path(upload_file.key).suffix + file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore + storage.download(upload_file.key, file_path) - # Skip the first row - df = pd.read_csv(file_path) - content = [] - for _, row in df.iterrows(): - if dataset_document.doc_form == "qa_model": - data = {"content": row.iloc[0], "answer": row.iloc[1]} - else: - data = {"content": row.iloc[0]} - content.append(data) - if len(content) == 0: - raise ValueError("The CSV file is empty.") + df = pd.read_csv(file_path) + content = [] + for _, row in df.iterrows(): + if dataset_document.doc_form == "qa_model": + data = {"content": row.iloc[0], "answer": row.iloc[1]} + else: + data = {"content": row.iloc[0]} + content.append(data) + if len(content) == 0: + raise ValueError("The CSV file is empty.") + + document_segments = [] + embedding_model = None + if dataset.indexing_technique == "high_quality": + model_manager = ModelManager() + embedding_model = model_manager.get_model_instance( + tenant_id=dataset.tenant_id, + provider=dataset.embedding_model_provider, + model_type=ModelType.TEXT_EMBEDDING, + model=dataset.embedding_model, + ) - document_segments = [] - embedding_model = None - if dataset.indexing_technique == "high_quality": - model_manager = ModelManager() - embedding_model = model_manager.get_model_instance( - tenant_id=dataset.tenant_id, - provider=dataset.embedding_model_provider, - model_type=ModelType.TEXT_EMBEDDING, - model=dataset.embedding_model, - ) word_count_change = 0 if embedding_model: tokens_list = embedding_model.get_text_embedding_num_tokens( @@ -105,6 +98,7 @@ def batch_create_segment_to_index_task( ) else: tokens_list = [0] * len(content) + for segment, tokens in zip(content, tokens_list): content = segment["content"] doc_id = str(uuid.uuid4()) @@ -135,11 +129,11 @@ def batch_create_segment_to_index_task( word_count_change += segment_document.word_count db.session.add(segment_document) document_segments.append(segment_document) - # update document word count + assert dataset_document.word_count is not None dataset_document.word_count += word_count_change db.session.add(dataset_document) - # add index to db + VectorService.create_segments_vector(None, document_segments, dataset, dataset_document.doc_form) db.session.commit() redis_client.setex(indexing_cache_key, 600, "completed") diff --git a/api/tasks/delete_account_task.py b/api/tasks/delete_account_task.py index 611aef86ad..fb5eb1d691 100644 --- a/api/tasks/delete_account_task.py +++ b/api/tasks/delete_account_task.py @@ -3,7 +3,7 @@ import logging from celery import shared_task from 
extensions.ext_database import db -from models.account import Account +from models import Account from services.billing_service import BillingService from tasks.mail_account_deletion_task import send_deletion_success_task diff --git a/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py b/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py index 4171656131..6de95a3b85 100644 --- a/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py +++ b/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py @@ -16,7 +16,7 @@ from core.app.entities.app_invoke_entities import InvokeFrom, RagPipelineGenerat from core.app.entities.rag_pipeline_invoke_entities import RagPipelineInvokeEntity from core.repositories.factory import DifyCoreRepositoryFactory from extensions.ext_database import db -from models.account import Account, Tenant +from models import Account, Tenant from models.dataset import Pipeline from models.enums import WorkflowRunTriggeredFrom from models.workflow import Workflow, WorkflowNodeExecutionTriggeredFrom diff --git a/api/tasks/rag_pipeline/rag_pipeline_run_task.py b/api/tasks/rag_pipeline/rag_pipeline_run_task.py index 90ebe80daf..f4a092d97e 100644 --- a/api/tasks/rag_pipeline/rag_pipeline_run_task.py +++ b/api/tasks/rag_pipeline/rag_pipeline_run_task.py @@ -17,7 +17,7 @@ from core.app.entities.rag_pipeline_invoke_entities import RagPipelineInvokeEnti from core.repositories.factory import DifyCoreRepositoryFactory from extensions.ext_database import db from extensions.ext_redis import redis_client -from models.account import Account, Tenant +from models import Account, Tenant from models.dataset import Pipeline from models.enums import WorkflowRunTriggeredFrom from models.workflow import Workflow, WorkflowNodeExecutionTriggeredFrom diff --git a/api/tasks/retry_document_indexing_task.py b/api/tasks/retry_document_indexing_task.py index 9c12696824..9d208647e6 100644 --- a/api/tasks/retry_document_indexing_task.py +++ b/api/tasks/retry_document_indexing_task.py @@ -10,7 +10,7 @@ from core.rag.index_processor.index_processor_factory import IndexProcessorFacto from extensions.ext_database import db from extensions.ext_redis import redis_client from libs.datetime_utils import naive_utc_now -from models.account import Account, Tenant +from models import Account, Tenant from models.dataset import Dataset, Document, DocumentSegment from services.feature_service import FeatureService from services.rag_pipeline.rag_pipeline import RagPipelineService diff --git a/api/templates/without-brand/change_mail_confirm_new_template_en-US.html b/api/templates/without-brand/change_mail_confirm_new_template_en-US.html index 69a8978f42..861b1bcdb6 100644 --- a/api/templates/without-brand/change_mail_confirm_new_template_en-US.html +++ b/api/templates/without-brand/change_mail_confirm_new_template_en-US.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -96,7 +98,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -107,7 +110,7 @@

Confirm Your New Email Address

-

You’re updating the email address linked to your Dify account.

+

You're updating the email address linked to your account.

To confirm this action, please use the verification code below.

This code will only be valid for the next 5 minutes:

@@ -118,5 +121,4 @@ - - + \ No newline at end of file diff --git a/api/templates/without-brand/change_mail_confirm_new_template_zh-CN.html b/api/templates/without-brand/change_mail_confirm_new_template_zh-CN.html index e3e9e7c45a..e411680e89 100644 --- a/api/templates/without-brand/change_mail_confirm_new_template_zh-CN.html +++ b/api/templates/without-brand/change_mail_confirm_new_template_zh-CN.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -96,7 +98,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -107,7 +110,7 @@

确认您的邮箱地址变更

-

您正在更新与您的 Dify 账户关联的邮箱地址。

+

您正在更新与您的账户关联的邮箱地址。

为了确认此操作,请使用以下验证码。

此验证码仅在接下来的5分钟内有效:

@@ -118,5 +121,4 @@ - - + \ No newline at end of file diff --git a/api/templates/without-brand/change_mail_confirm_old_template_en-US.html b/api/templates/without-brand/change_mail_confirm_old_template_en-US.html index 9d79fa7ff9..9fe52255a5 100644 --- a/api/templates/without-brand/change_mail_confirm_old_template_en-US.html +++ b/api/templates/without-brand/change_mail_confirm_old_template_en-US.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -96,7 +98,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -107,7 +110,7 @@

Verify Your Request to Change Email

-

We received a request to change the email address associated with your Dify account.

+

We received a request to change the email address associated with your account.

To confirm this action, please use the verification code below.

This code will only be valid for the next 5 minutes:

@@ -118,5 +121,4 @@ - - + \ No newline at end of file diff --git a/api/templates/without-brand/change_mail_confirm_old_template_zh-CN.html b/api/templates/without-brand/change_mail_confirm_old_template_zh-CN.html index 41f0839190..98cbd2f0c6 100644 --- a/api/templates/without-brand/change_mail_confirm_old_template_zh-CN.html +++ b/api/templates/without-brand/change_mail_confirm_old_template_zh-CN.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -96,7 +98,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -107,7 +110,7 @@

验证您的邮箱变更请求

-

我们收到了一个变更您 Dify 账户关联邮箱地址的请求。

+

我们收到了一个变更您账户关联邮箱地址的请求。

此验证码仅在接下来的5分钟内有效:

@@ -117,5 +120,4 @@
- - + \ No newline at end of file diff --git a/api/templates/without-brand/invite_member_mail_template_en-US.html b/api/templates/without-brand/invite_member_mail_template_en-US.html index fc7f3679ba..f9157284fa 100644 --- a/api/templates/without-brand/invite_member_mail_template_en-US.html +++ b/api/templates/without-brand/invite_member_mail_template_en-US.html @@ -1,5 +1,6 @@ + +
-
-
- Dify Logo
+

Dear {{ to }},

-

{{ inviter_name }} is pleased to invite you to join our workspace on {{application_title}}, a platform specifically designed for LLM application development. On {{application_title}}, you can explore, create, and collaborate to build and operate AI applications.

+

{{ inviter_name }} is pleased to invite you to join our workspace on {{application_title}}, a
+ platform specifically designed for LLM application development. On {{application_title}}, you can explore,
+ create, and collaborate to build and operate AI applications.

Click the button below to log in to {{application_title}} and join the workspace.

-

Login Here

+

Login Here

Best regards,

{{application_title}} Team

- + \ No newline at end of file diff --git a/api/templates/without-brand/transfer_workspace_new_owner_notify_template_en-US.html b/api/templates/without-brand/transfer_workspace_new_owner_notify_template_en-US.html index a5758a2184..659c285324 100644 --- a/api/templates/without-brand/transfer_workspace_new_owner_notify_template_en-US.html +++ b/api/templates/without-brand/transfer_workspace_new_owner_notify_template_en-US.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -80,10 +82,9 @@

You have been assigned as the new owner of the workspace "{{WorkspaceName}}".

As the new owner, you now have full administrative privileges for this workspace.

-

If you have any questions, please contact support@dify.ai.

+

If you have any questions, please contact support.

- - + \ No newline at end of file diff --git a/api/templates/without-brand/transfer_workspace_new_owner_notify_template_zh-CN.html b/api/templates/without-brand/transfer_workspace_new_owner_notify_template_zh-CN.html index 53bab92552..f710dbb289 100644 --- a/api/templates/without-brand/transfer_workspace_new_owner_notify_template_zh-CN.html +++ b/api/templates/without-brand/transfer_workspace_new_owner_notify_template_zh-CN.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -80,10 +82,9 @@

您已被分配为工作空间“{{WorkspaceName}}”的新所有者。

作为新所有者,您现在对该工作空间拥有完全的管理权限。

-

如果您有任何问题,请联系support@dify.ai。

+

如果您有任何问题,请联系支持团队。

- - + \ No newline at end of file diff --git a/api/templates/without-brand/transfer_workspace_old_owner_notify_template_en-US.html b/api/templates/without-brand/transfer_workspace_old_owner_notify_template_en-US.html index 3e7faeb01e..149ec77aea 100644 --- a/api/templates/without-brand/transfer_workspace_old_owner_notify_template_en-US.html +++ b/api/templates/without-brand/transfer_workspace_old_owner_notify_template_en-US.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -97,7 +99,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -108,12 +111,14 @@

Workspace ownership has been transferred

-

You have successfully transferred ownership of the workspace "{{WorkspaceName}}" to {{NewOwnerEmail}}.

-

You no longer have owner privileges for this workspace. Your access level has been changed to Admin.

-

If you did not initiate this transfer or have concerns about this change, please contact support@dify.ai immediately.

+

You have successfully transferred ownership of the workspace "{{WorkspaceName}}" to
+ {{NewOwnerEmail}}.

+

You no longer have owner privileges for this workspace. Your access level has been changed to
+ Admin.

+

If you did not initiate this transfer or have concerns about this change, please contact
+ support immediately.

- - + \ No newline at end of file diff --git a/api/templates/without-brand/transfer_workspace_old_owner_notify_template_zh-CN.html b/api/templates/without-brand/transfer_workspace_old_owner_notify_template_zh-CN.html index 31e3c23140..d7aed40068 100644 --- a/api/templates/without-brand/transfer_workspace_old_owner_notify_template_zh-CN.html +++ b/api/templates/without-brand/transfer_workspace_old_owner_notify_template_zh-CN.html @@ -42,7 +42,8 @@ font-family: Inter; font-style: normal; font-weight: 600; - line-height: 120%; /* 28.8px */ + line-height: 120%; + /* 28.8px */ } .description { @@ -51,7 +52,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -97,7 +99,8 @@ font-family: Inter; font-style: normal; font-weight: 400; - line-height: 20px; /* 142.857% */ + line-height: 20px; + /* 142.857% */ letter-spacing: -0.07px; } @@ -110,10 +113,9 @@

您已成功将工作空间“{{WorkspaceName}}”的所有权转移给{{NewOwnerEmail}}。

您不再拥有此工作空间的拥有者权限。您的访问级别已更改为管理员。

-

如果您没有发起此转移或对此变更有任何疑问,请立即联系support@dify.ai。

+

如果您没有发起此转移或对此变更有任何疑问,请立即联系支持团队。

- - + \ No newline at end of file diff --git a/api/tests/integration_tests/workflow/nodes/test_code.py b/api/tests/integration_tests/workflow/nodes/test_code.py index b62d8aa544..78878cdeef 100644 --- a/api/tests/integration_tests/workflow/nodes/test_code.py +++ b/api/tests/integration_tests/workflow/nodes/test_code.py @@ -5,12 +5,13 @@ import pytest from configs import dify_config from core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.node_events import NodeRunResult from core.workflow.nodes.code.code_node import CodeNode from core.workflow.nodes.node_factory import DifyNodeFactory +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from models.enums import UserFrom from tests.integration_tests.workflow.nodes.__mock.code_executor import setup_code_executor_mock diff --git a/api/tests/integration_tests/workflow/nodes/test_http.py b/api/tests/integration_tests/workflow/nodes/test_http.py index ea99beacaa..2367990d3e 100644 --- a/api/tests/integration_tests/workflow/nodes/test_http.py +++ b/api/tests/integration_tests/workflow/nodes/test_http.py @@ -5,10 +5,11 @@ from urllib.parse import urlencode import pytest from core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.graph import Graph from core.workflow.nodes.http_request.node import HttpRequestNode from core.workflow.nodes.node_factory import DifyNodeFactory +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from models.enums import UserFrom from tests.integration_tests.workflow.nodes.__mock.http import setup_http_mock @@ -174,13 +175,13 @@ def test_custom_authorization_header(setup_http_mock): @pytest.mark.parametrize("setup_http_mock", [["none"]], indirect=True) def test_custom_auth_with_empty_api_key_does_not_set_header(setup_http_mock): """Test: In custom authentication mode, when the api_key is empty, no header should be set.""" - from core.workflow.entities.variable_pool import VariablePool from core.workflow.nodes.http_request.entities import ( HttpRequestNodeAuthorization, HttpRequestNodeData, HttpRequestNodeTimeout, ) from core.workflow.nodes.http_request.executor import Executor + from core.workflow.runtime import VariablePool from core.workflow.system_variable import SystemVariable # Create variable pool diff --git a/api/tests/integration_tests/workflow/nodes/test_llm.py b/api/tests/integration_tests/workflow/nodes/test_llm.py index 31281cd8ad..3b16c3920b 100644 --- a/api/tests/integration_tests/workflow/nodes/test_llm.py +++ b/api/tests/integration_tests/workflow/nodes/test_llm.py @@ -6,12 +6,13 @@ from unittest.mock import MagicMock, patch from core.app.entities.app_invoke_entities import InvokeFrom from core.llm_generator.output_parser.structured_output import _parse_structured_output -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.node_events import 
StreamCompletedEvent from core.workflow.nodes.llm.node import LLMNode from core.workflow.nodes.node_factory import DifyNodeFactory +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from extensions.ext_database import db from models.enums import UserFrom diff --git a/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py b/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py index 76918f689f..9d9102cee2 100644 --- a/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py +++ b/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py @@ -5,11 +5,12 @@ from unittest.mock import MagicMock from core.app.entities.app_invoke_entities import InvokeFrom from core.model_runtime.entities import AssistantPromptMessage -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.nodes.node_factory import DifyNodeFactory from core.workflow.nodes.parameter_extractor.parameter_extractor_node import ParameterExtractorNode +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from extensions.ext_database import db from models.enums import UserFrom diff --git a/api/tests/integration_tests/workflow/nodes/test_template_transform.py b/api/tests/integration_tests/workflow/nodes/test_template_transform.py index 53252c7f2e..285387b817 100644 --- a/api/tests/integration_tests/workflow/nodes/test_template_transform.py +++ b/api/tests/integration_tests/workflow/nodes/test_template_transform.py @@ -4,11 +4,12 @@ import uuid import pytest from core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.nodes.node_factory import DifyNodeFactory from core.workflow.nodes.template_transform.template_transform_node import TemplateTransformNode +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from models.enums import UserFrom from tests.integration_tests.workflow.nodes.__mock.code_executor import setup_code_executor_mock diff --git a/api/tests/integration_tests/workflow/nodes/test_tool.py b/api/tests/integration_tests/workflow/nodes/test_tool.py index 16d44d1eaf..8dd8150b1c 100644 --- a/api/tests/integration_tests/workflow/nodes/test_tool.py +++ b/api/tests/integration_tests/workflow/nodes/test_tool.py @@ -4,12 +4,13 @@ from unittest.mock import MagicMock from core.app.entities.app_invoke_entities import InvokeFrom from core.tools.utils.configuration import ToolParameterConfigurationManager -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.node_events import StreamCompletedEvent from core.workflow.nodes.node_factory import DifyNodeFactory from core.workflow.nodes.tool.tool_node import ToolNode +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import 
SystemVariable from models.enums import UserFrom diff --git a/api/tests/test_containers_integration_tests/services/test_account_service.py b/api/tests/test_containers_integration_tests/services/test_account_service.py index 6eff73a8f3..c59fc50f08 100644 --- a/api/tests/test_containers_integration_tests/services/test_account_service.py +++ b/api/tests/test_containers_integration_tests/services/test_account_service.py @@ -8,7 +8,7 @@ from werkzeug.exceptions import Unauthorized from configs import dify_config from controllers.console.error import AccountNotFound, NotAllowedCreateWorkspace -from models.account import AccountStatus, TenantAccountJoin +from models import AccountStatus, TenantAccountJoin from services.account_service import AccountService, RegisterService, TenantService, TokenPair from services.errors.account import ( AccountAlreadyInTenantError, @@ -470,7 +470,7 @@ class TestAccountService: # Verify integration was created from extensions.ext_database import db - from models.account import AccountIntegrate + from models import AccountIntegrate integration = db.session.query(AccountIntegrate).filter_by(account_id=account.id, provider="new-google").first() assert integration is not None @@ -505,7 +505,7 @@ class TestAccountService: # Verify integration was updated from extensions.ext_database import db - from models.account import AccountIntegrate + from models import AccountIntegrate integration = ( db.session.query(AccountIntegrate).filter_by(account_id=account.id, provider="exists-google").first() @@ -2303,7 +2303,7 @@ class TestRegisterService: # Verify account was created from extensions.ext_database import db - from models.account import Account + from models import Account from models.model import DifySetup account = db.session.query(Account).filter_by(email=admin_email).first() @@ -2352,7 +2352,7 @@ class TestRegisterService: # Verify no entities were created (rollback worked) from extensions.ext_database import db - from models.account import Account, Tenant, TenantAccountJoin + from models import Account, Tenant, TenantAccountJoin from models.model import DifySetup account = db.session.query(Account).filter_by(email=admin_email).first() @@ -2446,7 +2446,7 @@ class TestRegisterService: # Verify OAuth integration was created from extensions.ext_database import db - from models.account import AccountIntegrate + from models import AccountIntegrate integration = db.session.query(AccountIntegrate).filter_by(account_id=account.id, provider=provider).first() assert integration is not None @@ -2472,7 +2472,7 @@ class TestRegisterService: mock_external_service_dependencies["billing_service"].is_email_in_freeze.return_value = False # Execute registration with pending status - from models.account import AccountStatus + from models import AccountStatus account = RegisterService.register( email=email, @@ -2661,7 +2661,7 @@ class TestRegisterService: # Verify new account was created with pending status from extensions.ext_database import db - from models.account import Account, TenantAccountJoin + from models import Account, TenantAccountJoin new_account = db.session.query(Account).filter_by(email=new_member_email).first() assert new_account is not None diff --git a/api/tests/test_containers_integration_tests/services/test_agent_service.py b/api/tests/test_containers_integration_tests/services/test_agent_service.py index c572ddc925..ca513319b2 100644 --- a/api/tests/test_containers_integration_tests/services/test_agent_service.py +++ 
b/api/tests/test_containers_integration_tests/services/test_agent_service.py @@ -5,7 +5,7 @@ import pytest from faker import Faker from core.plugin.impl.exc import PluginDaemonClientSideError -from models.account import Account +from models import Account from models.model import AppModelConfig, Conversation, EndUser, Message, MessageAgentThought from services.account_service import AccountService, TenantService from services.agent_service import AgentService diff --git a/api/tests/test_containers_integration_tests/services/test_annotation_service.py b/api/tests/test_containers_integration_tests/services/test_annotation_service.py index 3cb7424df8..2b03ec1c26 100644 --- a/api/tests/test_containers_integration_tests/services/test_annotation_service.py +++ b/api/tests/test_containers_integration_tests/services/test_annotation_service.py @@ -4,7 +4,7 @@ import pytest from faker import Faker from werkzeug.exceptions import NotFound -from models.account import Account +from models import Account from models.model import MessageAnnotation from services.annotation_service import AppAnnotationService from services.app_service import AppService @@ -25,9 +25,7 @@ class TestAnnotationService: patch("services.annotation_service.enable_annotation_reply_task") as mock_enable_task, patch("services.annotation_service.disable_annotation_reply_task") as mock_disable_task, patch("services.annotation_service.batch_import_annotations_task") as mock_batch_import_task, - patch( - "services.annotation_service.current_user", create_autospec(Account, instance=True) - ) as mock_current_user, + patch("services.annotation_service.current_account_with_tenant") as mock_current_account_with_tenant, ): # Setup default mock returns mock_account_feature_service.get_features.return_value.billing.enabled = False @@ -38,6 +36,9 @@ class TestAnnotationService: mock_disable_task.delay.return_value = None mock_batch_import_task.delay.return_value = None + # Create mock user that will be returned by current_account_with_tenant + mock_user = create_autospec(Account, instance=True) + yield { "account_feature_service": mock_account_feature_service, "feature_service": mock_feature_service, @@ -47,7 +48,8 @@ class TestAnnotationService: "enable_task": mock_enable_task, "disable_task": mock_disable_task, "batch_import_task": mock_batch_import_task, - "current_user": mock_current_user, + "current_account_with_tenant": mock_current_account_with_tenant, + "current_user": mock_user, } def _create_test_app_and_account(self, db_session_with_containers, mock_external_service_dependencies): @@ -107,6 +109,11 @@ class TestAnnotationService: """ mock_external_service_dependencies["current_user"].id = account_id mock_external_service_dependencies["current_user"].current_tenant_id = tenant_id + # Configure current_account_with_tenant to return (user, tenant_id) + mock_external_service_dependencies["current_account_with_tenant"].return_value = ( + mock_external_service_dependencies["current_user"], + tenant_id, + ) def _create_test_conversation(self, app, account, fake): """ diff --git a/api/tests/test_containers_integration_tests/services/test_app_service.py b/api/tests/test_containers_integration_tests/services/test_app_service.py index cbbbbddb21..e53392bcef 100644 --- a/api/tests/test_containers_integration_tests/services/test_app_service.py +++ b/api/tests/test_containers_integration_tests/services/test_app_service.py @@ -4,7 +4,7 @@ import pytest from faker import Faker from constants.model_template import default_app_templates -from 
models.account import Account +from models import Account from models.model import App, Site from services.account_service import AccountService, TenantService from services.app_service import AppService diff --git a/api/tests/test_containers_integration_tests/services/test_file_service.py b/api/tests/test_containers_integration_tests/services/test_file_service.py index e6bfc157c7..4c94e42f3e 100644 --- a/api/tests/test_containers_integration_tests/services/test_file_service.py +++ b/api/tests/test_containers_integration_tests/services/test_file_service.py @@ -8,7 +8,7 @@ from sqlalchemy import Engine from werkzeug.exceptions import NotFound from configs import dify_config -from models.account import Account, Tenant +from models import Account, Tenant from models.enums import CreatorUserRole from models.model import EndUser, UploadFile from services.errors.file import FileTooLargeError, UnsupportedFileTypeError diff --git a/api/tests/test_containers_integration_tests/services/test_metadata_service.py b/api/tests/test_containers_integration_tests/services/test_metadata_service.py index 253791cc2d..c8ced3f3a5 100644 --- a/api/tests/test_containers_integration_tests/services/test_metadata_service.py +++ b/api/tests/test_containers_integration_tests/services/test_metadata_service.py @@ -4,7 +4,7 @@ import pytest from faker import Faker from core.rag.index_processor.constant.built_in_field import BuiltInField -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, DatasetMetadata, DatasetMetadataBinding, Document from services.entities.knowledge_entities.knowledge_entities import MetadataArgs from services.metadata_service import MetadataService @@ -17,9 +17,7 @@ class TestMetadataService: def mock_external_service_dependencies(self): """Mock setup for external service dependencies.""" with ( - patch( - "services.metadata_service.current_user", create_autospec(Account, instance=True) - ) as mock_current_user, + patch("libs.login.current_user", create_autospec(Account, instance=True)) as mock_current_user, patch("services.metadata_service.redis_client") as mock_redis_client, patch("services.dataset_service.DocumentService") as mock_document_service, ): diff --git a/api/tests/test_containers_integration_tests/services/test_model_provider_service.py b/api/tests/test_containers_integration_tests/services/test_model_provider_service.py index fb319a4963..8cb3572c47 100644 --- a/api/tests/test_containers_integration_tests/services/test_model_provider_service.py +++ b/api/tests/test_containers_integration_tests/services/test_model_provider_service.py @@ -5,7 +5,7 @@ from faker import Faker from core.entities.model_entities import ModelStatus from core.model_runtime.entities.model_entities import FetchFrom, ModelType -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.provider import Provider, ProviderModel, ProviderModelSetting, ProviderType from services.model_provider_service import ModelProviderService diff --git a/api/tests/test_containers_integration_tests/services/test_tag_service.py b/api/tests/test_containers_integration_tests/services/test_tag_service.py index 3d1226019b..6732b8d558 100644 --- a/api/tests/test_containers_integration_tests/services/test_tag_service.py +++ b/api/tests/test_containers_integration_tests/services/test_tag_service.py 
@@ -5,7 +5,7 @@ from faker import Faker from sqlalchemy import select from werkzeug.exceptions import NotFound -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset from models.model import App, Tag, TagBinding from services.tag_service import TagService diff --git a/api/tests/test_containers_integration_tests/services/test_web_conversation_service.py b/api/tests/test_containers_integration_tests/services/test_web_conversation_service.py index 5db7901cbc..bbbf48ede9 100644 --- a/api/tests/test_containers_integration_tests/services/test_web_conversation_service.py +++ b/api/tests/test_containers_integration_tests/services/test_web_conversation_service.py @@ -5,7 +5,7 @@ from faker import Faker from sqlalchemy import select from core.app.entities.app_invoke_entities import InvokeFrom -from models.account import Account +from models import Account from models.model import Conversation, EndUser from models.web import PinnedConversation from services.account_service import AccountService, TenantService diff --git a/api/tests/test_containers_integration_tests/services/test_webapp_auth_service.py b/api/tests/test_containers_integration_tests/services/test_webapp_auth_service.py index 059767458a..73e622b061 100644 --- a/api/tests/test_containers_integration_tests/services/test_webapp_auth_service.py +++ b/api/tests/test_containers_integration_tests/services/test_webapp_auth_service.py @@ -7,7 +7,7 @@ from faker import Faker from werkzeug.exceptions import NotFound, Unauthorized from libs.password import hash_password -from models.account import Account, AccountStatus, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, AccountStatus, Tenant, TenantAccountJoin, TenantAccountRole from models.model import App, Site from services.errors.account import AccountLoginError, AccountNotFoundError, AccountPasswordError from services.webapp_auth_service import WebAppAuthService, WebAppAuthType @@ -863,13 +863,14 @@ class TestWebAppAuthService: - Mock service integration """ # Arrange: Setup mock for enterprise service - mock_webapp_auth = type("MockWebAppAuth", (), {"access_mode": "sso_verified"})() + mock_external_service_dependencies["app_service"].get_app_id_by_code.return_value = "mock_app_id" + setting = type("MockWebAppAuth", (), {"access_mode": "sso_verified"})() mock_external_service_dependencies[ "enterprise_service" - ].WebAppAuth.get_app_access_mode_by_code.return_value = mock_webapp_auth + ].WebAppAuth.get_app_access_mode_by_id.return_value = setting # Act: Execute authentication type determination - result = WebAppAuthService.get_app_auth_type(app_code="mock_app_code") + result: WebAppAuthType = WebAppAuthService.get_app_auth_type(app_code="mock_app_code") # Assert: Verify correct result assert result == WebAppAuthType.EXTERNAL @@ -877,7 +878,7 @@ class TestWebAppAuthService: # Verify mock service was called correctly mock_external_service_dependencies[ "enterprise_service" - ].WebAppAuth.get_app_access_mode_by_code.assert_called_once_with("mock_app_code") + ].WebAppAuth.get_app_access_mode_by_id.assert_called_once_with(app_id="mock_app_id") def test_get_app_auth_type_no_parameters(self, db_session_with_containers, mock_external_service_dependencies): """ diff --git a/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py b/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py index 
62c9bead86..66bd4d3cd9 100644
--- a/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py
+++ b/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py
@@ -789,6 +789,31 @@ class TestWorkflowAppService:
         assert result_account_filter["total"] == 3
         assert all(log.created_by_role == CreatorUserRole.ACCOUNT for log in result_account_filter["data"])

+        # Test filtering by a changed account email
+        original_email = account.email
+        new_email = "changed@example.com"
+        account.email = new_email
+        db_session_with_containers.commit()
+
+        assert account.email == new_email
+
+        # Results for the new email are expected to match those for the original email
+        result_with_new_email = service.get_paginate_workflow_app_logs(
+            session=db_session_with_containers, app_model=app, created_by_account=new_email, page=1, limit=20
+        )
+        assert result_with_new_email["total"] == 3
+        assert all(log.created_by_role == CreatorUserRole.ACCOUNT for log in result_with_new_email["data"])
+
+        # The old email is no longer bound to any account; this unexpected input should raise a ValueError
+        with pytest.raises(ValueError) as exc_info:
+            service.get_paginate_workflow_app_logs(
+                session=db_session_with_containers, app_model=app, created_by_account=original_email, page=1, limit=20
+            )
+        assert "Account not found" in str(exc_info.value)
+
+        account.email = original_email
+        db_session_with_containers.commit()
+
         # Test filtering by non-existent session ID
         result_no_session = service.get_paginate_workflow_app_logs(
             session=db_session_with_containers,
             app_model=app,
@@ -799,15 +824,16 @@
         )
         assert result_no_session["total"] == 0

-        # Test filtering by non-existent account email
-        result_no_account = service.get_paginate_workflow_app_logs(
-            session=db_session_with_containers,
-            app_model=app,
-            created_by_account="nonexistent@example.com",
-            page=1,
-            limit=20,
-        )
-        assert result_no_account["total"] == 0
+        # Filtering by a non-existent account email is unexpected input and should raise a ValueError
+        with pytest.raises(ValueError) as exc_info:
+            service.get_paginate_workflow_app_logs(
+                session=db_session_with_containers,
+                app_model=app,
+                created_by_account="nonexistent@example.com",
+                page=1,
+                limit=20,
+            )
+        assert "Account not found" in str(exc_info.value)

     def test_get_paginate_workflow_app_logs_with_uuid_keyword_search(
         self, db_session_with_containers, mock_external_service_dependencies
@@ -1057,15 +1083,15 @@ TestWorkflowAppService:
         assert len(result_no_session["data"]) == 0

         # Test with account email that doesn't exist
-        result_no_account = service.get_paginate_workflow_app_logs(
-            session=db_session_with_containers,
-            app_model=app,
-            created_by_account="nonexistent@example.com",
-            page=1,
-            limit=20,
-        )
-        assert result_no_account["total"] == 0
-        assert len(result_no_account["data"]) == 0
+        with pytest.raises(ValueError) as exc_info:
+            service.get_paginate_workflow_app_logs(
+                session=db_session_with_containers,
+                app_model=app,
+                created_by_account="nonexistent@example.com",
+                page=1,
+                limit=20,
+            )
+        assert "Account not found" in str(exc_info.value)

     def test_get_paginate_workflow_app_logs_with_complex_query_combinations(
         self, db_session_with_containers, mock_external_service_dependencies
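The rewritten assertions pin down a lookup-then-filter contract: `created_by_account` is resolved to an `Account` row first, and logs are matched by account id rather than by the email string. That is why a changed email still returns the same three logs, while an email bound to no account raises. A minimal sketch of the presumed resolution step (the helper name and query shape are illustrative, not the service's exact code):

```python
# Hedged sketch of the email-to-account resolution these tests exercise.
# `Account` mirrors the test imports; `resolve_account_id` is a hypothetical helper.
from sqlalchemy import select
from sqlalchemy.orm import Session

from models import Account


def resolve_account_id(session: Session, created_by_account: str) -> str:
    """Logs are keyed by account id, so the email is only an entry point."""
    account = session.scalar(select(Account).where(Account.email == created_by_account))
    if account is None:
        # Matches the "Account not found" ValueError asserted above.
        raise ValueError(f"Account not found: {created_by_account}")
    return account.id
```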
diff --git a/api/tests/test_containers_integration_tests/services/test_workspace_service.py b/api/tests/test_containers_integration_tests/services/test_workspace_service.py
index 814d1908bd..4249642bc9 100644
--- a/api/tests/test_containers_integration_tests/services/test_workspace_service.py
+++ b/api/tests/test_containers_integration_tests/services/test_workspace_service.py
@@ -3,7 +3,7 @@ from unittest.mock import patch
 import pytest
 from faker import Faker

-from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
 from services.workspace_service import WorkspaceService
diff --git a/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py b/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py
index 7366b08439..0871467a05 100644
--- a/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py
+++ b/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py
@@ -3,7 +3,7 @@ from unittest.mock import patch
 import pytest
 from faker import Faker

-from models.account import Account, Tenant
+from models import Account, Tenant
 from models.tools import ApiToolProvider
 from services.tools.api_tools_manage_service import ApiToolManageService
diff --git a/api/tests/test_containers_integration_tests/services/tools/test_mcp_tools_manage_service.py b/api/tests/test_containers_integration_tests/services/tools/test_mcp_tools_manage_service.py
index f7a4c53318..71d55c3ade 100644
--- a/api/tests/test_containers_integration_tests/services/tools/test_mcp_tools_manage_service.py
+++ b/api/tests/test_containers_integration_tests/services/tools/test_mcp_tools_manage_service.py
@@ -4,7 +4,7 @@ import pytest
 from faker import Faker

 from core.tools.entities.tool_entities import ToolProviderType
-from models.account import Account, Tenant
+from models import Account, Tenant
 from models.tools import MCPToolProvider
 from services.tools.mcp_tools_manage_service import UNCHANGED_SERVER_URL_PLACEHOLDER, MCPToolManageService
diff --git a/api/tests/test_containers_integration_tests/services/workflow/test_workflow_converter.py b/api/tests/test_containers_integration_tests/services/workflow/test_workflow_converter.py
index 88aa0b6e72..2c5e719a58 100644
--- a/api/tests/test_containers_integration_tests/services/workflow/test_workflow_converter.py
+++ b/api/tests/test_containers_integration_tests/services/workflow/test_workflow_converter.py
@@ -15,7 +15,7 @@ from core.app.app_config.entities import (
 )
 from core.model_runtime.entities.llm_entities import LLMMode
 from core.prompt.utils.prompt_template_parser import PromptTemplateParser
-from models.account import Account, Tenant
+from models import Account, Tenant
 from models.api_based_extension import APIBasedExtension
 from models.model import App, AppMode, AppModelConfig
 from models.workflow import Workflow
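The hunks above, and the task-level hunks that follow, all make the same one-line change: tests now import ORM classes from the `models` package root instead of `models.account`. That only works if `api/models/__init__.py` re-exports the account models at the top level; its actual contents are not part of this diff, but the implied aggregator looks roughly like this sketch:

```python
# Hedged sketch of the re-export layer the rewritten imports rely on;
# the real api/models/__init__.py is not shown in this diff and may differ.
from .account import (
    Account,
    AccountIntegrate,
    AccountStatus,
    Tenant,
    TenantAccountJoin,
    TenantAccountRole,
)

# Only names re-exported here resolve via `from models import ...`.
__all__ = [
    "Account",
    "AccountIntegrate",
    "AccountStatus",
    "Tenant",
    "TenantAccountJoin",
    "TenantAccountRole",
]
```

Centralizing the re-exports keeps call sites stable if the module layout under `models/` changes again.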
diff --git a/api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py
index 96e673d855..68e485107c 100644
--- a/api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py
+++ b/api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py
@@ -6,7 +6,7 @@ from faker import Faker
 from core.rag.index_processor.constant.index_type import IndexType
 from extensions.ext_database import db
 from extensions.ext_redis import redis_client
-from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
 from models.dataset import Dataset, DatasetAutoDisableLog, Document, DocumentSegment
 from tasks.add_document_to_index_task import add_document_to_index_task
diff --git a/api/tests/test_containers_integration_tests/tasks/test_batch_clean_document_task.py b/api/tests/test_containers_integration_tests/tasks/test_batch_clean_document_task.py
index 8628e2af7f..f94c5b19e6 100644
--- a/api/tests/test_containers_integration_tests/tasks/test_batch_clean_document_task.py
+++ b/api/tests/test_containers_integration_tests/tasks/test_batch_clean_document_task.py
@@ -14,7 +14,7 @@ from faker import Faker
 from extensions.ext_database import db
 from libs.datetime_utils import naive_utc_now
-from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
 from models.dataset import Dataset, Document, DocumentSegment
 from models.model import UploadFile
 from tasks.batch_clean_document_task import batch_clean_document_task
diff --git a/api/tests/test_containers_integration_tests/tasks/test_batch_create_segment_to_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_batch_create_segment_to_index_task.py
index a9cfb6ffd4..1b844d6357 100644
--- a/api/tests/test_containers_integration_tests/tasks/test_batch_create_segment_to_index_task.py
+++ b/api/tests/test_containers_integration_tests/tasks/test_batch_create_segment_to_index_task.py
@@ -18,7 +18,7 @@ from unittest.mock import MagicMock, patch
 import pytest
 from faker import Faker

-from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
 from models.dataset import Dataset, Document, DocumentSegment
 from models.enums import CreatorUserRole
 from models.model import UploadFile
diff --git a/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py b/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py
index 99061d215f..45eb9d4f78 100644
--- a/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py
+++ b/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py
@@ -17,7 +17,7 @@ from unittest.mock import MagicMock, patch
 import pytest
 from faker import Faker

-from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
 from models.dataset import (
     AppDatasetJoin,
     Dataset,
diff --git a/api/tests/test_containers_integration_tests/tasks/test_create_segment_to_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_create_segment_to_index_task.py
index 987ebf8aca..8004175b2d 100644
--- a/api/tests/test_containers_integration_tests/tasks/test_create_segment_to_index_task.py
+++ b/api/tests/test_containers_integration_tests/tasks/test_create_segment_to_index_task.py
@@ -13,7 +13,7 @@ import pytest
 from faker import Faker

 from extensions.ext_redis import redis_client
-from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
 from models.dataset import Dataset, Document, DocumentSegment
 from tasks.create_segment_to_index_task import create_segment_to_index_task
diff --git a/api/tests/test_containers_integration_tests/tasks/test_disable_segment_from_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_disable_segment_from_index_task.py
index bc3701d098..8785c948d1 100644
--- 
a/api/tests/test_containers_integration_tests/tasks/test_disable_segment_from_index_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_disable_segment_from_index_task.py @@ -16,7 +16,7 @@ from faker import Faker from extensions.ext_database import db from extensions.ext_redis import redis_client -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, Document, DocumentSegment from tasks.disable_segment_from_index_task import disable_segment_from_index_task diff --git a/api/tests/test_containers_integration_tests/tasks/test_document_indexing_task.py b/api/tests/test_containers_integration_tests/tasks/test_document_indexing_task.py index a315577b78..448f6da5ec 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_document_indexing_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_document_indexing_task.py @@ -4,7 +4,7 @@ import pytest from faker import Faker from extensions.ext_database import db -from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, Document from tasks.document_indexing_task import document_indexing_task diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_owner_transfer_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_owner_transfer_task.py new file mode 100644 index 0000000000..e128b06b11 --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_mail_owner_transfer_task.py @@ -0,0 +1,401 @@ +""" +TestContainers-based integration tests for mail_owner_transfer_task. + +This module provides comprehensive integration tests for the mail owner transfer tasks +using TestContainers to ensure real email service integration and proper functionality +testing with actual database and service dependencies. +""" + +import logging +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from libs.email_i18n import EmailType +from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from tasks.mail_owner_transfer_task import ( + send_new_owner_transfer_notify_email_task, + send_old_owner_transfer_notify_email_task, + send_owner_transfer_confirm_task, +) + +logger = logging.getLogger(__name__) + + +class TestMailOwnerTransferTask: + """Integration tests for mail owner transfer tasks using testcontainers.""" + + @pytest.fixture + def mock_mail_dependencies(self): + """Mock setup for mail service dependencies.""" + with ( + patch("tasks.mail_owner_transfer_task.mail") as mock_mail, + patch("tasks.mail_owner_transfer_task.get_email_i18n_service") as mock_get_email_service, + ): + # Setup mock mail service + mock_mail.is_inited.return_value = True + + # Setup mock email service + mock_email_service = MagicMock() + mock_get_email_service.return_value = mock_email_service + + yield { + "mail": mock_mail, + "email_service": mock_email_service, + "get_email_service": mock_get_email_service, + } + + def _create_test_account_and_tenant(self, db_session_with_containers): + """ + Helper method to create test account and tenant for testing. 
+ + Args: + db_session_with_containers: Database session from testcontainers infrastructure + + Returns: + tuple: (account, tenant) - Created account and tenant instances + """ + fake = Faker() + + # Create account + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + db_session_with_containers.add(account) + db_session_with_containers.commit() + + # Create tenant + tenant = Tenant( + name=fake.company(), + status="normal", + ) + db_session_with_containers.add(tenant) + db_session_with_containers.commit() + + # Create tenant-account join + join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER.value, + current=True, + ) + db_session_with_containers.add(join) + db_session_with_containers.commit() + + return account, tenant + + def test_send_owner_transfer_confirm_task_success(self, db_session_with_containers, mock_mail_dependencies): + """ + Test successful owner transfer confirmation email sending. + + This test verifies: + - Proper email service initialization check + - Correct email service method calls with right parameters + - Email template context is properly constructed + """ + # Arrange: Create test data + account, tenant = self._create_test_account_and_tenant(db_session_with_containers) + + test_language = "en-US" + test_email = account.email + test_code = "123456" + test_workspace = tenant.name + + # Act: Execute the task + send_owner_transfer_confirm_task( + language=test_language, + to=test_email, + code=test_code, + workspace=test_workspace, + ) + + # Assert: Verify the expected outcomes + mock_mail_dependencies["mail"].is_inited.assert_called_once() + mock_mail_dependencies["get_email_service"].assert_called_once() + + # Verify email service was called with correct parameters + mock_mail_dependencies["email_service"].send_email.assert_called_once() + call_args = mock_mail_dependencies["email_service"].send_email.call_args + + assert call_args[1]["email_type"] == EmailType.OWNER_TRANSFER_CONFIRM + assert call_args[1]["language_code"] == test_language + assert call_args[1]["to"] == test_email + assert call_args[1]["template_context"]["to"] == test_email + assert call_args[1]["template_context"]["code"] == test_code + assert call_args[1]["template_context"]["WorkspaceName"] == test_workspace + + def test_send_owner_transfer_confirm_task_mail_not_initialized( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test owner transfer confirmation email when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No email service calls are made + - No exceptions are raised + """ + # Arrange: Set mail service as not initialized + mock_mail_dependencies["mail"].is_inited.return_value = False + + test_language = "en-US" + test_email = "test@example.com" + test_code = "123456" + test_workspace = "Test Workspace" + + # Act: Execute the task + send_owner_transfer_confirm_task( + language=test_language, + to=test_email, + code=test_code, + workspace=test_workspace, + ) + + # Assert: Verify no email service calls were made + mock_mail_dependencies["get_email_service"].assert_not_called() + mock_mail_dependencies["email_service"].send_email.assert_not_called() + + def test_send_owner_transfer_confirm_task_exception_handling( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test exception handling in owner transfer confirmation email. 
+ + This test verifies: + - Exceptions are properly caught and logged + - No exceptions are propagated to caller + - Email service calls are attempted + - Error logging works correctly + """ + # Arrange: Setup email service to raise exception + mock_mail_dependencies["email_service"].send_email.side_effect = Exception("Email service error") + + test_language = "en-US" + test_email = "test@example.com" + test_code = "123456" + test_workspace = "Test Workspace" + + # Act & Assert: Verify no exception is raised + try: + send_owner_transfer_confirm_task( + language=test_language, + to=test_email, + code=test_code, + workspace=test_workspace, + ) + except Exception as e: + pytest.fail(f"Task should not raise exceptions, but raised: {e}") + + # Verify email service was called despite the exception + mock_mail_dependencies["email_service"].send_email.assert_called_once() + + def test_send_old_owner_transfer_notify_email_task_success( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test successful old owner transfer notification email sending. + + This test verifies: + - Proper email service initialization check + - Correct email service method calls with right parameters + - Email template context includes new owner email + """ + # Arrange: Create test data + account, tenant = self._create_test_account_and_tenant(db_session_with_containers) + + test_language = "en-US" + test_email = account.email + test_workspace = tenant.name + test_new_owner_email = "newowner@example.com" + + # Act: Execute the task + send_old_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + new_owner_email=test_new_owner_email, + ) + + # Assert: Verify the expected outcomes + mock_mail_dependencies["mail"].is_inited.assert_called_once() + mock_mail_dependencies["get_email_service"].assert_called_once() + + # Verify email service was called with correct parameters + mock_mail_dependencies["email_service"].send_email.assert_called_once() + call_args = mock_mail_dependencies["email_service"].send_email.call_args + + assert call_args[1]["email_type"] == EmailType.OWNER_TRANSFER_OLD_NOTIFY + assert call_args[1]["language_code"] == test_language + assert call_args[1]["to"] == test_email + assert call_args[1]["template_context"]["to"] == test_email + assert call_args[1]["template_context"]["WorkspaceName"] == test_workspace + assert call_args[1]["template_context"]["NewOwnerEmail"] == test_new_owner_email + + def test_send_old_owner_transfer_notify_email_task_mail_not_initialized( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test old owner transfer notification email when mail service is not initialized. 
+ + This test verifies: + - Early return when mail service is not initialized + - No email service calls are made + - No exceptions are raised + """ + # Arrange: Set mail service as not initialized + mock_mail_dependencies["mail"].is_inited.return_value = False + + test_language = "en-US" + test_email = "test@example.com" + test_workspace = "Test Workspace" + test_new_owner_email = "newowner@example.com" + + # Act: Execute the task + send_old_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + new_owner_email=test_new_owner_email, + ) + + # Assert: Verify no email service calls were made + mock_mail_dependencies["get_email_service"].assert_not_called() + mock_mail_dependencies["email_service"].send_email.assert_not_called() + + def test_send_old_owner_transfer_notify_email_task_exception_handling( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test exception handling in old owner transfer notification email. + + This test verifies: + - Exceptions are properly caught and logged + - No exceptions are propagated to caller + - Email service calls are attempted + - Error logging works correctly + """ + # Arrange: Setup email service to raise exception + mock_mail_dependencies["email_service"].send_email.side_effect = Exception("Email service error") + + test_language = "en-US" + test_email = "test@example.com" + test_workspace = "Test Workspace" + test_new_owner_email = "newowner@example.com" + + # Act & Assert: Verify no exception is raised + try: + send_old_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + new_owner_email=test_new_owner_email, + ) + except Exception as e: + pytest.fail(f"Task should not raise exceptions, but raised: {e}") + + # Verify email service was called despite the exception + mock_mail_dependencies["email_service"].send_email.assert_called_once() + + def test_send_new_owner_transfer_notify_email_task_success( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test successful new owner transfer notification email sending. 
+ + This test verifies: + - Proper email service initialization check + - Correct email service method calls with right parameters + - Email template context is properly constructed + """ + # Arrange: Create test data + account, tenant = self._create_test_account_and_tenant(db_session_with_containers) + + test_language = "en-US" + test_email = account.email + test_workspace = tenant.name + + # Act: Execute the task + send_new_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + ) + + # Assert: Verify the expected outcomes + mock_mail_dependencies["mail"].is_inited.assert_called_once() + mock_mail_dependencies["get_email_service"].assert_called_once() + + # Verify email service was called with correct parameters + mock_mail_dependencies["email_service"].send_email.assert_called_once() + call_args = mock_mail_dependencies["email_service"].send_email.call_args + + assert call_args[1]["email_type"] == EmailType.OWNER_TRANSFER_NEW_NOTIFY + assert call_args[1]["language_code"] == test_language + assert call_args[1]["to"] == test_email + assert call_args[1]["template_context"]["to"] == test_email + assert call_args[1]["template_context"]["WorkspaceName"] == test_workspace + + def test_send_new_owner_transfer_notify_email_task_mail_not_initialized( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test new owner transfer notification email when mail service is not initialized. + + This test verifies: + - Early return when mail service is not initialized + - No email service calls are made + - No exceptions are raised + """ + # Arrange: Set mail service as not initialized + mock_mail_dependencies["mail"].is_inited.return_value = False + + test_language = "en-US" + test_email = "test@example.com" + test_workspace = "Test Workspace" + + # Act: Execute the task + send_new_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + ) + + # Assert: Verify no email service calls were made + mock_mail_dependencies["get_email_service"].assert_not_called() + mock_mail_dependencies["email_service"].send_email.assert_not_called() + + def test_send_new_owner_transfer_notify_email_task_exception_handling( + self, db_session_with_containers, mock_mail_dependencies + ): + """ + Test exception handling in new owner transfer notification email. 
+ + This test verifies: + - Exceptions are properly caught and logged + - No exceptions are propagated to caller + - Email service calls are attempted + - Error logging works correctly + """ + # Arrange: Setup email service to raise exception + mock_mail_dependencies["email_service"].send_email.side_effect = Exception("Email service error") + + test_language = "en-US" + test_email = "test@example.com" + test_workspace = "Test Workspace" + + # Act & Assert: Verify no exception is raised + try: + send_new_owner_transfer_notify_email_task( + language=test_language, + to=test_email, + workspace=test_workspace, + ) + except Exception as e: + pytest.fail(f"Task should not raise exceptions, but raised: {e}") + + # Verify email service was called despite the exception + mock_mail_dependencies["email_service"].send_email.assert_called_once() diff --git a/api/tests/test_containers_integration_tests/tasks/test_mail_register_task.py b/api/tests/test_containers_integration_tests/tasks/test_mail_register_task.py new file mode 100644 index 0000000000..e4db14623d --- /dev/null +++ b/api/tests/test_containers_integration_tests/tasks/test_mail_register_task.py @@ -0,0 +1,134 @@ +""" +TestContainers-based integration tests for mail_register_task.py + +This module provides integration tests for email registration tasks +using TestContainers to ensure real database and service interactions. +""" + +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from libs.email_i18n import EmailType +from tasks.mail_register_task import send_email_register_mail_task, send_email_register_mail_task_when_account_exist + + +class TestMailRegisterTask: + """Integration tests for mail_register_task using testcontainers.""" + + @pytest.fixture + def mock_mail_dependencies(self): + """Mock setup for mail service dependencies.""" + with ( + patch("tasks.mail_register_task.mail") as mock_mail, + patch("tasks.mail_register_task.get_email_i18n_service") as mock_get_email_service, + ): + # Setup mock mail service + mock_mail.is_inited.return_value = True + + # Setup mock email i18n service + mock_email_service = MagicMock() + mock_get_email_service.return_value = mock_email_service + + yield { + "mail": mock_mail, + "email_service": mock_email_service, + "get_email_service": mock_get_email_service, + } + + def test_send_email_register_mail_task_success(self, db_session_with_containers, mock_mail_dependencies): + """Test successful email registration mail sending.""" + fake = Faker() + language = "en-US" + to_email = fake.email() + code = fake.numerify("######") + + send_email_register_mail_task(language=language, to=to_email, code=code) + + mock_mail_dependencies["mail"].is_inited.assert_called_once() + mock_mail_dependencies["email_service"].send_email.assert_called_once_with( + email_type=EmailType.EMAIL_REGISTER, + language_code=language, + to=to_email, + template_context={ + "to": to_email, + "code": code, + }, + ) + + def test_send_email_register_mail_task_mail_not_initialized( + self, db_session_with_containers, mock_mail_dependencies + ): + """Test email registration task when mail service is not initialized.""" + mock_mail_dependencies["mail"].is_inited.return_value = False + + send_email_register_mail_task(language="en-US", to="test@example.com", code="123456") + + mock_mail_dependencies["get_email_service"].assert_not_called() + mock_mail_dependencies["email_service"].send_email.assert_not_called() + + def test_send_email_register_mail_task_exception_handling(self, 
db_session_with_containers, mock_mail_dependencies):
+        """Test email registration task exception handling."""
+        mock_mail_dependencies["email_service"].send_email.side_effect = Exception("Email service error")
+
+        fake = Faker()
+        to_email = fake.email()
+        code = fake.numerify("######")
+
+        with patch("tasks.mail_register_task.logger") as mock_logger:
+            send_email_register_mail_task(language="en-US", to=to_email, code=code)
+            mock_logger.exception.assert_called_once_with("Send email register mail to %s failed", to_email)
+
+    def test_send_email_register_mail_task_when_account_exist_success(
+        self, db_session_with_containers, mock_mail_dependencies
+    ):
+        """Test successful email registration mail sending when account exists."""
+        fake = Faker()
+        language = "en-US"
+        to_email = fake.email()
+        account_name = fake.name()
+
+        with patch("tasks.mail_register_task.dify_config") as mock_config:
+            mock_config.CONSOLE_WEB_URL = "https://console.dify.ai"
+
+            send_email_register_mail_task_when_account_exist(language=language, to=to_email, account_name=account_name)
+
+            mock_mail_dependencies["email_service"].send_email.assert_called_once_with(
+                email_type=EmailType.EMAIL_REGISTER_WHEN_ACCOUNT_EXIST,
+                language_code=language,
+                to=to_email,
+                template_context={
+                    "to": to_email,
+                    "login_url": "https://console.dify.ai/signin",
+                    "reset_password_url": "https://console.dify.ai/reset-password",
+                    "account_name": account_name,
+                },
+            )
+
+    def test_send_email_register_mail_task_when_account_exist_mail_not_initialized(
+        self, db_session_with_containers, mock_mail_dependencies
+    ):
+        """Test account exist email task when mail service is not initialized."""
+        mock_mail_dependencies["mail"].is_inited.return_value = False
+
+        send_email_register_mail_task_when_account_exist(
+            language="en-US", to="test@example.com", account_name="Test User"
+        )
+
+        mock_mail_dependencies["get_email_service"].assert_not_called()
+        mock_mail_dependencies["email_service"].send_email.assert_not_called()
+
+    def test_send_email_register_mail_task_when_account_exist_exception_handling(
+        self, db_session_with_containers, mock_mail_dependencies
+    ):
+        """Test account exist email task exception handling."""
+        mock_mail_dependencies["email_service"].send_email.side_effect = Exception("Email service error")
+
+        fake = Faker()
+        to_email = fake.email()
+        account_name = fake.name()
+
+        with patch("tasks.mail_register_task.logger") as mock_logger:
+            send_email_register_mail_task_when_account_exist(language="en-US", to=to_email, account_name=account_name)
+            mock_logger.exception.assert_called_once_with("Send email register mail to %s failed", to_email)
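Both new test modules pin down the same three-part contract for the mail tasks: bail out early when the mail subsystem is not initialized, delegate rendering and delivery to the i18n email service, and log failures without re-raising. A condensed sketch of that shape (the `mail` import path and the omitted task decorator are assumptions; the patch targets above imply module-level `mail`, `get_email_i18n_service`, and `logger` names):

```python
# Hedged sketch of the task shape these tests assert; not the repo's exact code.
import logging

from extensions.ext_mail import mail  # assumed location of the module-level mail client
from libs.email_i18n import EmailType, get_email_i18n_service

logger = logging.getLogger(__name__)


def send_email_register_mail_task(language: str, to: str, code: str) -> None:
    if not mail.is_inited():
        # Early return: the "not initialized" tests assert no service calls happen.
        return
    try:
        get_email_i18n_service().send_email(
            email_type=EmailType.EMAIL_REGISTER,
            language_code=language,
            to=to,
            template_context={"to": to, "code": code},
        )
    except Exception:
        # Swallow and log: the exception-handling tests assert this exact message.
        logger.exception("Send email register mail to %s failed", to)
```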
diff --git a/api/tests/unit_tests/controllers/console/auth/test_oauth.py b/api/tests/unit_tests/controllers/console/auth/test_oauth.py
index 67f4b85413..399caf8c4d 100644
--- a/api/tests/unit_tests/controllers/console/auth/test_oauth.py
+++ b/api/tests/unit_tests/controllers/console/auth/test_oauth.py
@@ -179,9 +179,7 @@ class TestOAuthCallback:
         oauth_setup["provider"].get_access_token.assert_called_once_with("test_code")
         oauth_setup["provider"].get_user_info.assert_called_once_with("access_token")

-        mock_redirect.assert_called_once_with(
-            "http://localhost:3000?access_token=jwt_access_token&refresh_token=jwt_refresh_token"
-        )
+        mock_redirect.assert_called_once_with("http://localhost:3000")

     @pytest.mark.parametrize(
         ("exception", "expected_error"),
@@ -224,8 +222,8 @@
             # CLOSED status: Currently NOT handled, will proceed to login (security issue)
             # This documents actual behavior. See test_defensive_check_for_closed_account_status for details
             (
-                AccountStatus.CLOSED,
-                "http://localhost:3000?access_token=jwt_access_token&refresh_token=jwt_refresh_token",
+                AccountStatus.CLOSED.value,
+                "http://localhost:3000",
             ),
         ],
     )
@@ -268,6 +266,7 @@
         mock_token_pair = MagicMock()
         mock_token_pair.access_token = "jwt_access_token"
         mock_token_pair.refresh_token = "jwt_refresh_token"
+        mock_token_pair.csrf_token = "csrf_token"
         mock_account_service.login.return_value = mock_token_pair

         with app.test_request_context("/auth/oauth/github/callback?code=test_code"):
@@ -299,6 +298,12 @@
         mock_account.status = AccountStatus.PENDING
         mock_generate_account.return_value = mock_account

+        mock_token_pair = MagicMock()
+        mock_token_pair.access_token = "jwt_access_token"
+        mock_token_pair.refresh_token = "jwt_refresh_token"
+        mock_token_pair.csrf_token = "csrf_token"
+        mock_account_service.login.return_value = mock_token_pair
+
         with app.test_request_context("/auth/oauth/github/callback?code=test_code"):
             resource.get("github")

@@ -361,6 +366,7 @@
         mock_token_pair = MagicMock()
         mock_token_pair.access_token = "jwt_access_token"
         mock_token_pair.refresh_token = "jwt_refresh_token"
+        mock_token_pair.csrf_token = "csrf_token"
         mock_account_service.login.return_value = mock_token_pair

         # Execute OAuth callback
@@ -368,9 +374,7 @@
             resource.get("github")

         # Verify current behavior: login succeeds (this is NOT ideal)
-        mock_redirect.assert_called_once_with(
-            "http://localhost:3000?access_token=jwt_access_token&refresh_token=jwt_refresh_token"
-        )
+        mock_redirect.assert_called_once_with("http://localhost:3000")
         mock_account_service.login.assert_called_once()

         # Document expected behavior in comments:
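Taken together, the assertion changes describe the new token hand-off: the callback no longer leaks `access_token`/`refresh_token` in the redirect URL, and the login result now carries a `csrf_token` as well, which points to cookie-based delivery. A hedged Flask sketch of that flow (cookie names and flags are assumptions, not confirmed by this diff):

```python
# Hedged sketch of redirecting without tokens in the URL; names and flags are assumed.
from flask import Response, redirect


def finish_oauth_login(console_web_url: str, token_pair) -> Response:
    # Bare redirect target, matching mock_redirect.assert_called_once_with(...) above.
    response = redirect(console_web_url)
    response.set_cookie("access_token", token_pair.access_token, httponly=True, secure=True)
    response.set_cookie("refresh_token", token_pair.refresh_token, httponly=True, secure=True)
    # The CSRF token stays readable to the frontend, so no HttpOnly flag here.
    response.set_cookie("csrf_token", token_pair.csrf_token, secure=True)
    return response
```

Keeping tokens out of the query string avoids them landing in browser history, referrer headers, and access logs.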
patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features): with pytest.raises(Exception) as exc_info: add_member() @@ -207,7 +212,10 @@ class TestBillingResourceLimits: # Test 1: Should reject when source is datasets with app.test_request_context("/?source=datasets"): - with patch("controllers.console.wraps._current_account", return_value=MockUser("test_user")): + with patch( + "controllers.console.wraps.current_account_with_tenant", + return_value=(MockUser("test_user"), "tenant123"), + ): with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features): with pytest.raises(Exception) as exc_info: upload_document() @@ -215,7 +223,10 @@ class TestBillingResourceLimits: # Test 2: Should allow when source is not datasets with app.test_request_context("/?source=other"): - with patch("controllers.console.wraps._current_account", return_value=MockUser("test_user")): + with patch( + "controllers.console.wraps.current_account_with_tenant", + return_value=(MockUser("test_user"), "tenant123"), + ): with patch("controllers.console.wraps.FeatureService.get_features", return_value=mock_features): result = upload_document() assert result == "document_uploaded" @@ -239,7 +250,9 @@ class TestRateLimiting: return "knowledge_success" # Act - with patch("controllers.console.wraps._current_account", return_value=MockUser("test_user")): + with patch( + "controllers.console.wraps.current_account_with_tenant", return_value=(MockUser("test_user"), "tenant123") + ): with patch( "controllers.console.wraps.FeatureService.get_knowledge_rate_limit", return_value=mock_rate_limit ): @@ -271,7 +284,10 @@ class TestRateLimiting: # Act & Assert with app.test_request_context(): - with patch("controllers.console.wraps._current_account", return_value=MockUser("test_user")): + with patch( + "controllers.console.wraps.current_account_with_tenant", + return_value=(MockUser("test_user"), "tenant123"), + ): with patch( "controllers.console.wraps.FeatureService.get_knowledge_rate_limit", return_value=mock_rate_limit ): diff --git a/api/tests/unit_tests/core/app/apps/advanced_chat/test_app_runner_conversation_variables.py b/api/tests/unit_tests/core/app/apps/advanced_chat/test_app_runner_conversation_variables.py index bb1d5e2f67..3a4fdc3cd8 100644 --- a/api/tests/unit_tests/core/app/apps/advanced_chat/test_app_runner_conversation_variables.py +++ b/api/tests/unit_tests/core/app/apps/advanced_chat/test_app_runner_conversation_variables.py @@ -99,6 +99,8 @@ class TestAdvancedChatAppRunnerConversationVariables: workflow=mock_workflow, system_user_id=str(uuid4()), app=MagicMock(), + workflow_execution_repository=MagicMock(), + workflow_node_execution_repository=MagicMock(), ) # Mock database session @@ -237,6 +239,8 @@ class TestAdvancedChatAppRunnerConversationVariables: workflow=mock_workflow, system_user_id=str(uuid4()), app=MagicMock(), + workflow_execution_repository=MagicMock(), + workflow_node_execution_repository=MagicMock(), ) # Mock database session @@ -390,6 +394,8 @@ class TestAdvancedChatAppRunnerConversationVariables: workflow=mock_workflow, system_user_id=str(uuid4()), app=MagicMock(), + workflow_execution_repository=MagicMock(), + workflow_node_execution_repository=MagicMock(), ) # Mock database session diff --git a/api/tests/unit_tests/core/app/apps/common/test_graph_runtime_state_support.py b/api/tests/unit_tests/core/app/apps/common/test_graph_runtime_state_support.py new file mode 100644 index 0000000000..cd5ea8986a --- /dev/null +++ 
diff --git a/api/tests/unit_tests/core/app/apps/advanced_chat/test_app_runner_conversation_variables.py b/api/tests/unit_tests/core/app/apps/advanced_chat/test_app_runner_conversation_variables.py
index bb1d5e2f67..3a4fdc3cd8 100644
--- a/api/tests/unit_tests/core/app/apps/advanced_chat/test_app_runner_conversation_variables.py
+++ b/api/tests/unit_tests/core/app/apps/advanced_chat/test_app_runner_conversation_variables.py
@@ -99,6 +99,8 @@ class TestAdvancedChatAppRunnerConversationVariables:
             workflow=mock_workflow,
             system_user_id=str(uuid4()),
             app=MagicMock(),
+            workflow_execution_repository=MagicMock(),
+            workflow_node_execution_repository=MagicMock(),
         )

         # Mock database session
@@ -237,6 +239,8 @@ class TestAdvancedChatAppRunnerConversationVariables:
             workflow=mock_workflow,
             system_user_id=str(uuid4()),
             app=MagicMock(),
+            workflow_execution_repository=MagicMock(),
+            workflow_node_execution_repository=MagicMock(),
         )

         # Mock database session
@@ -390,6 +394,8 @@ class TestAdvancedChatAppRunnerConversationVariables:
             workflow=mock_workflow,
             system_user_id=str(uuid4()),
             app=MagicMock(),
+            workflow_execution_repository=MagicMock(),
+            workflow_node_execution_repository=MagicMock(),
         )

         # Mock database session
diff --git a/api/tests/unit_tests/core/app/apps/common/test_graph_runtime_state_support.py b/api/tests/unit_tests/core/app/apps/common/test_graph_runtime_state_support.py
new file mode 100644
index 0000000000..cd5ea8986a
--- /dev/null
+++ b/api/tests/unit_tests/core/app/apps/common/test_graph_runtime_state_support.py
@@ -0,0 +1,63 @@
+from types import SimpleNamespace
+
+import pytest
+
+from core.app.apps.common.graph_runtime_state_support import GraphRuntimeStateSupport
+from core.workflow.runtime import GraphRuntimeState
+from core.workflow.runtime.variable_pool import VariablePool
+from core.workflow.system_variable import SystemVariable
+
+
+def _make_state(workflow_run_id: str | None) -> GraphRuntimeState:
+    variable_pool = VariablePool(system_variables=SystemVariable(workflow_execution_id=workflow_run_id))
+    return GraphRuntimeState(variable_pool=variable_pool, start_at=0.0)
+
+
+class _StubPipeline(GraphRuntimeStateSupport):
+    def __init__(self, *, cached_state: GraphRuntimeState | None, queue_state: GraphRuntimeState | None):
+        self._graph_runtime_state = cached_state
+        self._base_task_pipeline = SimpleNamespace(queue_manager=SimpleNamespace(graph_runtime_state=queue_state))
+
+
+def test_ensure_graph_runtime_initialized_caches_explicit_state():
+    explicit_state = _make_state("run-explicit")
+    pipeline = _StubPipeline(cached_state=None, queue_state=None)
+
+    resolved = pipeline._ensure_graph_runtime_initialized(explicit_state)
+
+    assert resolved is explicit_state
+    assert pipeline._graph_runtime_state is explicit_state
+
+
+def test_resolve_graph_runtime_state_reads_from_queue_when_cache_empty():
+    queued_state = _make_state("run-queue")
+    pipeline = _StubPipeline(cached_state=None, queue_state=queued_state)
+
+    resolved = pipeline._resolve_graph_runtime_state()
+
+    assert resolved is queued_state
+    assert pipeline._graph_runtime_state is queued_state
+
+
+def test_resolve_graph_runtime_state_raises_when_no_state_available():
+    pipeline = _StubPipeline(cached_state=None, queue_state=None)
+
+    with pytest.raises(ValueError):
+        pipeline._resolve_graph_runtime_state()
+
+
+def test_extract_workflow_run_id_returns_value():
+    state = _make_state("run-identifier")
+    pipeline = _StubPipeline(cached_state=state, queue_state=None)
+
+    run_id = pipeline._extract_workflow_run_id(state)
+
+    assert run_id == "run-identifier"
+
+
+def test_extract_workflow_run_id_raises_when_missing():
+    state = _make_state(None)
+    pipeline = _StubPipeline(cached_state=state, queue_state=None)
+
+    with pytest.raises(ValueError):
+        pipeline._extract_workflow_run_id(state)
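The stub-based tests above fully specify the mixin's lookup order: an explicitly passed state wins and is cached, an empty cache falls back to the queue manager's state, and the workflow run id lives on the state's system variables. A hedged reconstruction of that logic (attribute paths beyond those exercised by `_StubPipeline` are assumptions, and the real class may differ in details such as error messages):

```python
# Hedged reconstruction of the behavior the tests above pin down; not the
# actual GraphRuntimeStateSupport source.
class GraphRuntimeStateSupportSketch:
    _graph_runtime_state = None
    _base_task_pipeline = None

    def _ensure_graph_runtime_initialized(self, graph_runtime_state):
        # An explicitly supplied state is cached and returned as-is.
        self._graph_runtime_state = graph_runtime_state
        return graph_runtime_state

    def _resolve_graph_runtime_state(self):
        if self._graph_runtime_state is not None:
            return self._graph_runtime_state
        # Cache miss: fall back to the state published by the queue manager.
        queued = self._base_task_pipeline.queue_manager.graph_runtime_state
        if queued is None:
            raise ValueError("graph runtime state is not available")
        self._graph_runtime_state = queued
        return queued

    def _extract_workflow_run_id(self, graph_runtime_state):
        # Assumed attribute path: the tests build the state from a VariablePool
        # whose SystemVariable carries workflow_execution_id.
        run_id = graph_runtime_state.variable_pool.system_variables.workflow_execution_id
        if not run_id:
            raise ValueError("workflow run id is missing from system variables")
        return run_id
```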
diff --git a/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_process_data.py b/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_process_data.py
index 3366666a47..abe09fb8a4 100644
--- a/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_process_data.py
+++ b/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_process_data.py
@@ -3,8 +3,7 @@ Unit tests for WorkflowResponseConverter focusing on process_data truncation fun
 """

 import uuid
-from dataclasses import dataclass
-from datetime import datetime
+from collections.abc import Mapping
 from typing import Any
 from unittest.mock import Mock

@@ -12,24 +11,17 @@ import pytest

 from core.app.apps.common.workflow_response_converter import WorkflowResponseConverter
 from core.app.entities.app_invoke_entities import WorkflowAppGenerateEntity
-from core.app.entities.queue_entities import QueueNodeRetryEvent, QueueNodeSucceededEvent
-from core.workflow.entities.workflow_node_execution import WorkflowNodeExecution, WorkflowNodeExecutionStatus
+from core.app.entities.queue_entities import (
+    QueueNodeRetryEvent,
+    QueueNodeStartedEvent,
+    QueueNodeSucceededEvent,
+)
 from core.workflow.enums import NodeType
+from core.workflow.system_variable import SystemVariable
 from libs.datetime_utils import naive_utc_now
 from models import Account

-@dataclass
-class ProcessDataResponseScenario:
-    """Test scenario for process_data in responses."""
-
-    name: str
-    original_process_data: dict[str, Any] | None
-    truncated_process_data: dict[str, Any] | None
-    expected_response_data: dict[str, Any] | None
-    expected_truncated_flag: bool
-
-
 class TestWorkflowResponseConverterCenarios:
     """Test process_data truncation in WorkflowResponseConverter."""

@@ -39,6 +31,7 @@ class TestWorkflowResponseConverterCenarios:
         mock_app_config = Mock()
         mock_app_config.tenant_id = "test-tenant-id"
         mock_entity.app_config = mock_app_config
+        mock_entity.inputs = {}
         return mock_entity

     def create_workflow_response_converter(self) -> WorkflowResponseConverter:
@@ -50,54 +43,59 @@
         mock_user.name = "Test User"
         mock_user.email = "test@example.com"

-        return WorkflowResponseConverter(application_generate_entity=mock_entity, user=mock_user)
-
-    def create_workflow_node_execution(
-        self,
-        process_data: dict[str, Any] | None = None,
-        truncated_process_data: dict[str, Any] | None = None,
-        execution_id: str = "test-execution-id",
-    ) -> WorkflowNodeExecution:
-        """Create a WorkflowNodeExecution for testing."""
-        execution = WorkflowNodeExecution(
-            id=execution_id,
-            workflow_id="test-workflow-id",
-            workflow_execution_id="test-run-id",
-            index=1,
-            node_id="test-node-id",
-            node_type=NodeType.LLM,
-            title="Test Node",
-            process_data=process_data,
-            status=WorkflowNodeExecutionStatus.SUCCEEDED,
-            created_at=datetime.now(),
-            finished_at=datetime.now(),
+        system_variables = SystemVariable(workflow_id="wf-id", workflow_execution_id="initial-run-id")
+        return WorkflowResponseConverter(
+            application_generate_entity=mock_entity,
+            user=mock_user,
+            system_variables=system_variables,
         )

-        if truncated_process_data is not None:
-            execution.set_truncated_process_data(truncated_process_data)
+    def create_node_started_event(self, *, node_execution_id: str | None = None) -> QueueNodeStartedEvent:
+        """Create a QueueNodeStartedEvent for testing."""
+        return QueueNodeStartedEvent(
+            node_execution_id=node_execution_id or str(uuid.uuid4()),
+            node_id="test-node-id",
+            node_title="Test Node",
+            node_type=NodeType.CODE,
+            start_at=naive_utc_now(),
+            predecessor_node_id=None,
+            in_iteration_id=None,
+            in_loop_id=None,
+            provider_type="built-in",
+            provider_id="code",
+        )

-        return execution
-
-    def create_node_succeeded_event(self) -> QueueNodeSucceededEvent:
+    def create_node_succeeded_event(
+        self,
+        *,
+        node_execution_id: str,
+        process_data: Mapping[str, Any] | None = None,
+    ) -> QueueNodeSucceededEvent:
         """Create a QueueNodeSucceededEvent for testing."""
         return QueueNodeSucceededEvent(
             node_id="test-node-id",
             node_type=NodeType.CODE,
-            node_execution_id=str(uuid.uuid4()),
+            node_execution_id=node_execution_id,
             start_at=naive_utc_now(),
-            parallel_id=None,
-            parallel_start_node_id=None,
-            parent_parallel_id=None,
-            parent_parallel_start_node_id=None,
             in_iteration_id=None,
             in_loop_id=None,
+            inputs={},
+            process_data=process_data or {},
+            outputs={},
+            execution_metadata={},
         )

-    def create_node_retry_event(self) -> QueueNodeRetryEvent:
+    def create_node_retry_event(
+        self,
+        *,
+        node_execution_id: str,
+        process_data: Mapping[str, Any] | None = None,
+    ) -> QueueNodeRetryEvent:
         """Create a QueueNodeRetryEvent for testing."""
         return 
QueueNodeRetryEvent( inputs={"data": "inputs"}, outputs={"data": "outputs"}, + process_data=process_data or {}, error="oops", retry_index=1, node_id="test-node-id", @@ -105,12 +103,8 @@ class TestWorkflowResponseConverterCenarios: node_title="test code", provider_type="built-in", provider_id="code", - node_execution_id=str(uuid.uuid4()), + node_execution_id=node_execution_id, start_at=naive_utc_now(), - parallel_id=None, - parallel_start_node_id=None, - parent_parallel_id=None, - parent_parallel_start_node_id=None, in_iteration_id=None, in_loop_id=None, ) @@ -122,15 +116,28 @@ class TestWorkflowResponseConverterCenarios: original_data = {"large_field": "x" * 10000, "metadata": "info"} truncated_data = {"large_field": "[TRUNCATED]", "metadata": "info"} - execution = self.create_workflow_node_execution( - process_data=original_data, truncated_process_data=truncated_data + converter.workflow_start_to_stream_response(task_id="bootstrap", workflow_run_id="run-id", workflow_id="wf-id") + start_event = self.create_node_started_event() + converter.workflow_node_start_to_stream_response( + event=start_event, + task_id="test-task-id", ) - event = self.create_node_succeeded_event() + + event = self.create_node_succeeded_event( + node_execution_id=start_event.node_execution_id, + process_data=original_data, + ) + + def fake_truncate(mapping): + if mapping == dict(original_data): + return truncated_data, True + return mapping, False + + converter._truncator.truncate_variable_mapping = fake_truncate # type: ignore[assignment] response = converter.workflow_node_finish_to_stream_response( event=event, task_id="test-task-id", - workflow_node_execution=execution, ) # Response should use truncated data, not original @@ -145,13 +152,26 @@ class TestWorkflowResponseConverterCenarios: original_data = {"small": "data"} - execution = self.create_workflow_node_execution(process_data=original_data) - event = self.create_node_succeeded_event() + converter.workflow_start_to_stream_response(task_id="bootstrap", workflow_run_id="run-id", workflow_id="wf-id") + start_event = self.create_node_started_event() + converter.workflow_node_start_to_stream_response( + event=start_event, + task_id="test-task-id", + ) + + event = self.create_node_succeeded_event( + node_execution_id=start_event.node_execution_id, + process_data=original_data, + ) + + def fake_truncate(mapping): + return mapping, False + + converter._truncator.truncate_variable_mapping = fake_truncate # type: ignore[assignment] response = converter.workflow_node_finish_to_stream_response( event=event, task_id="test-task-id", - workflow_node_execution=execution, ) # Response should use original data @@ -163,18 +183,31 @@ class TestWorkflowResponseConverterCenarios: """Test node finish response when process_data is None.""" converter = self.create_workflow_response_converter() - execution = self.create_workflow_node_execution(process_data=None) - event = self.create_node_succeeded_event() + converter.workflow_start_to_stream_response(task_id="bootstrap", workflow_run_id="run-id", workflow_id="wf-id") + start_event = self.create_node_started_event() + converter.workflow_node_start_to_stream_response( + event=start_event, + task_id="test-task-id", + ) + + event = self.create_node_succeeded_event( + node_execution_id=start_event.node_execution_id, + process_data=None, + ) + + def fake_truncate(mapping): + return mapping, False + + converter._truncator.truncate_variable_mapping = fake_truncate # type: ignore[assignment] response = 
converter.workflow_node_finish_to_stream_response( event=event, task_id="test-task-id", - workflow_node_execution=execution, ) - # Response should have None process_data + # Response should normalize missing process_data to an empty mapping assert response is not None - assert response.data.process_data is None + assert response.data.process_data == {} assert response.data.process_data_truncated is False def test_workflow_node_retry_response_uses_truncated_process_data(self): @@ -184,15 +217,28 @@ class TestWorkflowResponseConverterCenarios: original_data = {"large_field": "x" * 10000, "metadata": "info"} truncated_data = {"large_field": "[TRUNCATED]", "metadata": "info"} - execution = self.create_workflow_node_execution( - process_data=original_data, truncated_process_data=truncated_data + converter.workflow_start_to_stream_response(task_id="bootstrap", workflow_run_id="run-id", workflow_id="wf-id") + start_event = self.create_node_started_event() + converter.workflow_node_start_to_stream_response( + event=start_event, + task_id="test-task-id", ) - event = self.create_node_retry_event() + + event = self.create_node_retry_event( + node_execution_id=start_event.node_execution_id, + process_data=original_data, + ) + + def fake_truncate(mapping): + if mapping == dict(original_data): + return truncated_data, True + return mapping, False + + converter._truncator.truncate_variable_mapping = fake_truncate # type: ignore[assignment] response = converter.workflow_node_retry_to_stream_response( event=event, task_id="test-task-id", - workflow_node_execution=execution, ) # Response should use truncated data, not original @@ -207,224 +253,72 @@ class TestWorkflowResponseConverterCenarios: original_data = {"small": "data"} - execution = self.create_workflow_node_execution(process_data=original_data) - event = self.create_node_retry_event() + converter.workflow_start_to_stream_response(task_id="bootstrap", workflow_run_id="run-id", workflow_id="wf-id") + start_event = self.create_node_started_event() + converter.workflow_node_start_to_stream_response( + event=start_event, + task_id="test-task-id", + ) + + event = self.create_node_retry_event( + node_execution_id=start_event.node_execution_id, + process_data=original_data, + ) + + def fake_truncate(mapping): + return mapping, False + + converter._truncator.truncate_variable_mapping = fake_truncate # type: ignore[assignment] response = converter.workflow_node_retry_to_stream_response( event=event, task_id="test-task-id", - workflow_node_execution=execution, ) - # Response should use original data assert response is not None assert response.data.process_data == original_data assert response.data.process_data_truncated is False def test_iteration_and_loop_nodes_return_none(self): - """Test that iteration and loop nodes return None (no change from existing behavior).""" + """Test that iteration and loop nodes return None (no streaming events).""" converter = self.create_workflow_response_converter() - # Test iteration node - iteration_execution = self.create_workflow_node_execution(process_data={"test": "data"}) - iteration_execution.node_type = NodeType.ITERATION - - event = self.create_node_succeeded_event() - - response = converter.workflow_node_finish_to_stream_response( - event=event, - task_id="test-task-id", - workflow_node_execution=iteration_execution, - ) - - # Should return None for iteration nodes - assert response is None - - # Test loop node - loop_execution = self.create_workflow_node_execution(process_data={"test": "data"}) - 
loop_execution.node_type = NodeType.LOOP - - response = converter.workflow_node_finish_to_stream_response( - event=event, - task_id="test-task-id", - workflow_node_execution=loop_execution, - ) - - # Should return None for loop nodes - assert response is None - - def test_execution_without_workflow_execution_id_returns_none(self): - """Test that executions without workflow_execution_id return None.""" - converter = self.create_workflow_response_converter() - - execution = self.create_workflow_node_execution(process_data={"test": "data"}) - execution.workflow_execution_id = None # Single-step debugging - - event = self.create_node_succeeded_event() - - response = converter.workflow_node_finish_to_stream_response( - event=event, - task_id="test-task-id", - workflow_node_execution=execution, - ) - - # Should return None for single-step debugging - assert response is None - - @staticmethod - def get_process_data_response_scenarios() -> list[ProcessDataResponseScenario]: - """Create test scenarios for process_data responses.""" - return [ - ProcessDataResponseScenario( - name="none_process_data", - original_process_data=None, - truncated_process_data=None, - expected_response_data=None, - expected_truncated_flag=False, - ), - ProcessDataResponseScenario( - name="small_process_data_no_truncation", - original_process_data={"small": "data"}, - truncated_process_data=None, - expected_response_data={"small": "data"}, - expected_truncated_flag=False, - ), - ProcessDataResponseScenario( - name="large_process_data_with_truncation", - original_process_data={"large": "x" * 10000, "metadata": "info"}, - truncated_process_data={"large": "[TRUNCATED]", "metadata": "info"}, - expected_response_data={"large": "[TRUNCATED]", "metadata": "info"}, - expected_truncated_flag=True, - ), - ProcessDataResponseScenario( - name="empty_process_data", - original_process_data={}, - truncated_process_data=None, - expected_response_data={}, - expected_truncated_flag=False, - ), - ProcessDataResponseScenario( - name="complex_data_with_truncation", - original_process_data={ - "logs": ["entry"] * 1000, # Large array - "config": {"setting": "value"}, - "status": "processing", - }, - truncated_process_data={ - "logs": "[TRUNCATED: 1000 items]", - "config": {"setting": "value"}, - "status": "processing", - }, - expected_response_data={ - "logs": "[TRUNCATED: 1000 items]", - "config": {"setting": "value"}, - "status": "processing", - }, - expected_truncated_flag=True, - ), - ] - - @pytest.mark.parametrize( - "scenario", - get_process_data_response_scenarios(), - ids=[scenario.name for scenario in get_process_data_response_scenarios()], - ) - def test_node_finish_response_scenarios(self, scenario: ProcessDataResponseScenario): - """Test various scenarios for node finish responses.""" - - mock_user = Mock(spec=Account) - mock_user.id = "test-user-id" - mock_user.name = "Test User" - mock_user.email = "test@example.com" - - converter = WorkflowResponseConverter( - application_generate_entity=Mock(spec=WorkflowAppGenerateEntity, app_config=Mock(tenant_id="test-tenant")), - user=mock_user, - ) - - execution = WorkflowNodeExecution( - id="test-execution-id", - workflow_id="test-workflow-id", - workflow_execution_id="test-run-id", - index=1, - node_id="test-node-id", - node_type=NodeType.LLM, - title="Test Node", - process_data=scenario.original_process_data, - status=WorkflowNodeExecutionStatus.SUCCEEDED, - created_at=datetime.now(), - finished_at=datetime.now(), - ) - - if scenario.truncated_process_data is not None: - 
execution.set_truncated_process_data(scenario.truncated_process_data) - - event = QueueNodeSucceededEvent( - node_id="test-node-id", - node_type=NodeType.CODE, + iteration_event = QueueNodeSucceededEvent( + node_id="iteration-node", + node_type=NodeType.ITERATION, node_execution_id=str(uuid.uuid4()), start_at=naive_utc_now(), - parallel_id=None, - parallel_start_node_id=None, - parent_parallel_id=None, - parent_parallel_start_node_id=None, in_iteration_id=None, in_loop_id=None, + inputs={}, + process_data={}, + outputs={}, + execution_metadata={}, ) response = converter.workflow_node_finish_to_stream_response( - event=event, + event=iteration_event, task_id="test-task-id", - workflow_node_execution=execution, ) + assert response is None - assert response is not None - assert response.data.process_data == scenario.expected_response_data - assert response.data.process_data_truncated == scenario.expected_truncated_flag - - @pytest.mark.parametrize( - "scenario", - get_process_data_response_scenarios(), - ids=[scenario.name for scenario in get_process_data_response_scenarios()], - ) - def test_node_retry_response_scenarios(self, scenario: ProcessDataResponseScenario): - """Test various scenarios for node retry responses.""" - - mock_user = Mock(spec=Account) - mock_user.id = "test-user-id" - mock_user.name = "Test User" - mock_user.email = "test@example.com" - - converter = WorkflowResponseConverter( - application_generate_entity=Mock(spec=WorkflowAppGenerateEntity, app_config=Mock(tenant_id="test-tenant")), - user=mock_user, - ) - - execution = WorkflowNodeExecution( - id="test-execution-id", - workflow_id="test-workflow-id", - workflow_execution_id="test-run-id", - index=1, - node_id="test-node-id", - node_type=NodeType.LLM, - title="Test Node", - process_data=scenario.original_process_data, - status=WorkflowNodeExecutionStatus.FAILED, # Retry scenario - created_at=datetime.now(), - finished_at=datetime.now(), - ) - - if scenario.truncated_process_data is not None: - execution.set_truncated_process_data(scenario.truncated_process_data) - - event = self.create_node_retry_event() - - response = converter.workflow_node_retry_to_stream_response( - event=event, + loop_event = iteration_event.model_copy(update={"node_type": NodeType.LOOP}) + response = converter.workflow_node_finish_to_stream_response( + event=loop_event, task_id="test-task-id", - workflow_node_execution=execution, + ) + assert response is None + + def test_finish_without_start_raises(self): + """Ensure finish responses require a prior workflow start.""" + converter = self.create_workflow_response_converter() + event = self.create_node_succeeded_event( + node_execution_id=str(uuid.uuid4()), + process_data={}, ) - assert response is not None - assert response.data.process_data == scenario.expected_response_data - assert response.data.process_data_truncated == scenario.expected_truncated_flag + with pytest.raises(ValueError): + converter.workflow_node_finish_to_stream_response( + event=event, + task_id="test-task-id", + ) diff --git a/api/tests/unit_tests/core/rag/datasource/vdb/alibabacloud_mysql/test_alibabacloud_mysql_vector.py b/api/tests/unit_tests/core/rag/datasource/vdb/alibabacloud_mysql/test_alibabacloud_mysql_vector.py index 44fe272c8c..8ccd739e64 100644 --- a/api/tests/unit_tests/core/rag/datasource/vdb/alibabacloud_mysql/test_alibabacloud_mysql_vector.py +++ b/api/tests/unit_tests/core/rag/datasource/vdb/alibabacloud_mysql/test_alibabacloud_mysql_vector.py @@ -110,19 +110,6 @@ class 
TestAlibabaCloudMySQLVector(unittest.TestCase): assert mock_cursor.execute.call_count >= 3 # CREATE TABLE + 2 indexes mock_redis.set.assert_called_once() - def test_config_validation(self): - """Test configuration validation.""" - # Test missing required fields - with pytest.raises(ValueError): - AlibabaCloudMySQLVectorConfig( - host="", # Empty host should raise error - port=3306, - user="test", - password="test", - database="test", - max_connection=5, - ) - @patch( "core.rag.datasource.vdb.alibabacloud_mysql.alibabacloud_mysql_vector.mysql.connector.pooling.MySQLConnectionPool" ) @@ -718,5 +705,29 @@ class TestAlibabaCloudMySQLVector(unittest.TestCase): mock_cursor.fetchone.side_effect = [{"VERSION()": "8.0.36"}, {"vector_support": True}] +@pytest.mark.parametrize( + "invalid_config_override", + [ + {"host": ""}, # Test empty host + {"port": 0}, # Test invalid port + {"max_connection": 0}, # Test invalid max_connection + ], +) +def test_config_validation_parametrized(invalid_config_override): + """Test configuration validation for various invalid inputs using parametrize.""" + config = { + "host": "localhost", + "port": 3306, + "user": "test", + "password": "test", + "database": "test", + "max_connection": 5, + } + config.update(invalid_config_override) + + with pytest.raises(ValueError): + AlibabaCloudMySQLVectorConfig(**config) + + if __name__ == "__main__": unittest.main() diff --git a/api/tests/unit_tests/core/tools/test_tool_entities.py b/api/tests/unit_tests/core/tools/test_tool_entities.py new file mode 100644 index 0000000000..a5b7e8a9a3 --- /dev/null +++ b/api/tests/unit_tests/core/tools/test_tool_entities.py @@ -0,0 +1,29 @@ +from core.tools.entities.common_entities import I18nObject +from core.tools.entities.tool_entities import ToolEntity, ToolIdentity, ToolInvokeMessage + + +def _make_identity() -> ToolIdentity: + return ToolIdentity( + author="author", + name="tool", + label=I18nObject(en_US="Label"), + provider="builtin", + ) + + +def test_log_message_metadata_none_defaults_to_empty_dict(): + log_message = ToolInvokeMessage.LogMessage( + id="log-1", + label="Log entry", + status=ToolInvokeMessage.LogMessage.LogStatus.START, + data={}, + metadata=None, + ) + + assert log_message.metadata == {} + + +def test_tool_entity_output_schema_none_defaults_to_empty_dict(): + entity = ToolEntity(identity=_make_identity(), output_schema=None) + + assert entity.output_schema == {} diff --git a/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py b/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py index 17e3ebeea0..c68aad0b22 100644 --- a/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py +++ b/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py @@ -34,12 +34,17 @@ def test_workflow_tool_should_raise_tool_invoke_error_when_result_has_error_fiel monkeypatch.setattr(tool, "_get_app", lambda *args, **kwargs: None) monkeypatch.setattr(tool, "_get_workflow", lambda *args, **kwargs: None) + # Mock user resolution to avoid database access + from unittest.mock import Mock + + mock_user = Mock() + monkeypatch.setattr(tool, "_resolve_user", lambda *args, **kwargs: mock_user) + # replace `WorkflowAppGenerator.generate` 's return value. 
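+    # The stub below makes generate() return a payload whose "data" carries an
+    # "error" field; iterating the tool's generator is then expected to surface
+    # it as a ToolInvokeError (asserted with pytest.raises further down).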
monkeypatch.setattr( "core.app.apps.workflow.app_generator.WorkflowAppGenerator.generate", lambda *args, **kwargs: {"data": {"error": "oops"}}, ) - monkeypatch.setattr("libs.login.current_user", lambda *args, **kwargs: None) with pytest.raises(ToolInvokeError) as exc_info: # WorkflowTool always returns a generator, so we need to iterate to diff --git a/api/tests/unit_tests/core/variables/test_segment.py b/api/tests/unit_tests/core/variables/test_segment.py index 5cd595088a..af4f96ba23 100644 --- a/api/tests/unit_tests/core/variables/test_segment.py +++ b/api/tests/unit_tests/core/variables/test_segment.py @@ -37,7 +37,7 @@ from core.variables.variables import ( Variable, VariableUnion, ) -from core.workflow.entities import VariablePool +from core.workflow.runtime import VariablePool from core.workflow.system_variable import SystemVariable diff --git a/api/tests/unit_tests/core/workflow/entities/test_graph_runtime_state.py b/api/tests/unit_tests/core/workflow/entities/test_graph_runtime_state.py index 2614424dc7..5ecaeb60ac 100644 --- a/api/tests/unit_tests/core/workflow/entities/test_graph_runtime_state.py +++ b/api/tests/unit_tests/core/workflow/entities/test_graph_runtime_state.py @@ -1,9 +1,11 @@ +import json from time import time +from unittest.mock import MagicMock, patch import pytest -from core.workflow.entities.graph_runtime_state import GraphRuntimeState -from core.workflow.entities.variable_pool import VariablePool +from core.model_runtime.entities.llm_entities import LLMUsage +from core.workflow.runtime import GraphRuntimeState, ReadOnlyGraphRuntimeStateWrapper, VariablePool class TestGraphRuntimeState: @@ -95,3 +97,141 @@ class TestGraphRuntimeState: # Test add_tokens validation with pytest.raises(ValueError): state.add_tokens(-1) + + def test_ready_queue_default_instantiation(self): + state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time()) + + queue = state.ready_queue + + from core.workflow.graph_engine.ready_queue import InMemoryReadyQueue + + assert isinstance(queue, InMemoryReadyQueue) + assert state.ready_queue is queue + + def test_graph_execution_lazy_instantiation(self): + state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time()) + + execution = state.graph_execution + + from core.workflow.graph_engine.domain.graph_execution import GraphExecution + + assert isinstance(execution, GraphExecution) + assert execution.workflow_id == "" + assert state.graph_execution is execution + + def test_response_coordinator_configuration(self): + variable_pool = VariablePool() + state = GraphRuntimeState(variable_pool=variable_pool, start_at=time()) + + with pytest.raises(ValueError): + _ = state.response_coordinator + + mock_graph = MagicMock() + with patch("core.workflow.graph_engine.response_coordinator.ResponseStreamCoordinator") as coordinator_cls: + coordinator_instance = MagicMock() + coordinator_cls.return_value = coordinator_instance + + state.configure(graph=mock_graph) + + assert state.response_coordinator is coordinator_instance + coordinator_cls.assert_called_once_with(variable_pool=variable_pool, graph=mock_graph) + + # Configure again with same graph should be idempotent + state.configure(graph=mock_graph) + + other_graph = MagicMock() + with pytest.raises(ValueError): + state.attach_graph(other_graph) + + def test_read_only_wrapper_exposes_additional_state(self): + state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time()) + state.configure() + + wrapper = ReadOnlyGraphRuntimeStateWrapper(state) + + assert 
wrapper.ready_queue_size == 0 + assert wrapper.exceptions_count == 0 + + def test_read_only_wrapper_serializes_runtime_state(self): + state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time()) + state.total_tokens = 5 + state.set_output("result", {"success": True}) + state.ready_queue.put("node-1") + + wrapper = ReadOnlyGraphRuntimeStateWrapper(state) + + wrapper_snapshot = json.loads(wrapper.dumps()) + state_snapshot = json.loads(state.dumps()) + + assert wrapper_snapshot == state_snapshot + + def test_dumps_and_loads_roundtrip_with_response_coordinator(self): + variable_pool = VariablePool() + variable_pool.add(("node1", "value"), "payload") + + state = GraphRuntimeState(variable_pool=variable_pool, start_at=time()) + state.total_tokens = 10 + state.node_run_steps = 3 + state.set_output("final", {"result": True}) + usage = LLMUsage.from_metadata( + { + "prompt_tokens": 2, + "completion_tokens": 3, + "total_tokens": 5, + "total_price": "1.23", + "currency": "USD", + "latency": 0.5, + } + ) + state.llm_usage = usage + state.ready_queue.put("node-A") + + graph_execution = state.graph_execution + graph_execution.workflow_id = "wf-123" + graph_execution.exceptions_count = 4 + graph_execution.started = True + + class StubCoordinator: + def __init__(self) -> None: + self.state = "initial" + + def dumps(self) -> str: + return json.dumps({"state": self.state}) + + def loads(self, data: str) -> None: + payload = json.loads(data) + self.state = payload["state"] + + mock_graph = MagicMock() + stub = StubCoordinator() + with patch.object(GraphRuntimeState, "_build_response_coordinator", return_value=stub): + state.attach_graph(mock_graph) + + stub.state = "configured" + + snapshot = state.dumps() + + restored = GraphRuntimeState(variable_pool=VariablePool(), start_at=0.0) + restored.loads(snapshot) + + assert restored.total_tokens == 10 + assert restored.node_run_steps == 3 + assert restored.get_output("final") == {"result": True} + assert restored.llm_usage.total_tokens == usage.total_tokens + assert restored.ready_queue.qsize() == 1 + assert restored.ready_queue.get(timeout=0.01) == "node-A" + + restored_segment = restored.variable_pool.get(("node1", "value")) + assert restored_segment is not None + assert restored_segment.value == "payload" + + restored_execution = restored.graph_execution + assert restored_execution.workflow_id == "wf-123" + assert restored_execution.exceptions_count == 4 + assert restored_execution.started is True + + new_stub = StubCoordinator() + with patch.object(GraphRuntimeState, "_build_response_coordinator", return_value=new_stub): + restored.attach_graph(mock_graph) + + assert new_stub.state == "configured" diff --git a/api/tests/unit_tests/core/workflow/entities/test_variable_pool.py b/api/tests/unit_tests/core/workflow/entities/test_variable_pool.py index 68fe82d05e..f9de456b19 100644 --- a/api/tests/unit_tests/core/workflow/entities/test_variable_pool.py +++ b/api/tests/unit_tests/core/workflow/entities/test_variable_pool.py @@ -4,7 +4,7 @@ from core.variables.segments import ( NoneSegment, StringSegment, ) -from core.workflow.entities.variable_pool import VariablePool +from core.workflow.runtime import VariablePool class TestVariablePoolGetAndNestedAttribute: diff --git a/api/tests/unit_tests/core/workflow/graph/test_graph_builder.py b/api/tests/unit_tests/core/workflow/graph/test_graph_builder.py new file mode 100644 index 0000000000..15d1dcb48d --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph/test_graph_builder.py @@ -0,0 +1,59 @@ +from 
unittest.mock import MagicMock + +import pytest + +from core.workflow.enums import NodeType +from core.workflow.graph import Graph +from core.workflow.nodes.base.node import Node + + +def _make_node(node_id: str, node_type: NodeType = NodeType.START) -> Node: + node = MagicMock(spec=Node) + node.id = node_id + node.node_type = node_type + node.execution_type = None # attribute not used in builder path + return node + + +def test_graph_builder_creates_linear_graph(): + builder = Graph.new() + root = _make_node("root", NodeType.START) + mid = _make_node("mid", NodeType.LLM) + end = _make_node("end", NodeType.END) + + graph = builder.add_root(root).add_node(mid).add_node(end).build() + + assert graph.root_node is root + assert graph.nodes == {"root": root, "mid": mid, "end": end} + assert len(graph.edges) == 2 + first_edge = next(iter(graph.edges.values())) + assert first_edge.tail == "root" + assert first_edge.head == "mid" + assert graph.out_edges["mid"] == [edge_id for edge_id, edge in graph.edges.items() if edge.tail == "mid"] + + +def test_graph_builder_supports_custom_predecessor(): + builder = Graph.new() + root = _make_node("root") + branch = _make_node("branch") + other = _make_node("other") + + graph = builder.add_root(root).add_node(branch).add_node(other, from_node_id="root").build() + + outgoing_root = graph.out_edges["root"] + assert len(outgoing_root) == 2 + edge_targets = {graph.edges[eid].head for eid in outgoing_root} + assert edge_targets == {"branch", "other"} + + +def test_graph_builder_validates_usage(): + builder = Graph.new() + node = _make_node("node") + + with pytest.raises(ValueError, match="Root node"): + builder.add_node(node) + + builder.add_root(node) + duplicate = _make_node("node") + with pytest.raises(ValueError, match="Duplicate"): + builder.add_node(duplicate) diff --git a/api/tests/unit_tests/core/workflow/graph_engine/README.md b/api/tests/unit_tests/core/workflow/graph_engine/README.md index bff82b3ac4..3fff4cf6a9 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/README.md +++ b/api/tests/unit_tests/core/workflow/graph_engine/README.md @@ -20,9 +20,6 @@ The TableTestRunner (`test_table_runner.py`) provides a robust table-driven test - **Mock configuration** - Seamless integration with the auto-mock system - **Performance metrics** - Track execution times and bottlenecks - **Detailed error reporting** - Comprehensive failure diagnostics -- **Test tagging** - Organize and filter tests by tags -- **Retry mechanism** - Handle flaky tests gracefully -- **Custom validators** - Define custom validation logic ### Basic Usage @@ -68,49 +65,6 @@ suite_result = runner.run_table_tests( print(f"Success rate: {suite_result.success_rate:.1f}%") ``` -#### Test Tagging and Filtering - -```python -test_case = WorkflowTestCase( - fixture_path="workflow", - inputs={}, - expected_outputs={}, - tags=["smoke", "critical"], -) - -# Run only tests with specific tags -suite_result = runner.run_table_tests( - test_cases, - tags_filter=["smoke"] -) -``` - -#### Retry Mechanism - -```python -test_case = WorkflowTestCase( - fixture_path="flaky_workflow", - inputs={}, - expected_outputs={}, - retry_count=2, # Retry up to 2 times on failure -) -``` - -#### Custom Validators - -```python -def custom_validator(outputs: dict) -> bool: - # Custom validation logic - return "error" not in outputs.get("status", "") - -test_case = WorkflowTestCase( - fixture_path="workflow", - inputs={}, - expected_outputs={"status": "success"}, - custom_validator=custom_validator, -) -``` - #### 
Event Sequence Validation ```python diff --git a/api/tests/unit_tests/core/workflow/graph_engine/command_channels/test_redis_channel.py b/api/tests/unit_tests/core/workflow/graph_engine/command_channels/test_redis_channel.py index 7ebccf83a7..8677325d4e 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/command_channels/test_redis_channel.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/command_channels/test_redis_channel.py @@ -35,11 +35,15 @@ class TestRedisChannel: """Test sending a command to Redis.""" mock_redis = MagicMock() mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + context = MagicMock() + context.__enter__.return_value = mock_pipe + context.__exit__.return_value = None + mock_redis.pipeline.return_value = context channel = RedisChannel(mock_redis, "test:key", 3600) + pending_key = "test:key:pending" + # Create a test command command = GraphEngineCommand(command_type=CommandType.ABORT) @@ -55,6 +59,7 @@ class TestRedisChannel: # Verify expire was set mock_pipe.expire.assert_called_once_with("test:key", 3600) + mock_pipe.set.assert_called_once_with(pending_key, "1", ex=3600) # Verify execute was called mock_pipe.execute.assert_called_once() @@ -62,33 +67,48 @@ class TestRedisChannel: def test_fetch_commands_empty(self): """Test fetching commands when Redis list is empty.""" mock_redis = MagicMock() - mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context] - # Simulate empty list - mock_pipe.execute.return_value = [[], 1] # Empty list, delete successful + # No pending marker + pending_pipe.execute.return_value = [None, 0] + mock_redis.llen.return_value = 0 channel = RedisChannel(mock_redis, "test:key") commands = channel.fetch_commands() assert commands == [] - mock_pipe.lrange.assert_called_once_with("test:key", 0, -1) - mock_pipe.delete.assert_called_once_with("test:key") + mock_redis.pipeline.assert_called_once() + fetch_pipe.lrange.assert_not_called() + fetch_pipe.delete.assert_not_called() def test_fetch_commands_with_abort_command(self): """Test fetching abort commands from Redis.""" mock_redis = MagicMock() - mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] # Create abort command data abort_command = AbortCommand() command_json = json.dumps(abort_command.model_dump()) # Simulate Redis returning one command - mock_pipe.execute.return_value = [[command_json.encode()], 1] + pending_pipe.execute.return_value = [b"1", 1] + 
fetch_pipe.execute.return_value = [[command_json.encode()], 1] channel = RedisChannel(mock_redis, "test:key") commands = channel.fetch_commands() @@ -100,9 +120,15 @@ class TestRedisChannel: def test_fetch_commands_multiple(self): """Test fetching multiple commands from Redis.""" mock_redis = MagicMock() - mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] # Create multiple commands command1 = GraphEngineCommand(command_type=CommandType.ABORT) @@ -112,7 +138,8 @@ class TestRedisChannel: command2_json = json.dumps(command2.model_dump()) # Simulate Redis returning multiple commands - mock_pipe.execute.return_value = [[command1_json.encode(), command2_json.encode()], 1] + pending_pipe.execute.return_value = [b"1", 1] + fetch_pipe.execute.return_value = [[command1_json.encode(), command2_json.encode()], 1] channel = RedisChannel(mock_redis, "test:key") commands = channel.fetch_commands() @@ -124,9 +151,15 @@ class TestRedisChannel: def test_fetch_commands_skips_invalid_json(self): """Test that invalid JSON commands are skipped.""" mock_redis = MagicMock() - mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] # Mix valid and invalid JSON valid_command = AbortCommand() @@ -134,7 +167,8 @@ class TestRedisChannel: invalid_json = b"invalid json {" # Simulate Redis returning mixed valid/invalid commands - mock_pipe.execute.return_value = [[invalid_json, valid_json.encode()], 1] + pending_pipe.execute.return_value = [b"1", 1] + fetch_pipe.execute.return_value = [[invalid_json, valid_json.encode()], 1] channel = RedisChannel(mock_redis, "test:key") commands = channel.fetch_commands() @@ -187,13 +221,20 @@ class TestRedisChannel: def test_atomic_fetch_and_clear(self): """Test that fetch_commands atomically fetches and clears the list.""" mock_redis = MagicMock() - mock_pipe = MagicMock() - mock_redis.pipeline.return_value.__enter__ = MagicMock(return_value=mock_pipe) - mock_redis.pipeline.return_value.__exit__ = MagicMock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] command = AbortCommand() command_json = json.dumps(command.model_dump()) - mock_pipe.execute.return_value = [[command_json.encode()], 1] + pending_pipe.execute.return_value = 
[b"1", 1] + fetch_pipe.execute.return_value = [[command_json.encode()], 1] channel = RedisChannel(mock_redis, "test:key") @@ -202,7 +243,29 @@ class TestRedisChannel: assert len(commands) == 1 # Verify both lrange and delete were called in the pipeline - assert mock_pipe.lrange.call_count == 1 - assert mock_pipe.delete.call_count == 1 - mock_pipe.lrange.assert_called_with("test:key", 0, -1) - mock_pipe.delete.assert_called_with("test:key") + assert fetch_pipe.lrange.call_count == 1 + assert fetch_pipe.delete.call_count == 1 + fetch_pipe.lrange.assert_called_with("test:key", 0, -1) + fetch_pipe.delete.assert_called_with("test:key") + + def test_fetch_commands_without_pending_marker_returns_empty(self): + """Ensure we avoid unnecessary list reads when pending flag is missing.""" + mock_redis = MagicMock() + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] + + # Pending flag absent + pending_pipe.execute.return_value = [None, 0] + channel = RedisChannel(mock_redis, "test:key") + commands = channel.fetch_commands() + + assert commands == [] + mock_redis.llen.assert_not_called() + assert mock_redis.pipeline.call_count == 1 diff --git a/api/tests/unit_tests/core/workflow/graph_engine/event_management/test_event_handlers.py b/api/tests/unit_tests/core/workflow/graph_engine/event_management/test_event_handlers.py index d556bb138e..2b8f04979d 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/event_management/test_event_handlers.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/event_management/test_event_handlers.py @@ -4,7 +4,6 @@ from __future__ import annotations from datetime import datetime -from core.workflow.entities import GraphRuntimeState, VariablePool from core.workflow.enums import NodeExecutionType, NodeState, NodeType, WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.graph_engine.domain.graph_execution import GraphExecution @@ -16,6 +15,7 @@ from core.workflow.graph_engine.response_coordinator.coordinator import Response from core.workflow.graph_events import NodeRunRetryEvent, NodeRunStartedEvent from core.workflow.node_events import NodeRunResult from core.workflow.nodes.base.entities import RetryConfig +from core.workflow.runtime import GraphRuntimeState, VariablePool class _StubEdgeProcessor: diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_command_system.py b/api/tests/unit_tests/core/workflow/graph_engine/test_command_system.py index 9fec855a93..d451e7e608 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_command_system.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_command_system.py @@ -3,12 +3,12 @@ import time from unittest.mock import MagicMock -from core.workflow.entities import GraphRuntimeState, VariablePool from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine from core.workflow.graph_engine.command_channels import InMemoryChannel -from core.workflow.graph_engine.entities.commands import AbortCommand -from core.workflow.graph_events import GraphRunAbortedEvent, GraphRunStartedEvent +from core.workflow.graph_engine.entities.commands import AbortCommand, CommandType, PauseCommand +from 
core.workflow.graph_events import GraphRunAbortedEvent, GraphRunPausedEvent, GraphRunStartedEvent +from core.workflow.runtime import GraphRuntimeState, VariablePool def test_abort_command(): @@ -100,8 +100,57 @@ def test_redis_channel_serialization(): assert command_data["command_type"] == "abort" assert command_data["reason"] == "Test abort" + # Test pause command serialization + pause_command = PauseCommand(reason="User requested pause") + channel.send_command(pause_command) -if __name__ == "__main__": - test_abort_command() - test_redis_channel_serialization() - print("All tests passed!") + assert len(mock_pipeline.rpush.call_args_list) == 2 + second_call_args = mock_pipeline.rpush.call_args_list[1] + pause_command_json = second_call_args[0][1] + pause_command_data = json.loads(pause_command_json) + assert pause_command_data["command_type"] == CommandType.PAUSE.value + assert pause_command_data["reason"] == "User requested pause" + + +def test_pause_command(): + """Test that GraphEngine properly handles pause commands.""" + + shared_runtime_state = GraphRuntimeState(variable_pool=VariablePool(), start_at=time.perf_counter()) + + mock_graph = MagicMock(spec=Graph) + mock_graph.nodes = {} + mock_graph.edges = {} + mock_graph.root_node = MagicMock() + mock_graph.root_node.id = "start" + + mock_start_node = MagicMock() + mock_start_node.state = None + mock_start_node.id = "start" + mock_start_node.graph_runtime_state = shared_runtime_state + mock_graph.nodes["start"] = mock_start_node + + mock_graph.get_outgoing_edges = MagicMock(return_value=[]) + mock_graph.get_incoming_edges = MagicMock(return_value=[]) + + command_channel = InMemoryChannel() + + engine = GraphEngine( + workflow_id="test_workflow", + graph=mock_graph, + graph_runtime_state=shared_runtime_state, + command_channel=command_channel, + ) + + pause_command = PauseCommand(reason="User requested pause") + command_channel.send_command(pause_command) + + events = list(engine.run()) + + assert any(isinstance(e, GraphRunStartedEvent) for e in events) + pause_events = [e for e in events if isinstance(e, GraphRunPausedEvent)] + assert len(pause_events) == 1 + assert pause_events[0].reason == "User requested pause" + + graph_execution = engine.graph_runtime_state.graph_execution + assert graph_execution.is_paused + assert graph_execution.pause_reason == "User requested pause" diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_complex_branch_workflow.py b/api/tests/unit_tests/core/workflow/graph_engine/test_complex_branch_workflow.py index fc38393e75..96926797ec 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_complex_branch_workflow.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_complex_branch_workflow.py @@ -7,14 +7,11 @@ This test suite validates the behavior of a workflow that: 3. Handles multiple answer nodes with different outputs """ -import pytest - from core.workflow.graph_events import ( GraphRunStartedEvent, GraphRunSucceededEvent, NodeRunStartedEvent, NodeRunStreamChunkEvent, - NodeRunSucceededEvent, ) from .test_mock_config import MockConfigBuilder @@ -29,7 +26,6 @@ class TestComplexBranchWorkflow: self.runner = TableTestRunner() self.fixture_path = "test_complex_branch" - @pytest.mark.skip(reason="output in this workflow can be random") def test_hello_branch_with_llm(self): """ Test when query contains 'hello' - should trigger true branch. 
@@ -41,42 +37,17 @@ class TestComplexBranchWorkflow: fixture_path=self.fixture_path, query="hello world", expected_outputs={ - "answer": f"{mock_text_1}contains 'hello'", + "answer": f"contains 'hello'{mock_text_1}", }, description="Basic hello case with parallel LLM execution", use_auto_mock=True, mock_config=(MockConfigBuilder().with_node_output("1755502777322", {"text": mock_text_1}).build()), - expected_event_sequence=[ - GraphRunStartedEvent, - # Start - NodeRunStartedEvent, - NodeRunSucceededEvent, - # If/Else (no streaming) - NodeRunStartedEvent, - NodeRunSucceededEvent, - # LLM (with streaming) - NodeRunStartedEvent, - ] - # LLM - + [NodeRunStreamChunkEvent] * (mock_text_1.count(" ") + 2) - + [ - # Answer's text - NodeRunStreamChunkEvent, - NodeRunSucceededEvent, - # Answer - NodeRunStartedEvent, - NodeRunSucceededEvent, - # Answer 2 - NodeRunStartedEvent, - NodeRunSucceededEvent, - GraphRunSucceededEvent, - ], ), WorkflowTestCase( fixture_path=self.fixture_path, query="say hello to everyone", expected_outputs={ - "answer": "Mocked response for greetingcontains 'hello'", + "answer": "contains 'hello'Mocked response for greeting", }, description="Hello in middle of sentence", use_auto_mock=True, @@ -93,6 +64,35 @@ class TestComplexBranchWorkflow: for result in suite_result.results: assert result.success, f"Test '{result.test_case.description}' failed: {result.error}" assert result.actual_outputs + assert any(isinstance(event, GraphRunStartedEvent) for event in result.events) + assert any(isinstance(event, GraphRunSucceededEvent) for event in result.events) + + start_index = next( + idx for idx, event in enumerate(result.events) if isinstance(event, GraphRunStartedEvent) + ) + success_index = max( + idx for idx, event in enumerate(result.events) if isinstance(event, GraphRunSucceededEvent) + ) + assert start_index < success_index + + started_node_ids = {event.node_id for event in result.events if isinstance(event, NodeRunStartedEvent)} + assert {"1755502773326", "1755502777322"}.issubset(started_node_ids), ( + f"Branch or LLM nodes missing in events: {started_node_ids}" + ) + + assert any(isinstance(event, NodeRunStreamChunkEvent) for event in result.events), ( + "Expected streaming chunks from LLM execution" + ) + + llm_start_index = next( + idx + for idx, event in enumerate(result.events) + if isinstance(event, NodeRunStartedEvent) and event.node_id == "1755502777322" + ) + assert any( + idx > llm_start_index and isinstance(event, NodeRunStreamChunkEvent) + for idx, event in enumerate(result.events) + ), "Streaming chunks should follow LLM node start" def test_non_hello_branch_with_llm(self): """ diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_dispatcher.py b/api/tests/unit_tests/core/workflow/graph_engine/test_dispatcher.py new file mode 100644 index 0000000000..3fe4ce3400 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_dispatcher.py @@ -0,0 +1,109 @@ +"""Tests for dispatcher command checking behavior.""" + +from __future__ import annotations + +import queue +from datetime import datetime + +from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus +from core.workflow.graph_engine.event_management.event_manager import EventManager +from core.workflow.graph_engine.orchestration.dispatcher import Dispatcher +from core.workflow.graph_events import NodeRunStartedEvent, NodeRunSucceededEvent +from core.workflow.node_events import NodeRunResult + + +class _StubExecutionCoordinator: + """Stub execution coordinator that tracks command 
checks.""" + + def __init__(self) -> None: + self.command_checks = 0 + self.scaling_checks = 0 + self._execution_complete = False + self.mark_complete_called = False + self.failed = False + self._paused = False + + def check_commands(self) -> None: + self.command_checks += 1 + + def check_scaling(self) -> None: + self.scaling_checks += 1 + + @property + def is_paused(self) -> bool: + return self._paused + + def is_execution_complete(self) -> bool: + return self._execution_complete + + def mark_complete(self) -> None: + self.mark_complete_called = True + + def mark_failed(self, error: Exception) -> None: # pragma: no cover - defensive, not triggered in tests + self.failed = True + + def set_execution_complete(self) -> None: + self._execution_complete = True + + +class _StubEventHandler: + """Minimal event handler that marks execution complete after handling an event.""" + + def __init__(self, coordinator: _StubExecutionCoordinator) -> None: + self._coordinator = coordinator + self.events = [] + + def dispatch(self, event) -> None: + self.events.append(event) + self._coordinator.set_execution_complete() + + +def _run_dispatcher_for_event(event) -> int: + """Run the dispatcher loop for a single event and return command check count.""" + event_queue: queue.Queue = queue.Queue() + event_queue.put(event) + + coordinator = _StubExecutionCoordinator() + event_handler = _StubEventHandler(coordinator) + event_manager = EventManager() + + dispatcher = Dispatcher( + event_queue=event_queue, + event_handler=event_handler, + event_collector=event_manager, + execution_coordinator=coordinator, + ) + + dispatcher._dispatcher_loop() + + return coordinator.command_checks + + +def _make_started_event() -> NodeRunStartedEvent: + return NodeRunStartedEvent( + id="start-event", + node_id="node-1", + node_type=NodeType.CODE, + node_title="Test Node", + start_at=datetime.utcnow(), + ) + + +def _make_succeeded_event() -> NodeRunSucceededEvent: + return NodeRunSucceededEvent( + id="success-event", + node_id="node-1", + node_type=NodeType.CODE, + node_title="Test Node", + start_at=datetime.utcnow(), + node_run_result=NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED), + ) + + +def test_dispatcher_checks_commands_during_idle_and_on_completion() -> None: + """Dispatcher polls commands when idle and after completion events.""" + started_checks = _run_dispatcher_for_event(_make_started_event()) + succeeded_checks = _run_dispatcher_for_event(_make_succeeded_event()) + + assert started_checks == 1 + assert succeeded_checks == 2 diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_execution_coordinator.py b/api/tests/unit_tests/core/workflow/graph_engine/test_execution_coordinator.py new file mode 100644 index 0000000000..025393e435 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_execution_coordinator.py @@ -0,0 +1,62 @@ +"""Unit tests for the execution coordinator orchestration logic.""" + +from unittest.mock import MagicMock + +from core.workflow.graph_engine.command_processing.command_processor import CommandProcessor +from core.workflow.graph_engine.domain.graph_execution import GraphExecution +from core.workflow.graph_engine.graph_state_manager import GraphStateManager +from core.workflow.graph_engine.orchestration.execution_coordinator import ExecutionCoordinator +from core.workflow.graph_engine.worker_management.worker_pool import WorkerPool + + +def _build_coordinator(graph_execution: GraphExecution) -> tuple[ExecutionCoordinator, MagicMock, MagicMock]: + 
command_processor = MagicMock(spec=CommandProcessor) + state_manager = MagicMock(spec=GraphStateManager) + worker_pool = MagicMock(spec=WorkerPool) + + coordinator = ExecutionCoordinator( + graph_execution=graph_execution, + state_manager=state_manager, + command_processor=command_processor, + worker_pool=worker_pool, + ) + return coordinator, state_manager, worker_pool + + +def test_handle_pause_stops_workers_and_clears_state() -> None: + """Paused execution should stop workers and clear executing state.""" + graph_execution = GraphExecution(workflow_id="workflow") + graph_execution.start() + graph_execution.pause("Awaiting human input") + + coordinator, state_manager, worker_pool = _build_coordinator(graph_execution) + + coordinator.handle_pause_if_needed() + + worker_pool.stop.assert_called_once_with() + state_manager.clear_executing.assert_called_once_with() + + +def test_handle_pause_noop_when_execution_running() -> None: + """Running execution should not trigger pause handling.""" + graph_execution = GraphExecution(workflow_id="workflow") + graph_execution.start() + + coordinator, state_manager, worker_pool = _build_coordinator(graph_execution) + + coordinator.handle_pause_if_needed() + + worker_pool.stop.assert_not_called() + state_manager.clear_executing.assert_not_called() + + +def test_is_execution_complete_when_paused() -> None: + """Paused execution should be treated as complete.""" + graph_execution = GraphExecution(workflow_id="workflow") + graph_execution.start() + graph_execution.pause("Awaiting input") + + coordinator, state_manager, _worker_pool = _build_coordinator(graph_execution) + state_manager.is_execution_complete.return_value = False + + assert coordinator.is_execution_complete() diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_human_input_pause_multi_branch.py b/api/tests/unit_tests/core/workflow/graph_engine/test_human_input_pause_multi_branch.py new file mode 100644 index 0000000000..c9e7e31e52 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_human_input_pause_multi_branch.py @@ -0,0 +1,341 @@ +import time +from collections.abc import Iterable + +from core.model_runtime.entities.llm_entities import LLMMode +from core.model_runtime.entities.message_entities import PromptMessageRole +from core.workflow.entities import GraphInitParams +from core.workflow.graph import Graph +from core.workflow.graph_events import ( + GraphRunPausedEvent, + GraphRunStartedEvent, + GraphRunSucceededEvent, + NodeRunPauseRequestedEvent, + NodeRunStartedEvent, + NodeRunStreamChunkEvent, + NodeRunSucceededEvent, +) +from core.workflow.nodes.base.entities import VariableSelector +from core.workflow.nodes.end.end_node import EndNode +from core.workflow.nodes.end.entities import EndNodeData +from core.workflow.nodes.human_input import HumanInputNode +from core.workflow.nodes.human_input.entities import HumanInputNodeData +from core.workflow.nodes.llm.entities import ( + ContextConfig, + LLMNodeChatModelMessage, + LLMNodeData, + ModelConfig, + VisionConfig, +) +from core.workflow.nodes.start.entities import StartNodeData +from core.workflow.nodes.start.start_node import StartNode +from core.workflow.runtime import GraphRuntimeState, VariablePool +from core.workflow.system_variable import SystemVariable + +from .test_mock_config import MockConfig +from .test_mock_nodes import MockLLMNode +from .test_table_runner import TableTestRunner, WorkflowTestCase + + +def _build_branching_graph(mock_config: MockConfig) -> tuple[Graph, GraphRuntimeState]: + 
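+    """Build a start -> llm_initial -> human graph whose human-input node fans
+    out via its "primary"/"secondary" source handles to separate LLM -> End
+    branches, returning the graph together with its shared runtime state."""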
graph_config: dict[str, object] = {"nodes": [], "edges": []} + graph_init_params = GraphInitParams( + tenant_id="tenant", + app_id="app", + workflow_id="workflow", + graph_config=graph_config, + user_id="user", + user_from="account", + invoke_from="debugger", + call_depth=0, + ) + + variable_pool = VariablePool( + system_variables=SystemVariable(user_id="user", app_id="app", workflow_id="workflow"), + user_inputs={}, + conversation_variables=[], + ) + graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter()) + + start_config = {"id": "start", "data": StartNodeData(title="Start", variables=[]).model_dump()} + start_node = StartNode( + id=start_config["id"], + config=start_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + start_node.init_node_data(start_config["data"]) + + def _create_llm_node(node_id: str, title: str, prompt_text: str) -> MockLLMNode: + llm_data = LLMNodeData( + title=title, + model=ModelConfig(provider="openai", name="gpt-3.5-turbo", mode=LLMMode.CHAT, completion_params={}), + prompt_template=[ + LLMNodeChatModelMessage( + text=prompt_text, + role=PromptMessageRole.USER, + edition_type="basic", + ) + ], + context=ContextConfig(enabled=False, variable_selector=None), + vision=VisionConfig(enabled=False), + reasoning_format="tagged", + ) + llm_config = {"id": node_id, "data": llm_data.model_dump()} + llm_node = MockLLMNode( + id=node_id, + config=llm_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + mock_config=mock_config, + ) + llm_node.init_node_data(llm_config["data"]) + return llm_node + + llm_initial = _create_llm_node("llm_initial", "Initial LLM", "Initial stream") + + human_data = HumanInputNodeData( + title="Human Input", + required_variables=["human.input_ready"], + pause_reason="Awaiting human input", + ) + human_config = {"id": "human", "data": human_data.model_dump()} + human_node = HumanInputNode( + id=human_config["id"], + config=human_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + human_node.init_node_data(human_config["data"]) + + llm_primary = _create_llm_node("llm_primary", "Primary LLM", "Primary stream output") + llm_secondary = _create_llm_node("llm_secondary", "Secondary LLM", "Secondary") + + end_primary_data = EndNodeData( + title="End Primary", + outputs=[ + VariableSelector(variable="initial_text", value_selector=["llm_initial", "text"]), + VariableSelector(variable="primary_text", value_selector=["llm_primary", "text"]), + ], + desc=None, + ) + end_primary_config = {"id": "end_primary", "data": end_primary_data.model_dump()} + end_primary = EndNode( + id=end_primary_config["id"], + config=end_primary_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + end_primary.init_node_data(end_primary_config["data"]) + + end_secondary_data = EndNodeData( + title="End Secondary", + outputs=[ + VariableSelector(variable="initial_text", value_selector=["llm_initial", "text"]), + VariableSelector(variable="secondary_text", value_selector=["llm_secondary", "text"]), + ], + desc=None, + ) + end_secondary_config = {"id": "end_secondary", "data": end_secondary_data.model_dump()} + end_secondary = EndNode( + id=end_secondary_config["id"], + config=end_secondary_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + end_secondary.init_node_data(end_secondary_config["data"]) + + graph = ( + Graph.new() + 
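+        # Shared prefix first (start -> llm_initial -> human); each branch
+        # below hangs off the human node's "primary"/"secondary" source handles.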
.add_root(start_node) + .add_node(llm_initial) + .add_node(human_node) + .add_node(llm_primary, from_node_id="human", source_handle="primary") + .add_node(end_primary, from_node_id="llm_primary") + .add_node(llm_secondary, from_node_id="human", source_handle="secondary") + .add_node(end_secondary, from_node_id="llm_secondary") + .build() + ) + return graph, graph_runtime_state + + +def _expected_mock_llm_chunks(text: str) -> list[str]: + chunks: list[str] = [] + for index, word in enumerate(text.split(" ")): + chunk = word if index == 0 else f" {word}" + chunks.append(chunk) + chunks.append("") + return chunks + + +def _assert_stream_chunk_sequence( + chunk_events: Iterable[NodeRunStreamChunkEvent], + expected_nodes: list[str], + expected_chunks: list[str], +) -> None: + actual_nodes = [event.node_id for event in chunk_events] + actual_chunks = [event.chunk for event in chunk_events] + assert actual_nodes == expected_nodes + assert actual_chunks == expected_chunks + + +def test_human_input_llm_streaming_across_multiple_branches() -> None: + mock_config = MockConfig() + mock_config.set_node_outputs("llm_initial", {"text": "Initial stream"}) + mock_config.set_node_outputs("llm_primary", {"text": "Primary stream output"}) + mock_config.set_node_outputs("llm_secondary", {"text": "Secondary"}) + + branch_scenarios = [ + { + "handle": "primary", + "resume_llm": "llm_primary", + "end_node": "end_primary", + "expected_pre_chunks": [ + ("llm_initial", _expected_mock_llm_chunks("Initial stream")), # cached output before branch completes + ("end_primary", ["\n"]), # literal segment emitted when end_primary session activates + ], + "expected_post_chunks": [ + ("llm_primary", _expected_mock_llm_chunks("Primary stream output")), # live stream from chosen branch + ], + }, + { + "handle": "secondary", + "resume_llm": "llm_secondary", + "end_node": "end_secondary", + "expected_pre_chunks": [ + ("llm_initial", _expected_mock_llm_chunks("Initial stream")), # cached output before branch completes + ("end_secondary", ["\n"]), # literal segment emitted when end_secondary session activates + ], + "expected_post_chunks": [ + ("llm_secondary", _expected_mock_llm_chunks("Secondary")), # live stream from chosen branch + ], + }, + ] + + for scenario in branch_scenarios: + runner = TableTestRunner() + + def initial_graph_factory() -> tuple[Graph, GraphRuntimeState]: + return _build_branching_graph(mock_config) + + initial_case = WorkflowTestCase( + description="HumanInput pause before branching decision", + graph_factory=initial_graph_factory, + expected_event_sequence=[ + GraphRunStartedEvent, # initial run: graph execution starts + NodeRunStartedEvent, # start node begins execution + NodeRunSucceededEvent, # start node completes + NodeRunStartedEvent, # llm_initial starts streaming + NodeRunSucceededEvent, # llm_initial completes streaming + NodeRunStartedEvent, # human node begins and issues pause + NodeRunPauseRequestedEvent, # human node requests pause awaiting input + GraphRunPausedEvent, # graph run pauses awaiting resume + ], + ) + + initial_result = runner.run_test_case(initial_case) + + assert initial_result.success, initial_result.event_mismatch_details + assert not any(isinstance(event, NodeRunStreamChunkEvent) for event in initial_result.events) + + graph_runtime_state = initial_result.graph_runtime_state + graph = initial_result.graph + assert graph_runtime_state is not None + assert graph is not None + + graph_runtime_state.variable_pool.add(("human", "input_ready"), True) + 
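+        # "input_ready" satisfies the human node's required_variables gate;
+        # "edge_source_handle" (next line) selects which outgoing branch the
+        # resumed run follows.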
graph_runtime_state.variable_pool.add(("human", "edge_source_handle"), scenario["handle"]) + graph_runtime_state.graph_execution.pause_reason = None + + pre_chunk_count = sum(len(chunks) for _, chunks in scenario["expected_pre_chunks"]) + post_chunk_count = sum(len(chunks) for _, chunks in scenario["expected_post_chunks"]) + + expected_resume_sequence: list[type] = ( + [ + GraphRunStartedEvent, + NodeRunStartedEvent, + ] + + [NodeRunStreamChunkEvent] * pre_chunk_count + + [ + NodeRunSucceededEvent, + NodeRunStartedEvent, + ] + + [NodeRunStreamChunkEvent] * post_chunk_count + + [ + NodeRunSucceededEvent, + NodeRunStartedEvent, + NodeRunSucceededEvent, + GraphRunSucceededEvent, + ] + ) + + def resume_graph_factory( + graph_snapshot: Graph = graph, + state_snapshot: GraphRuntimeState = graph_runtime_state, + ) -> tuple[Graph, GraphRuntimeState]: + return graph_snapshot, state_snapshot + + resume_case = WorkflowTestCase( + description=f"HumanInput resumes via {scenario['handle']} branch", + graph_factory=resume_graph_factory, + expected_event_sequence=expected_resume_sequence, + ) + + resume_result = runner.run_test_case(resume_case) + + assert resume_result.success, resume_result.event_mismatch_details + + resume_events = resume_result.events + + chunk_events = [event for event in resume_events if isinstance(event, NodeRunStreamChunkEvent)] + assert len(chunk_events) == pre_chunk_count + post_chunk_count + + pre_chunk_events = chunk_events[:pre_chunk_count] + post_chunk_events = chunk_events[pre_chunk_count:] + + expected_pre_nodes: list[str] = [] + expected_pre_chunks: list[str] = [] + for node_id, chunks in scenario["expected_pre_chunks"]: + expected_pre_nodes.extend([node_id] * len(chunks)) + expected_pre_chunks.extend(chunks) + _assert_stream_chunk_sequence(pre_chunk_events, expected_pre_nodes, expected_pre_chunks) + + expected_post_nodes: list[str] = [] + expected_post_chunks: list[str] = [] + for node_id, chunks in scenario["expected_post_chunks"]: + expected_post_nodes.extend([node_id] * len(chunks)) + expected_post_chunks.extend(chunks) + _assert_stream_chunk_sequence(post_chunk_events, expected_post_nodes, expected_post_chunks) + + human_success_index = next( + index + for index, event in enumerate(resume_events) + if isinstance(event, NodeRunSucceededEvent) and event.node_id == "human" + ) + pre_indices = [ + index + for index, event in enumerate(resume_events) + if isinstance(event, NodeRunStreamChunkEvent) and index < human_success_index + ] + assert pre_indices == list(range(2, 2 + pre_chunk_count)) + + resume_chunk_indices = [ + index + for index, event in enumerate(resume_events) + if isinstance(event, NodeRunStreamChunkEvent) and event.node_id == scenario["resume_llm"] + ] + assert resume_chunk_indices, "Expected streaming output from the selected branch" + resume_start_index = next( + index + for index, event in enumerate(resume_events) + if isinstance(event, NodeRunStartedEvent) and event.node_id == scenario["resume_llm"] + ) + resume_success_index = next( + index + for index, event in enumerate(resume_events) + if isinstance(event, NodeRunSucceededEvent) and event.node_id == scenario["resume_llm"] + ) + assert resume_start_index < min(resume_chunk_indices) + assert max(resume_chunk_indices) < resume_success_index + + started_nodes = [event.node_id for event in resume_events if isinstance(event, NodeRunStartedEvent)] + assert started_nodes == ["human", scenario["resume_llm"], scenario["end_node"]] diff --git 
a/api/tests/unit_tests/core/workflow/graph_engine/test_human_input_pause_single_branch.py b/api/tests/unit_tests/core/workflow/graph_engine/test_human_input_pause_single_branch.py new file mode 100644 index 0000000000..27d264365d --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_human_input_pause_single_branch.py @@ -0,0 +1,297 @@ +import time + +from core.model_runtime.entities.llm_entities import LLMMode +from core.model_runtime.entities.message_entities import PromptMessageRole +from core.workflow.entities import GraphInitParams +from core.workflow.graph import Graph +from core.workflow.graph_events import ( + GraphRunPausedEvent, + GraphRunStartedEvent, + GraphRunSucceededEvent, + NodeRunPauseRequestedEvent, + NodeRunStartedEvent, + NodeRunStreamChunkEvent, + NodeRunSucceededEvent, +) +from core.workflow.nodes.base.entities import VariableSelector +from core.workflow.nodes.end.end_node import EndNode +from core.workflow.nodes.end.entities import EndNodeData +from core.workflow.nodes.human_input import HumanInputNode +from core.workflow.nodes.human_input.entities import HumanInputNodeData +from core.workflow.nodes.llm.entities import ( + ContextConfig, + LLMNodeChatModelMessage, + LLMNodeData, + ModelConfig, + VisionConfig, +) +from core.workflow.nodes.start.entities import StartNodeData +from core.workflow.nodes.start.start_node import StartNode +from core.workflow.runtime import GraphRuntimeState, VariablePool +from core.workflow.system_variable import SystemVariable + +from .test_mock_config import MockConfig +from .test_mock_nodes import MockLLMNode +from .test_table_runner import TableTestRunner, WorkflowTestCase + + +def _build_llm_human_llm_graph(mock_config: MockConfig) -> tuple[Graph, GraphRuntimeState]: + graph_config: dict[str, object] = {"nodes": [], "edges": []} + graph_init_params = GraphInitParams( + tenant_id="tenant", + app_id="app", + workflow_id="workflow", + graph_config=graph_config, + user_id="user", + user_from="account", + invoke_from="debugger", + call_depth=0, + ) + + variable_pool = VariablePool( + system_variables=SystemVariable(user_id="user", app_id="app", workflow_id="workflow"), + user_inputs={}, + conversation_variables=[], + ) + graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter()) + + start_config = {"id": "start", "data": StartNodeData(title="Start", variables=[]).model_dump()} + start_node = StartNode( + id=start_config["id"], + config=start_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + start_node.init_node_data(start_config["data"]) + + def _create_llm_node(node_id: str, title: str, prompt_text: str) -> MockLLMNode: + llm_data = LLMNodeData( + title=title, + model=ModelConfig(provider="openai", name="gpt-3.5-turbo", mode=LLMMode.CHAT, completion_params={}), + prompt_template=[ + LLMNodeChatModelMessage( + text=prompt_text, + role=PromptMessageRole.USER, + edition_type="basic", + ) + ], + context=ContextConfig(enabled=False, variable_selector=None), + vision=VisionConfig(enabled=False), + reasoning_format="tagged", + ) + llm_config = {"id": node_id, "data": llm_data.model_dump()} + llm_node = MockLLMNode( + id=node_id, + config=llm_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + mock_config=mock_config, + ) + llm_node.init_node_data(llm_config["data"]) + return llm_node + + llm_first = _create_llm_node("llm_initial", "Initial LLM", "Initial prompt") + + human_data = HumanInputNodeData( + 
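# required_variables gates the node: the run pauses until human.input_ready exists.
+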
title="Human Input", + required_variables=["human.input_ready"], + pause_reason="Awaiting human input", + ) + human_config = {"id": "human", "data": human_data.model_dump()} + human_node = HumanInputNode( + id=human_config["id"], + config=human_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + human_node.init_node_data(human_config["data"]) + + llm_second = _create_llm_node("llm_resume", "Follow-up LLM", "Follow-up prompt") + + end_data = EndNodeData( + title="End", + outputs=[ + VariableSelector(variable="initial_text", value_selector=["llm_initial", "text"]), + VariableSelector(variable="resume_text", value_selector=["llm_resume", "text"]), + ], + desc=None, + ) + end_config = {"id": "end", "data": end_data.model_dump()} + end_node = EndNode( + id=end_config["id"], + config=end_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + end_node.init_node_data(end_config["data"]) + + graph = ( + Graph.new() + .add_root(start_node) + .add_node(llm_first) + .add_node(human_node) + .add_node(llm_second) + .add_node(end_node) + .build() + ) + return graph, graph_runtime_state + + +def _expected_mock_llm_chunks(text: str) -> list[str]: + chunks: list[str] = [] + for index, word in enumerate(text.split(" ")): + chunk = word if index == 0 else f" {word}" + chunks.append(chunk) + chunks.append("") + return chunks + + +def test_human_input_llm_streaming_order_across_pause() -> None: + runner = TableTestRunner() + + initial_text = "Hello, pause" + resume_text = "Welcome back!" + + mock_config = MockConfig() + mock_config.set_node_outputs("llm_initial", {"text": initial_text}) + mock_config.set_node_outputs("llm_resume", {"text": resume_text}) + + expected_initial_sequence: list[type] = [ + GraphRunStartedEvent, # graph run begins + NodeRunStartedEvent, # start node begins + NodeRunSucceededEvent, # start node completes + NodeRunStartedEvent, # llm_initial begins streaming + NodeRunSucceededEvent, # llm_initial completes streaming + NodeRunStartedEvent, # human node begins and requests pause + NodeRunPauseRequestedEvent, # human node pause requested + GraphRunPausedEvent, # graph run pauses awaiting resume + ] + + def graph_factory() -> tuple[Graph, GraphRuntimeState]: + return _build_llm_human_llm_graph(mock_config) + + initial_case = WorkflowTestCase( + description="HumanInput pause preserves LLM streaming order", + graph_factory=graph_factory, + expected_event_sequence=expected_initial_sequence, + ) + + initial_result = runner.run_test_case(initial_case) + + assert initial_result.success, initial_result.event_mismatch_details + + initial_events = initial_result.events + initial_chunks = _expected_mock_llm_chunks(initial_text) + + initial_stream_chunk_events = [event for event in initial_events if isinstance(event, NodeRunStreamChunkEvent)] + assert initial_stream_chunk_events == [] + + pause_index = next(i for i, event in enumerate(initial_events) if isinstance(event, GraphRunPausedEvent)) + llm_succeeded_index = next( + i + for i, event in enumerate(initial_events) + if isinstance(event, NodeRunSucceededEvent) and event.node_id == "llm_initial" + ) + assert llm_succeeded_index < pause_index + + graph_runtime_state = initial_result.graph_runtime_state + graph = initial_result.graph + assert graph_runtime_state is not None + assert graph is not None + + coordinator = graph_runtime_state.response_coordinator + stream_buffers = coordinator._stream_buffers # Tests may access internals for assertions + assert 
("llm_initial", "text") in stream_buffers + initial_stream_chunks = [event.chunk for event in stream_buffers[("llm_initial", "text")]] + assert initial_stream_chunks == initial_chunks + assert ("llm_resume", "text") not in stream_buffers + + resume_chunks = _expected_mock_llm_chunks(resume_text) + expected_resume_sequence: list[type] = [ + GraphRunStartedEvent, # resumed graph run begins + NodeRunStartedEvent, # human node restarts + NodeRunStreamChunkEvent, # cached llm_initial chunk 1 + NodeRunStreamChunkEvent, # cached llm_initial chunk 2 + NodeRunStreamChunkEvent, # cached llm_initial final chunk + NodeRunStreamChunkEvent, # end node emits combined template separator + NodeRunSucceededEvent, # human node finishes instantly after input + NodeRunStartedEvent, # llm_resume begins streaming + NodeRunStreamChunkEvent, # llm_resume chunk 1 + NodeRunStreamChunkEvent, # llm_resume chunk 2 + NodeRunStreamChunkEvent, # llm_resume final chunk + NodeRunSucceededEvent, # llm_resume completes streaming + NodeRunStartedEvent, # end node starts + NodeRunSucceededEvent, # end node finishes + GraphRunSucceededEvent, # graph run succeeds after resume + ] + + def resume_graph_factory() -> tuple[Graph, GraphRuntimeState]: + assert graph_runtime_state is not None + assert graph is not None + graph_runtime_state.variable_pool.add(("human", "input_ready"), True) + graph_runtime_state.graph_execution.pause_reason = None + return graph, graph_runtime_state + + resume_case = WorkflowTestCase( + description="HumanInput resume continues LLM streaming order", + graph_factory=resume_graph_factory, + expected_event_sequence=expected_resume_sequence, + ) + + resume_result = runner.run_test_case(resume_case) + + assert resume_result.success, resume_result.event_mismatch_details + + resume_events = resume_result.events + + success_index = next(i for i, event in enumerate(resume_events) if isinstance(event, GraphRunSucceededEvent)) + llm_resume_succeeded_index = next( + i + for i, event in enumerate(resume_events) + if isinstance(event, NodeRunSucceededEvent) and event.node_id == "llm_resume" + ) + assert llm_resume_succeeded_index < success_index + + resume_chunk_events = [event for event in resume_events if isinstance(event, NodeRunStreamChunkEvent)] + assert [event.node_id for event in resume_chunk_events[:3]] == ["llm_initial"] * 3 + assert [event.chunk for event in resume_chunk_events[:3]] == initial_chunks + assert resume_chunk_events[3].node_id == "end" + assert resume_chunk_events[3].chunk == "\n" + assert [event.node_id for event in resume_chunk_events[4:]] == ["llm_resume"] * 3 + assert [event.chunk for event in resume_chunk_events[4:]] == resume_chunks + + human_success_index = next( + i + for i, event in enumerate(resume_events) + if isinstance(event, NodeRunSucceededEvent) and event.node_id == "human" + ) + cached_chunk_indices = [ + i + for i, event in enumerate(resume_events) + if isinstance(event, NodeRunStreamChunkEvent) and event.node_id in {"llm_initial", "end"} + ] + assert all(index < human_success_index for index in cached_chunk_indices) + + llm_resume_start_index = next( + i + for i, event in enumerate(resume_events) + if isinstance(event, NodeRunStartedEvent) and event.node_id == "llm_resume" + ) + llm_resume_success_index = next( + i + for i, event in enumerate(resume_events) + if isinstance(event, NodeRunSucceededEvent) and event.node_id == "llm_resume" + ) + llm_resume_chunk_indices = [ + i + for i, event in enumerate(resume_events) + if isinstance(event, NodeRunStreamChunkEvent) and 
event.node_id == "llm_resume" + ] + assert llm_resume_chunk_indices + first_resume_chunk_index = min(llm_resume_chunk_indices) + last_resume_chunk_index = max(llm_resume_chunk_indices) + assert llm_resume_start_index < first_resume_chunk_index + assert last_resume_chunk_index < llm_resume_success_index + + started_nodes = [event.node_id for event in resume_events if isinstance(event, NodeRunStartedEvent)] + assert started_nodes == ["human", "llm_resume", "end"] diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_if_else_streaming.py b/api/tests/unit_tests/core/workflow/graph_engine/test_if_else_streaming.py new file mode 100644 index 0000000000..dfd33f135f --- /dev/null +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_if_else_streaming.py @@ -0,0 +1,321 @@ +import time + +from core.model_runtime.entities.llm_entities import LLMMode +from core.model_runtime.entities.message_entities import PromptMessageRole +from core.workflow.entities import GraphInitParams +from core.workflow.graph import Graph +from core.workflow.graph_events import ( + GraphRunStartedEvent, + GraphRunSucceededEvent, + NodeRunStartedEvent, + NodeRunStreamChunkEvent, + NodeRunSucceededEvent, +) +from core.workflow.nodes.base.entities import VariableSelector +from core.workflow.nodes.end.end_node import EndNode +from core.workflow.nodes.end.entities import EndNodeData +from core.workflow.nodes.if_else.entities import IfElseNodeData +from core.workflow.nodes.if_else.if_else_node import IfElseNode +from core.workflow.nodes.llm.entities import ( + ContextConfig, + LLMNodeChatModelMessage, + LLMNodeData, + ModelConfig, + VisionConfig, +) +from core.workflow.nodes.start.entities import StartNodeData +from core.workflow.nodes.start.start_node import StartNode +from core.workflow.runtime import GraphRuntimeState, VariablePool +from core.workflow.system_variable import SystemVariable +from core.workflow.utils.condition.entities import Condition + +from .test_mock_config import MockConfig +from .test_mock_nodes import MockLLMNode +from .test_table_runner import TableTestRunner, WorkflowTestCase + + +def _build_if_else_graph(branch_value: str, mock_config: MockConfig) -> tuple[Graph, GraphRuntimeState]: + graph_config: dict[str, object] = {"nodes": [], "edges": []} + graph_init_params = GraphInitParams( + tenant_id="tenant", + app_id="app", + workflow_id="workflow", + graph_config=graph_config, + user_id="user", + user_from="account", + invoke_from="debugger", + call_depth=0, + ) + + variable_pool = VariablePool( + system_variables=SystemVariable(user_id="user", app_id="app", workflow_id="workflow"), + user_inputs={}, + conversation_variables=[], + ) + variable_pool.add(("branch", "value"), branch_value) + graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter()) + + start_config = {"id": "start", "data": StartNodeData(title="Start", variables=[]).model_dump()} + start_node = StartNode( + id=start_config["id"], + config=start_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + start_node.init_node_data(start_config["data"]) + + def _create_llm_node(node_id: str, title: str, prompt_text: str) -> MockLLMNode: + llm_data = LLMNodeData( + title=title, + model=ModelConfig(provider="openai", name="gpt-3.5-turbo", mode=LLMMode.CHAT, completion_params={}), + prompt_template=[ + LLMNodeChatModelMessage( + text=prompt_text, + role=PromptMessageRole.USER, + edition_type="basic", + ) + ], + context=ContextConfig(enabled=False, 
variable_selector=None), + vision=VisionConfig(enabled=False), + reasoning_format="tagged", + ) + llm_config = {"id": node_id, "data": llm_data.model_dump()} + llm_node = MockLLMNode( + id=node_id, + config=llm_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + mock_config=mock_config, + ) + llm_node.init_node_data(llm_config["data"]) + return llm_node + + llm_initial = _create_llm_node("llm_initial", "Initial LLM", "Initial stream") + + if_else_data = IfElseNodeData( + title="IfElse", + cases=[ + IfElseNodeData.Case( + case_id="primary", + logical_operator="and", + conditions=[ + Condition(variable_selector=["branch", "value"], comparison_operator="is", value="primary") + ], + ), + IfElseNodeData.Case( + case_id="secondary", + logical_operator="and", + conditions=[ + Condition(variable_selector=["branch", "value"], comparison_operator="is", value="secondary") + ], + ), + ], + ) + if_else_config = {"id": "if_else", "data": if_else_data.model_dump()} + if_else_node = IfElseNode( + id=if_else_config["id"], + config=if_else_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + if_else_node.init_node_data(if_else_config["data"]) + + llm_primary = _create_llm_node("llm_primary", "Primary LLM", "Primary stream output") + llm_secondary = _create_llm_node("llm_secondary", "Secondary LLM", "Secondary") + + end_primary_data = EndNodeData( + title="End Primary", + outputs=[ + VariableSelector(variable="initial_text", value_selector=["llm_initial", "text"]), + VariableSelector(variable="primary_text", value_selector=["llm_primary", "text"]), + ], + desc=None, + ) + end_primary_config = {"id": "end_primary", "data": end_primary_data.model_dump()} + end_primary = EndNode( + id=end_primary_config["id"], + config=end_primary_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + end_primary.init_node_data(end_primary_config["data"]) + + end_secondary_data = EndNodeData( + title="End Secondary", + outputs=[ + VariableSelector(variable="initial_text", value_selector=["llm_initial", "text"]), + VariableSelector(variable="secondary_text", value_selector=["llm_secondary", "text"]), + ], + desc=None, + ) + end_secondary_config = {"id": "end_secondary", "data": end_secondary_data.model_dump()} + end_secondary = EndNode( + id=end_secondary_config["id"], + config=end_secondary_config, + graph_init_params=graph_init_params, + graph_runtime_state=graph_runtime_state, + ) + end_secondary.init_node_data(end_secondary_config["data"]) + + graph = ( + Graph.new() + .add_root(start_node) + .add_node(llm_initial) + .add_node(if_else_node) + .add_node(llm_primary, from_node_id="if_else", source_handle="primary") + .add_node(end_primary, from_node_id="llm_primary") + .add_node(llm_secondary, from_node_id="if_else", source_handle="secondary") + .add_node(end_secondary, from_node_id="llm_secondary") + .build() + ) + return graph, graph_runtime_state + + +def _expected_mock_llm_chunks(text: str) -> list[str]: + chunks: list[str] = [] + for index, word in enumerate(text.split(" ")): + chunk = word if index == 0 else f" {word}" + chunks.append(chunk) + chunks.append("") + return chunks + + +def test_if_else_llm_streaming_order() -> None: + mock_config = MockConfig() + mock_config.set_node_outputs("llm_initial", {"text": "Initial stream"}) + mock_config.set_node_outputs("llm_primary", {"text": "Primary stream output"}) + mock_config.set_node_outputs("llm_secondary", {"text": "Secondary"}) + + scenarios = [ + 
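# Each scenario pins the complete event order and the chunk payloads for one branch.
+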
{ + "branch": "primary", + "resume_llm": "llm_primary", + "end_node": "end_primary", + "expected_sequence": [ + GraphRunStartedEvent, # graph run begins + NodeRunStartedEvent, # start node begins execution + NodeRunSucceededEvent, # start node completes + NodeRunStartedEvent, # llm_initial starts and streams + NodeRunSucceededEvent, # llm_initial completes streaming + NodeRunStartedEvent, # if_else evaluates conditions + NodeRunStreamChunkEvent, # cached llm_initial chunk 1 flushed + NodeRunStreamChunkEvent, # cached llm_initial chunk 2 flushed + NodeRunStreamChunkEvent, # cached llm_initial final chunk flushed + NodeRunStreamChunkEvent, # template literal newline emitted + NodeRunSucceededEvent, # if_else completes branch selection + NodeRunStartedEvent, # llm_primary begins streaming + NodeRunStreamChunkEvent, # llm_primary chunk 1 + NodeRunStreamChunkEvent, # llm_primary chunk 2 + NodeRunStreamChunkEvent, # llm_primary chunk 3 + NodeRunStreamChunkEvent, # llm_primary final chunk + NodeRunSucceededEvent, # llm_primary completes streaming + NodeRunStartedEvent, # end_primary node starts + NodeRunSucceededEvent, # end_primary finishes aggregation + GraphRunSucceededEvent, # graph run succeeds + ], + "expected_chunks": [ + ("llm_initial", _expected_mock_llm_chunks("Initial stream")), + ("end_primary", ["\n"]), + ("llm_primary", _expected_mock_llm_chunks("Primary stream output")), + ], + }, + { + "branch": "secondary", + "resume_llm": "llm_secondary", + "end_node": "end_secondary", + "expected_sequence": [ + GraphRunStartedEvent, # graph run begins + NodeRunStartedEvent, # start node begins execution + NodeRunSucceededEvent, # start node completes + NodeRunStartedEvent, # llm_initial starts and streams + NodeRunSucceededEvent, # llm_initial completes streaming + NodeRunStartedEvent, # if_else evaluates conditions + NodeRunStreamChunkEvent, # cached llm_initial chunk 1 flushed + NodeRunStreamChunkEvent, # cached llm_initial chunk 2 flushed + NodeRunStreamChunkEvent, # cached llm_initial final chunk flushed + NodeRunStreamChunkEvent, # template literal newline emitted + NodeRunSucceededEvent, # if_else completes branch selection + NodeRunStartedEvent, # llm_secondary begins streaming + NodeRunStreamChunkEvent, # llm_secondary chunk 1 + NodeRunStreamChunkEvent, # llm_secondary final chunk + NodeRunSucceededEvent, # llm_secondary completes + NodeRunStartedEvent, # end_secondary node starts + NodeRunSucceededEvent, # end_secondary finishes aggregation + GraphRunSucceededEvent, # graph run succeeds + ], + "expected_chunks": [ + ("llm_initial", _expected_mock_llm_chunks("Initial stream")), + ("end_secondary", ["\n"]), + ("llm_secondary", _expected_mock_llm_chunks("Secondary")), + ], + }, + ] + + for scenario in scenarios: + runner = TableTestRunner() + + def graph_factory( + branch_value: str = scenario["branch"], + cfg: MockConfig = mock_config, + ) -> tuple[Graph, GraphRuntimeState]: + return _build_if_else_graph(branch_value, cfg) + + test_case = WorkflowTestCase( + description=f"IfElse streaming via {scenario['branch']} branch", + graph_factory=graph_factory, + expected_event_sequence=scenario["expected_sequence"], + ) + + result = runner.run_test_case(test_case) + + assert result.success, result.event_mismatch_details + + chunk_events = [event for event in result.events if isinstance(event, NodeRunStreamChunkEvent)] + expected_nodes: list[str] = [] + expected_chunks: list[str] = [] + for node_id, chunks in scenario["expected_chunks"]: + expected_nodes.extend([node_id] * len(chunks)) + 
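# keep node ids and chunk payloads aligned index-by-index for the comparison below
+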
expected_chunks.extend(chunks) + assert [event.node_id for event in chunk_events] == expected_nodes + assert [event.chunk for event in chunk_events] == expected_chunks + + branch_node_index = next( + index + for index, event in enumerate(result.events) + if isinstance(event, NodeRunStartedEvent) and event.node_id == "if_else" + ) + branch_success_index = next( + index + for index, event in enumerate(result.events) + if isinstance(event, NodeRunSucceededEvent) and event.node_id == "if_else" + ) + pre_branch_chunk_indices = [ + index + for index, event in enumerate(result.events) + if isinstance(event, NodeRunStreamChunkEvent) and index < branch_success_index + ] + assert len(pre_branch_chunk_indices) == len(_expected_mock_llm_chunks("Initial stream")) + 1 + assert min(pre_branch_chunk_indices) == branch_node_index + 1 + assert max(pre_branch_chunk_indices) < branch_success_index + + resume_chunk_indices = [ + index + for index, event in enumerate(result.events) + if isinstance(event, NodeRunStreamChunkEvent) and event.node_id == scenario["resume_llm"] + ] + assert resume_chunk_indices + resume_start_index = next( + index + for index, event in enumerate(result.events) + if isinstance(event, NodeRunStartedEvent) and event.node_id == scenario["resume_llm"] + ) + resume_success_index = next( + index + for index, event in enumerate(result.events) + if isinstance(event, NodeRunSucceededEvent) and event.node_id == scenario["resume_llm"] + ) + assert resume_start_index < min(resume_chunk_indices) + assert max(resume_chunk_indices) < resume_success_index + + started_nodes = [event.node_id for event in result.events if isinstance(event, NodeRunStartedEvent)] + assert started_nodes == ["start", "llm_initial", "if_else", scenario["resume_llm"], scenario["end_node"]] diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_factory.py b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_factory.py index 7f802effa6..03de984bd1 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_factory.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_factory.py @@ -27,7 +27,8 @@ from .test_mock_nodes import ( ) if TYPE_CHECKING: - from core.workflow.entities import GraphInitParams, GraphRuntimeState + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState from .test_mock_config import MockConfig diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_iteration_simple.py b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_iteration_simple.py index c39c12925f..48fa00f105 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_iteration_simple.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_iteration_simple.py @@ -42,7 +42,8 @@ def test_mock_iteration_node_preserves_config(): """Test that MockIterationNode preserves mock configuration.""" from core.app.entities.app_invoke_entities import InvokeFrom - from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool from models.enums import UserFrom from tests.unit_tests.core.workflow.graph_engine.test_mock_nodes import MockIterationNode @@ -103,7 +104,8 @@ def test_mock_loop_node_preserves_config(): """Test that MockLoopNode preserves mock configuration.""" from core.app.entities.app_invoke_entities import InvokeFrom - from core.workflow.entities import GraphInitParams, 
GraphRuntimeState, VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool from models.enums import UserFrom from tests.unit_tests.core.workflow.graph_engine.test_mock_nodes import MockLoopNode diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes.py b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes.py index e5ae32bbff..68f57ee9fb 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes.py @@ -24,7 +24,8 @@ from core.workflow.nodes.template_transform import TemplateTransformNode from core.workflow.nodes.tool import ToolNode if TYPE_CHECKING: - from core.workflow.entities import GraphInitParams, GraphRuntimeState + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState from .test_mock_config import MockConfig @@ -561,10 +562,11 @@ class MockIterationNode(MockNodeMixin, IterationNode): def _create_graph_engine(self, index: int, item: Any): """Create a graph engine with MockNodeFactory instead of DifyNodeFactory.""" # Import dependencies - from core.workflow.entities import GraphInitParams, GraphRuntimeState + from core.workflow.entities import GraphInitParams from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine from core.workflow.graph_engine.command_channels import InMemoryChannel + from core.workflow.runtime import GraphRuntimeState # Import our MockNodeFactory instead of DifyNodeFactory from .test_mock_factory import MockNodeFactory @@ -635,10 +637,11 @@ class MockLoopNode(MockNodeMixin, LoopNode): def _create_graph_engine(self, start_at, root_node_id: str): """Create a graph engine with MockNodeFactory instead of DifyNodeFactory.""" # Import dependencies - from core.workflow.entities import GraphInitParams, GraphRuntimeState + from core.workflow.entities import GraphInitParams from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine from core.workflow.graph_engine.command_channels import InMemoryChannel + from core.workflow.runtime import GraphRuntimeState # Import our MockNodeFactory instead of DifyNodeFactory from .test_mock_factory import MockNodeFactory diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes_template_code.py b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes_template_code.py index 394addd5c2..23274f5981 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes_template_code.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_nodes_template_code.py @@ -16,8 +16,8 @@ class TestMockTemplateTransformNode: def test_mock_template_transform_node_default_output(self): """Test that MockTemplateTransformNode processes templates with Jinja2.""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -76,8 +76,8 @@ class TestMockTemplateTransformNode: def test_mock_template_transform_node_custom_output(self): """Test that MockTemplateTransformNode returns custom configured output.""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from 
core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -137,8 +137,8 @@ class TestMockTemplateTransformNode: def test_mock_template_transform_node_error_simulation(self): """Test that MockTemplateTransformNode can simulate errors.""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -196,8 +196,8 @@ class TestMockTemplateTransformNode: def test_mock_template_transform_node_with_variables(self): """Test that MockTemplateTransformNode processes templates with variables.""" from core.variables import StringVariable - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -262,8 +262,8 @@ class TestMockCodeNode: def test_mock_code_node_default_output(self): """Test that MockCodeNode returns default output.""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -323,8 +323,8 @@ class TestMockCodeNode: def test_mock_code_node_with_output_schema(self): """Test that MockCodeNode generates outputs based on schema.""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -392,8 +392,8 @@ class TestMockCodeNode: def test_mock_code_node_custom_output(self): """Test that MockCodeNode returns custom configured output.""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -463,8 +463,8 @@ class TestMockNodeFactory: def test_code_and_template_nodes_mocked_by_default(self): """Test that CODE and TEMPLATE_TRANSFORM nodes are mocked by default (they require SSRF proxy).""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -504,8 +504,8 @@ class TestMockNodeFactory: def test_factory_creates_mock_template_transform_node(self): """Test that MockNodeFactory creates MockTemplateTransformNode for template-transform type.""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import 
GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( @@ -555,8 +555,8 @@ class TestMockNodeFactory: def test_factory_creates_mock_code_node(self): """Test that MockNodeFactory creates MockCodeNode for code type.""" - from core.workflow.entities import GraphInitParams, GraphRuntimeState - from core.workflow.entities.variable_pool import VariablePool + from core.workflow.entities import GraphInitParams + from core.workflow.runtime import GraphRuntimeState, VariablePool # Create test parameters graph_init_params = GraphInitParams( diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_parallel_streaming_workflow.py b/api/tests/unit_tests/core/workflow/graph_engine/test_parallel_streaming_workflow.py index d1f1f53b78..b76fe42fce 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_parallel_streaming_workflow.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_parallel_streaming_workflow.py @@ -13,7 +13,7 @@ from unittest.mock import patch from uuid import uuid4 from core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine @@ -27,6 +27,7 @@ from core.workflow.graph_events import ( from core.workflow.node_events import NodeRunResult, StreamCompletedEvent from core.workflow.nodes.llm.node import LLMNode from core.workflow.nodes.node_factory import DifyNodeFactory +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from models.enums import UserFrom diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_redis_stop_integration.py b/api/tests/unit_tests/core/workflow/graph_engine/test_redis_stop_integration.py index bd41fdeee5..f1a495d20a 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_redis_stop_integration.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_redis_stop_integration.py @@ -13,7 +13,7 @@ import redis from core.app.apps.base_app_queue_manager import AppQueueManager from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel -from core.workflow.graph_engine.entities.commands import AbortCommand, CommandType +from core.workflow.graph_engine.entities.commands import AbortCommand, CommandType, PauseCommand from core.workflow.graph_engine.manager import GraphEngineManager @@ -52,6 +52,29 @@ class TestRedisStopIntegration: assert command_data["command_type"] == CommandType.ABORT assert command_data["reason"] == "Test stop" + def test_graph_engine_manager_sends_pause_command(self): + """Test that GraphEngineManager correctly sends pause command through Redis.""" + task_id = "test-task-pause-123" + expected_channel_key = f"workflow:{task_id}:commands" + + mock_redis = MagicMock() + mock_pipeline = MagicMock() + mock_redis.pipeline.return_value.__enter__ = Mock(return_value=mock_pipeline) + mock_redis.pipeline.return_value.__exit__ = Mock(return_value=None) + + with patch("core.workflow.graph_engine.manager.redis_client", mock_redis): + GraphEngineManager.send_pause_command(task_id, reason="Awaiting resources") + + mock_redis.pipeline.assert_called_once() + calls = mock_pipeline.rpush.call_args_list + assert len(calls) == 1 + assert calls[0][0][0] == 
expected_channel_key + + command_json = calls[0][0][1] + command_data = json.loads(command_json) + assert command_data["command_type"] == CommandType.PAUSE.value + assert command_data["reason"] == "Awaiting resources" + def test_graph_engine_manager_handles_redis_failure_gracefully(self): """Test that GraphEngineManager handles Redis failures without raising exceptions.""" task_id = "test-task-456" @@ -105,43 +128,64 @@ class TestRedisStopIntegration: channel_key = "workflow:test:commands" channel = RedisChannel(mock_redis, channel_key) - # Create abort command + # Create commands abort_command = AbortCommand(reason="User requested stop") + pause_command = PauseCommand(reason="User requested pause") # Execute channel.send_command(abort_command) + channel.send_command(pause_command) # Verify - mock_redis.pipeline.assert_called_once() + mock_redis.pipeline.assert_called() # Check rpush was called calls = mock_pipeline.rpush.call_args_list - assert len(calls) == 1 + assert len(calls) == 2 assert calls[0][0][0] == channel_key + assert calls[1][0][0] == channel_key - # Verify serialized command - command_json = calls[0][0][1] - command_data = json.loads(command_json) - assert command_data["command_type"] == CommandType.ABORT - assert command_data["reason"] == "User requested stop" + # Verify serialized commands + abort_command_json = calls[0][0][1] + abort_command_data = json.loads(abort_command_json) + assert abort_command_data["command_type"] == CommandType.ABORT.value + assert abort_command_data["reason"] == "User requested stop" - # Check expire was set - mock_pipeline.expire.assert_called_once_with(channel_key, 3600) + pause_command_json = calls[1][0][1] + pause_command_data = json.loads(pause_command_json) + assert pause_command_data["command_type"] == CommandType.PAUSE.value + assert pause_command_data["reason"] == "User requested pause" + + # Check expire was set for each + assert mock_pipeline.expire.call_count == 2 + mock_pipeline.expire.assert_any_call(channel_key, 3600) def test_redis_channel_fetch_commands(self): """Test RedisChannel correctly fetches and deserializes commands.""" # Setup mock_redis = MagicMock() - mock_pipeline = MagicMock() - mock_redis.pipeline.return_value.__enter__ = Mock(return_value=mock_pipeline) - mock_redis.pipeline.return_value.__exit__ = Mock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] # Mock command data - abort_command_json = json.dumps({"command_type": CommandType.ABORT, "reason": "Test abort", "payload": None}) + abort_command_json = json.dumps( + {"command_type": CommandType.ABORT.value, "reason": "Test abort", "payload": None} + ) + pause_command_json = json.dumps( + {"command_type": CommandType.PAUSE.value, "reason": "Pause requested", "payload": None} + ) # Mock pipeline execute to return commands - mock_pipeline.execute.return_value = [ - [abort_command_json.encode()], # lrange result + pending_pipe.execute.return_value = [b"1", 1] + fetch_pipe.execute.return_value = [ + [abort_command_json.encode(), pause_command_json.encode()], # lrange result True, # delete result ] @@ -152,25 +196,38 @@ class TestRedisStopIntegration: commands = channel.fetch_commands() # Verify - assert len(commands) 
== 1 + assert len(commands) == 2 assert isinstance(commands[0], AbortCommand) assert commands[0].command_type == CommandType.ABORT assert commands[0].reason == "Test abort" + assert isinstance(commands[1], PauseCommand) + assert commands[1].command_type == CommandType.PAUSE + assert commands[1].reason == "Pause requested" # Verify Redis operations - mock_pipeline.lrange.assert_called_once_with(channel_key, 0, -1) - mock_pipeline.delete.assert_called_once_with(channel_key) + pending_pipe.get.assert_called_once_with(f"{channel_key}:pending") + pending_pipe.delete.assert_called_once_with(f"{channel_key}:pending") + fetch_pipe.lrange.assert_called_once_with(channel_key, 0, -1) + fetch_pipe.delete.assert_called_once_with(channel_key) + assert mock_redis.pipeline.call_count == 2 def test_redis_channel_fetch_commands_handles_invalid_json(self): """Test RedisChannel gracefully handles invalid JSON in commands.""" # Setup mock_redis = MagicMock() - mock_pipeline = MagicMock() - mock_redis.pipeline.return_value.__enter__ = Mock(return_value=mock_pipeline) - mock_redis.pipeline.return_value.__exit__ = Mock(return_value=None) + pending_pipe = MagicMock() + fetch_pipe = MagicMock() + pending_context = MagicMock() + fetch_context = MagicMock() + pending_context.__enter__.return_value = pending_pipe + pending_context.__exit__.return_value = None + fetch_context.__enter__.return_value = fetch_pipe + fetch_context.__exit__.return_value = None + mock_redis.pipeline.side_effect = [pending_context, fetch_context] # Mock invalid command data - mock_pipeline.execute.return_value = [ + pending_pipe.execute.return_value = [b"1", 1] + fetch_pipe.execute.return_value = [ [b"invalid json", b'{"command_type": "invalid_type"}'], # lrange result True, # delete result ] diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_table_runner.py b/api/tests/unit_tests/core/workflow/graph_engine/test_table_runner.py index 0f3a142b1a..08f7b00a33 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_table_runner.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_table_runner.py @@ -29,7 +29,6 @@ from core.variables import ( ObjectVariable, StringVariable, ) -from core.workflow.entities import GraphRuntimeState, VariablePool from core.workflow.entities.graph_init_params import GraphInitParams from core.workflow.graph import Graph from core.workflow.graph_engine import GraphEngine @@ -40,6 +39,7 @@ from core.workflow.graph_events import ( GraphRunSucceededEvent, ) from core.workflow.nodes.node_factory import DifyNodeFactory +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from .test_mock_config import MockConfig @@ -52,8 +52,8 @@ logger = logging.getLogger(__name__) class WorkflowTestCase: """Represents a single test case for table-driven testing.""" - fixture_path: str - expected_outputs: dict[str, Any] + fixture_path: str = "" + expected_outputs: dict[str, Any] = field(default_factory=dict) inputs: dict[str, Any] = field(default_factory=dict) query: str = "" description: str = "" @@ -61,11 +61,7 @@ class WorkflowTestCase: mock_config: MockConfig | None = None use_auto_mock: bool = False expected_event_sequence: Sequence[type[GraphEngineEvent]] | None = None - tags: list[str] = field(default_factory=list) - skip: bool = False - skip_reason: str = "" - retry_count: int = 0 - custom_validator: Callable[[dict[str, Any]], bool] | None = None + graph_factory: Callable[[], tuple[Graph, GraphRuntimeState]] | None = None 
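+ # graph_factory lets a test supply a prebuilt (Graph, GraphRuntimeState) pair
+ # instead of loading a workflow fixture via fixture_path; when set,
+ # _create_graph_runtime_state uses it directly. Typical resume-style usage,
+ # with saved_graph/saved_state standing in for snapshots a test captured earlier:
+ #     WorkflowTestCase(
+ #         description="resume from snapshot",
+ #         graph_factory=lambda: (saved_graph, saved_state),
+ #         expected_event_sequence=[...],
+ #     )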
@dataclass @@ -80,7 +76,8 @@ class WorkflowTestResult: event_sequence_match: bool | None = None event_mismatch_details: str | None = None events: list[GraphEngineEvent] = field(default_factory=list) - retry_attempts: int = 0 + graph: Graph | None = None + graph_runtime_state: GraphRuntimeState | None = None validation_details: str | None = None @@ -91,7 +88,6 @@ class TestSuiteResult: total_tests: int passed_tests: int failed_tests: int - skipped_tests: int total_execution_time: float results: list[WorkflowTestResult] @@ -106,10 +102,6 @@ class TestSuiteResult: """Get all failed test results.""" return [r for r in self.results if not r.success] - def get_results_by_tag(self, tag: str) -> list[WorkflowTestResult]: - """Get test results filtered by tag.""" - return [r for r in self.results if tag in r.test_case.tags] - class WorkflowRunner: """Core workflow execution engine for tests.""" @@ -286,90 +278,30 @@ class TableTestRunner: Returns: WorkflowTestResult with execution details """ - if test_case.skip: - self.logger.info("Skipping test: %s - %s", test_case.description, test_case.skip_reason) - return WorkflowTestResult( - test_case=test_case, - success=True, - execution_time=0.0, - validation_details=f"Skipped: {test_case.skip_reason}", - ) - - retry_attempts = 0 - last_result = None - last_error = None start_time = time.perf_counter() - for attempt in range(test_case.retry_count + 1): - start_time = time.perf_counter() - - try: - result = self._execute_test_case(test_case) - last_result = result # Save the last result - - if result.success: - result.retry_attempts = retry_attempts - self.logger.info("Test passed: %s", test_case.description) - return result - - last_error = result.error - retry_attempts += 1 - - if attempt < test_case.retry_count: - self.logger.warning( - "Test failed (attempt %d/%d): %s", - attempt + 1, - test_case.retry_count + 1, - test_case.description, - ) - time.sleep(0.5 * (attempt + 1)) # Exponential backoff - - except Exception as e: - last_error = e - retry_attempts += 1 - - if attempt < test_case.retry_count: - self.logger.warning( - "Test error (attempt %d/%d): %s - %s", - attempt + 1, - test_case.retry_count + 1, - test_case.description, - str(e), - ) - time.sleep(0.5 * (attempt + 1)) - - # All retries failed - return the last result if available - if last_result: - last_result.retry_attempts = retry_attempts - self.logger.error("Test failed after %d attempts: %s", retry_attempts, test_case.description) - return last_result - - # If no result available (all attempts threw exceptions), create a failure result - self.logger.error("Test failed after %d attempts: %s", retry_attempts, test_case.description) - return WorkflowTestResult( - test_case=test_case, - success=False, - error=last_error, - execution_time=time.perf_counter() - start_time, - retry_attempts=retry_attempts, - ) + try: + result = self._execute_test_case(test_case) + if result.success: + self.logger.info("Test passed: %s", test_case.description) + else: + self.logger.error("Test failed: %s", test_case.description) + return result + except Exception as exc: + self.logger.exception("Error executing test case: %s", test_case.description) + return WorkflowTestResult( + test_case=test_case, + success=False, + error=exc, + execution_time=time.perf_counter() - start_time, + ) def _execute_test_case(self, test_case: WorkflowTestCase) -> WorkflowTestResult: """Internal method to execute a single test case.""" start_time = time.perf_counter() try: - # Load fixture data - fixture_data = 
self.workflow_runner.load_fixture(test_case.fixture_path) - - # Create graph from fixture - graph, graph_runtime_state = self.workflow_runner.create_graph_from_fixture( - fixture_data=fixture_data, - inputs=test_case.inputs, - query=test_case.query, - use_mock_factory=test_case.use_auto_mock, - mock_config=test_case.mock_config, - ) + graph, graph_runtime_state = self._create_graph_runtime_state(test_case) # Create and run the engine with configured worker settings engine = GraphEngine( @@ -384,7 +316,7 @@ class TableTestRunner: ) # Execute and collect events - events = [] + events: list[GraphEngineEvent] = [] for event in engine.run(): events.append(event) @@ -416,6 +348,8 @@ class TableTestRunner: events=events, event_sequence_match=event_sequence_match, event_mismatch_details=event_mismatch_details, + graph=graph, + graph_runtime_state=graph_runtime_state, ) # Get actual outputs @@ -423,9 +357,7 @@ class TableTestRunner: actual_outputs = success_event.outputs or {} # Validate outputs - output_success, validation_details = self._validate_outputs( - test_case.expected_outputs, actual_outputs, test_case.custom_validator - ) + output_success, validation_details = self._validate_outputs(test_case.expected_outputs, actual_outputs) # Overall success requires both output and event sequence validation success = output_success and (event_sequence_match if event_sequence_match is not None else True) @@ -440,6 +372,8 @@ class TableTestRunner: events=events, validation_details=validation_details, error=None if success else Exception(validation_details or event_mismatch_details or "Test failed"), + graph=graph, + graph_runtime_state=graph_runtime_state, ) except Exception as e: @@ -449,13 +383,33 @@ class TableTestRunner: success=False, error=e, execution_time=time.perf_counter() - start_time, + graph=graph if "graph" in locals() else None, + graph_runtime_state=graph_runtime_state if "graph_runtime_state" in locals() else None, ) + def _create_graph_runtime_state(self, test_case: WorkflowTestCase) -> tuple[Graph, GraphRuntimeState]: + """Create or retrieve graph/runtime state according to test configuration.""" + + if test_case.graph_factory is not None: + return test_case.graph_factory() + + if not test_case.fixture_path: + raise ValueError("fixture_path must be provided when graph_factory is not specified") + + fixture_data = self.workflow_runner.load_fixture(test_case.fixture_path) + + return self.workflow_runner.create_graph_from_fixture( + fixture_data=fixture_data, + inputs=test_case.inputs, + query=test_case.query, + use_mock_factory=test_case.use_auto_mock, + mock_config=test_case.mock_config, + ) + def _validate_outputs( self, expected_outputs: dict[str, Any], actual_outputs: dict[str, Any], - custom_validator: Callable[[dict[str, Any]], bool] | None = None, ) -> tuple[bool, str | None]: """ Validate actual outputs against expected outputs. 
@@ -490,14 +444,6 @@ class TableTestRunner: f"Value mismatch for key '{key}':\n Expected: {expected_value}\n Actual: {actual_value}" ) - # Apply custom validator if provided - if custom_validator: - try: - if not custom_validator(actual_outputs): - validation_errors.append("Custom validator failed") - except Exception as e: - validation_errors.append(f"Custom validator error: {str(e)}") - if validation_errors: return False, "\n".join(validation_errors) @@ -537,7 +483,6 @@ class TableTestRunner: self, test_cases: list[WorkflowTestCase], parallel: bool = False, - tags_filter: list[str] | None = None, fail_fast: bool = False, ) -> TestSuiteResult: """ @@ -546,22 +491,16 @@ class TableTestRunner: Args: test_cases: List of test cases to execute parallel: Run tests in parallel - tags_filter: Only run tests with specified tags - fail_fast: Stop execution on first failure + fail_fast: Stop execution on first failure Returns: TestSuiteResult with aggregated results """ - # Filter by tags if specified - if tags_filter: - test_cases = [tc for tc in test_cases if any(tag in tc.tags for tag in tags_filter)] - if not test_cases: return TestSuiteResult( total_tests=0, passed_tests=0, failed_tests=0, - skipped_tests=0, total_execution_time=0.0, results=[], ) @@ -576,16 +515,14 @@ class TableTestRunner: # Calculate statistics total_tests = len(results) - passed_tests = sum(1 for r in results if r.success and not r.test_case.skip) - failed_tests = sum(1 for r in results if not r.success and not r.test_case.skip) - skipped_tests = sum(1 for r in results if r.test_case.skip) + passed_tests = sum(1 for r in results if r.success) + failed_tests = total_tests - passed_tests total_execution_time = time.perf_counter() - start_time return TestSuiteResult( total_tests=total_tests, passed_tests=passed_tests, failed_tests=failed_tests, - skipped_tests=skipped_tests, total_execution_time=total_execution_time, results=results, ) @@ -598,7 +535,7 @@ class TableTestRunner: result = self.run_test_case(test_case) results.append(result) - if fail_fast and not result.success and not result.test_case.skip: + if fail_fast and not result.success: self.logger.info("Fail-fast enabled: stopping execution") break @@ -618,11 +555,11 @@ class TableTestRunner: result = future.result() results.append(result) - if fail_fast and not result.success and not result.test_case.skip: + if fail_fast and not result.success: self.logger.info("Fail-fast enabled: cancelling remaining tests") - # Cancel remaining futures - for f in future_to_test: - f.cancel() + for remaining_future in future_to_test: + if not remaining_future.done(): + remaining_future.cancel() break except Exception as e: @@ -636,8 +573,9 @@ class TableTestRunner: ) if fail_fast: - for f in future_to_test: - f.cancel() + for remaining_future in future_to_test: + if not remaining_future.done(): + remaining_future.cancel() break return results @@ -663,7 +601,6 @@ class TableTestRunner: report.append(f" Total Tests: {suite_result.total_tests}") report.append(f" Passed: {suite_result.passed_tests}") report.append(f" Failed: {suite_result.failed_tests}") - report.append(f" Skipped: {suite_result.skipped_tests}") report.append(f" Success Rate: {suite_result.success_rate:.1f}%") report.append(f" Total Time: {suite_result.total_execution_time:.2f}s") report.append("") diff --git a/api/tests/unit_tests/core/workflow/nodes/answer/test_answer.py b/api/tests/unit_tests/core/workflow/nodes/answer/test_answer.py index 79f3f45ce2..d151bbe015 100644 --- 
a/api/tests/unit_tests/core/workflow/nodes/answer/test_answer.py +++ b/api/tests/unit_tests/core/workflow/nodes/answer/test_answer.py @@ -3,11 +3,12 @@ import uuid from unittest.mock import MagicMock from core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.nodes.answer.answer_node import AnswerNode from core.workflow.nodes.node_factory import DifyNodeFactory +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from extensions.ext_database import db from models.enums import UserFrom diff --git a/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_executor.py b/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_executor.py index b34f73be5f..f040a92b6f 100644 --- a/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_executor.py +++ b/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_executor.py @@ -1,4 +1,3 @@ -from core.workflow.entities import VariablePool from core.workflow.nodes.http_request import ( BodyData, HttpRequestNodeAuthorization, @@ -7,6 +6,7 @@ from core.workflow.nodes.http_request import ( ) from core.workflow.nodes.http_request.entities import HttpRequestNodeTimeout from core.workflow.nodes.http_request.executor import Executor +from core.workflow.runtime import VariablePool from core.workflow.system_variable import SystemVariable diff --git a/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py b/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py index 61ce640edd..3ffb5c0fdf 100644 --- a/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py +++ b/api/tests/unit_tests/core/workflow/nodes/llm/test_node.py @@ -20,8 +20,7 @@ from core.model_runtime.entities.message_entities import ( from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory from core.variables import ArrayAnySegment, ArrayFileSegment, NoneSegment -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool -from core.workflow.graph import Graph +from core.workflow.entities import GraphInitParams from core.workflow.nodes.llm import llm_utils from core.workflow.nodes.llm.entities import ( ContextConfig, @@ -33,6 +32,7 @@ from core.workflow.nodes.llm.entities import ( ) from core.workflow.nodes.llm.file_saver import LLMFileSaver from core.workflow.nodes.llm.node import LLMNode +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from models.enums import UserFrom from models.provider import ProviderType @@ -83,14 +83,6 @@ def graph_init_params() -> GraphInitParams: ) -@pytest.fixture -def graph() -> Graph: - # TODO: This fixture uses old Graph constructor parameters that are incompatible - # with the new queue-based engine. Need to rewrite for new engine architecture. 
- pytest.skip("Graph fixture incompatible with new queue-based engine - needs rewrite for ResponseStreamCoordinator") - return Graph() - - @pytest.fixture def graph_runtime_state() -> GraphRuntimeState: variable_pool = VariablePool( @@ -105,7 +97,7 @@ def graph_runtime_state() -> GraphRuntimeState: @pytest.fixture def llm_node( - llm_node_data: LLMNodeData, graph_init_params: GraphInitParams, graph: Graph, graph_runtime_state: GraphRuntimeState + llm_node_data: LLMNodeData, graph_init_params: GraphInitParams, graph_runtime_state: GraphRuntimeState ) -> LLMNode: mock_file_saver = mock.MagicMock(spec=LLMFileSaver) node_config = { @@ -493,9 +485,7 @@ def test_handle_list_messages_basic(llm_node): @pytest.fixture -def llm_node_for_multimodal( - llm_node_data, graph_init_params, graph, graph_runtime_state -) -> tuple[LLMNode, LLMFileSaver]: +def llm_node_for_multimodal(llm_node_data, graph_init_params, graph_runtime_state) -> tuple[LLMNode, LLMFileSaver]: mock_file_saver: LLMFileSaver = mock.MagicMock(spec=LLMFileSaver) node_config = { "id": "1", @@ -655,7 +645,7 @@ class TestSaveMultimodalOutputAndConvertResultToMarkdown: gen = llm_node._save_multimodal_output_and_convert_result_to_markdown( contents=frozenset(["hello world"]), file_saver=mock_file_saver, file_outputs=[] ) - assert list(gen) == ["frozenset({'hello world'})"] + assert list(gen) == ["hello world"] mock_file_saver.save_binary_string.assert_not_called() mock_file_saver.save_remote_url.assert_not_called() diff --git a/api/tests/unit_tests/core/workflow/nodes/test_if_else.py b/api/tests/unit_tests/core/workflow/nodes/test_if_else.py index 69e0052543..962e43a897 100644 --- a/api/tests/unit_tests/core/workflow/nodes/test_if_else.py +++ b/api/tests/unit_tests/core/workflow/nodes/test_if_else.py @@ -7,12 +7,13 @@ import pytest from core.app.entities.app_invoke_entities import InvokeFrom from core.file import File, FileTransferMethod, FileType from core.variables import ArrayFileSegment -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.enums import WorkflowNodeExecutionStatus from core.workflow.graph import Graph from core.workflow.nodes.if_else.entities import IfElseNodeData from core.workflow.nodes.if_else.if_else_node import IfElseNode from core.workflow.nodes.node_factory import DifyNodeFactory +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.utils.condition.entities import Condition, SubCondition, SubVariableCondition from extensions.ext_database import db diff --git a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py index 6189febdf5..6af4777e0e 100644 --- a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py +++ b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py @@ -6,11 +6,12 @@ from uuid import uuid4 from core.app.entities.app_invoke_entities import InvokeFrom from core.variables import ArrayStringVariable, StringVariable from core.workflow.conversation_variable_updater import ConversationVariableUpdater -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.graph import Graph from core.workflow.nodes.node_factory import 
DifyNodeFactory from core.workflow.nodes.variable_assigner.v1 import VariableAssignerNode from core.workflow.nodes.variable_assigner.v1.node_data import WriteMode +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from models.enums import UserFrom diff --git a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v2/test_variable_assigner_v2.py b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v2/test_variable_assigner_v2.py index b842dfdb58..80071c8616 100644 --- a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v2/test_variable_assigner_v2.py +++ b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v2/test_variable_assigner_v2.py @@ -4,11 +4,12 @@ from uuid import uuid4 from core.app.entities.app_invoke_entities import InvokeFrom from core.variables import ArrayStringVariable -from core.workflow.entities import GraphInitParams, GraphRuntimeState, VariablePool +from core.workflow.entities import GraphInitParams from core.workflow.graph import Graph from core.workflow.nodes.node_factory import DifyNodeFactory from core.workflow.nodes.variable_assigner.v2 import VariableAssignerNode from core.workflow.nodes.variable_assigner.v2.enums import InputType, Operation +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.system_variable import SystemVariable from models.enums import UserFrom diff --git a/api/tests/unit_tests/core/workflow/test_variable_pool.py b/api/tests/unit_tests/core/workflow/test_variable_pool.py index 66d9d3fc14..9733bf60eb 100644 --- a/api/tests/unit_tests/core/workflow/test_variable_pool.py +++ b/api/tests/unit_tests/core/workflow/test_variable_pool.py @@ -27,7 +27,7 @@ from core.variables.variables import ( VariableUnion, ) from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID -from core.workflow.entities import VariablePool +from core.workflow.runtime import VariablePool from core.workflow.system_variable import SystemVariable from factories.variable_factory import build_segment, segment_to_variable diff --git a/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py b/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py deleted file mode 100644 index 9f8f52015b..0000000000 --- a/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py +++ /dev/null @@ -1,476 +0,0 @@ -import json -from unittest.mock import MagicMock - -import pytest -from sqlalchemy.orm import Session - -from core.app.app_config.entities import AppAdditionalFeatures, WorkflowUIBasedAppConfig -from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom -from core.app.entities.queue_entities import ( - QueueNodeFailedEvent, - QueueNodeStartedEvent, - QueueNodeSucceededEvent, -) -from core.workflow.entities import ( - WorkflowExecution, - WorkflowNodeExecution, -) -from core.workflow.enums import ( - WorkflowExecutionStatus, - WorkflowNodeExecutionMetadataKey, - WorkflowNodeExecutionStatus, - WorkflowType, -) -from core.workflow.nodes import NodeType -from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository -from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository -from core.workflow.system_variable import SystemVariable -from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager -from libs.datetime_utils import 
naive_utc_now -from models.enums import CreatorUserRole -from models.model import AppMode -from models.workflow import Workflow, WorkflowRun - - -@pytest.fixture -def real_app_generate_entity(): - additional_features = AppAdditionalFeatures( - file_upload=None, - opening_statement=None, - suggested_questions=[], - suggested_questions_after_answer=False, - show_retrieve_source=False, - more_like_this=False, - speech_to_text=False, - text_to_speech=None, - trace_config=None, - ) - - app_config = WorkflowUIBasedAppConfig( - tenant_id="test-tenant-id", - app_id="test-app-id", - app_mode=AppMode.WORKFLOW, - additional_features=additional_features, - workflow_id="test-workflow-id", - ) - - entity = AdvancedChatAppGenerateEntity( - task_id="test-task-id", - app_config=app_config, - inputs={"query": "test query"}, - files=[], - user_id="test-user-id", - stream=False, - invoke_from=InvokeFrom.WEB_APP, - query="test query", - conversation_id="test-conversation-id", - ) - - return entity - - -@pytest.fixture -def real_workflow_system_variables(): - return SystemVariable( - query="test query", - conversation_id="test-conversation-id", - user_id="test-user-id", - app_id="test-app-id", - workflow_id="test-workflow-id", - workflow_execution_id="test-workflow-run-id", - ) - - -@pytest.fixture -def mock_node_execution_repository(): - repo = MagicMock(spec=WorkflowNodeExecutionRepository) - return repo - - -@pytest.fixture -def mock_workflow_execution_repository(): - repo = MagicMock(spec=WorkflowExecutionRepository) - return repo - - -@pytest.fixture -def real_workflow_entity(): - return CycleManagerWorkflowInfo( - workflow_id="test-workflow-id", # Matches ID used in other fixtures - workflow_type=WorkflowType.WORKFLOW, - version="1.0.0", - graph_data={ - "nodes": [ - { - "id": "node1", - "type": "chat", # NodeType is a string enum - "name": "Chat Node", - "data": {"model": "gpt-3.5-turbo", "prompt": "test prompt"}, - } - ], - "edges": [], - }, - ) - - -@pytest.fixture -def workflow_cycle_manager( - real_app_generate_entity, - real_workflow_system_variables, - mock_workflow_execution_repository, - mock_node_execution_repository, - real_workflow_entity, -): - return WorkflowCycleManager( - application_generate_entity=real_app_generate_entity, - workflow_system_variables=real_workflow_system_variables, - workflow_info=real_workflow_entity, - workflow_execution_repository=mock_workflow_execution_repository, - workflow_node_execution_repository=mock_node_execution_repository, - ) - - -@pytest.fixture -def mock_session(): - session = MagicMock(spec=Session) - return session - - -@pytest.fixture -def real_workflow(): - workflow = Workflow() - workflow.id = "test-workflow-id" - workflow.tenant_id = "test-tenant-id" - workflow.app_id = "test-app-id" - workflow.type = "chat" - workflow.version = "1.0" - - graph_data = {"nodes": [], "edges": []} - workflow.graph = json.dumps(graph_data) - workflow.features = json.dumps({"file_upload": {"enabled": False}}) - workflow.created_by = "test-user-id" - workflow.created_at = naive_utc_now() - workflow.updated_at = naive_utc_now() - workflow._environment_variables = "{}" - workflow._conversation_variables = "{}" - - return workflow - - -@pytest.fixture -def real_workflow_run(): - workflow_run = WorkflowRun() - workflow_run.id = "test-workflow-run-id" - workflow_run.tenant_id = "test-tenant-id" - workflow_run.app_id = "test-app-id" - workflow_run.workflow_id = "test-workflow-id" - workflow_run.type = "chat" - workflow_run.triggered_from = "app-run" - workflow_run.version = 
"1.0" - workflow_run.graph = json.dumps({"nodes": [], "edges": []}) - workflow_run.inputs = json.dumps({"query": "test query"}) - workflow_run.status = WorkflowExecutionStatus.RUNNING - workflow_run.outputs = json.dumps({"answer": "test answer"}) - workflow_run.created_by_role = CreatorUserRole.ACCOUNT - workflow_run.created_by = "test-user-id" - workflow_run.created_at = naive_utc_now() - - return workflow_run - - -def test_init( - workflow_cycle_manager, - real_app_generate_entity, - real_workflow_system_variables, - mock_workflow_execution_repository, - mock_node_execution_repository, -): - """Test initialization of WorkflowCycleManager""" - assert workflow_cycle_manager._application_generate_entity == real_app_generate_entity - assert workflow_cycle_manager._workflow_system_variables == real_workflow_system_variables - assert workflow_cycle_manager._workflow_execution_repository == mock_workflow_execution_repository - assert workflow_cycle_manager._workflow_node_execution_repository == mock_node_execution_repository - - -def test_handle_workflow_run_start(workflow_cycle_manager): - """Test handle_workflow_run_start method""" - # Call the method - workflow_execution = workflow_cycle_manager.handle_workflow_run_start() - - # Verify the result - assert workflow_execution.workflow_id == "test-workflow-id" - - # Verify the workflow_execution_repository.save was called - workflow_cycle_manager._workflow_execution_repository.save.assert_called_once_with(workflow_execution) - - -def test_handle_workflow_run_success(workflow_cycle_manager, mock_workflow_execution_repository): - """Test handle_workflow_run_success method""" - # Create a real WorkflowExecution - - workflow_execution = WorkflowExecution( - id_="test-workflow-run-id", - workflow_id="test-workflow-id", - workflow_type=WorkflowType.WORKFLOW, - workflow_version="1.0", - graph={"nodes": [], "edges": []}, - inputs={"query": "test query"}, - started_at=naive_utc_now(), - ) - - # Pre-populate the cache with the workflow execution - workflow_cycle_manager._workflow_execution_cache[workflow_execution.id_] = workflow_execution - - # Call the method - result = workflow_cycle_manager.handle_workflow_run_success( - workflow_run_id="test-workflow-run-id", - total_tokens=100, - total_steps=5, - outputs={"answer": "test answer"}, - ) - - # Verify the result - assert result == workflow_execution - assert result.status == WorkflowExecutionStatus.SUCCEEDED - assert result.outputs == {"answer": "test answer"} - assert result.total_tokens == 100 - assert result.total_steps == 5 - assert result.finished_at is not None - - -def test_handle_workflow_run_failed(workflow_cycle_manager, mock_workflow_execution_repository): - """Test handle_workflow_run_failed method""" - # Create a real WorkflowExecution - - workflow_execution = WorkflowExecution( - id_="test-workflow-run-id", - workflow_id="test-workflow-id", - workflow_type=WorkflowType.WORKFLOW, - workflow_version="1.0", - graph={"nodes": [], "edges": []}, - inputs={"query": "test query"}, - started_at=naive_utc_now(), - ) - - # Pre-populate the cache with the workflow execution - workflow_cycle_manager._workflow_execution_cache[workflow_execution.id_] = workflow_execution - - # No running node executions in cache (empty cache) - - # Call the method - result = workflow_cycle_manager.handle_workflow_run_failed( - workflow_run_id="test-workflow-run-id", - total_tokens=50, - total_steps=3, - status=WorkflowExecutionStatus.FAILED, - error_message="Test error message", - ) - - # Verify the result - assert 
result == workflow_execution - assert result.status == WorkflowExecutionStatus.FAILED - assert result.error_message == "Test error message" - assert result.total_tokens == 50 - assert result.total_steps == 3 - assert result.finished_at is not None - - -def test_handle_node_execution_start(workflow_cycle_manager, mock_workflow_execution_repository): - """Test handle_node_execution_start method""" - # Create a real WorkflowExecution - - workflow_execution = WorkflowExecution( - id_="test-workflow-execution-id", - workflow_id="test-workflow-id", - workflow_type=WorkflowType.WORKFLOW, - workflow_version="1.0", - graph={"nodes": [], "edges": []}, - inputs={"query": "test query"}, - started_at=naive_utc_now(), - ) - - # Pre-populate the cache with the workflow execution - workflow_cycle_manager._workflow_execution_cache[workflow_execution.id_] = workflow_execution - - # Create a mock event - event = MagicMock(spec=QueueNodeStartedEvent) - event.node_execution_id = "test-node-execution-id" - event.node_id = "test-node-id" - event.node_type = NodeType.LLM - event.node_title = "Test Node" - event.predecessor_node_id = "test-predecessor-node-id" - event.node_run_index = 1 - event.parallel_mode_run_id = "test-parallel-mode-run-id" - event.in_iteration_id = "test-iteration-id" - event.in_loop_id = "test-loop-id" - - # Call the method - result = workflow_cycle_manager.handle_node_execution_start( - workflow_execution_id=workflow_execution.id_, - event=event, - ) - - # Verify the result - assert result.workflow_id == workflow_execution.workflow_id - assert result.workflow_execution_id == workflow_execution.id_ - assert result.node_execution_id == event.node_execution_id - assert result.node_id == event.node_id - assert result.node_type == event.node_type - assert result.title == event.node_title - assert result.status == WorkflowNodeExecutionStatus.RUNNING - - # Verify save was called - workflow_cycle_manager._workflow_node_execution_repository.save.assert_called_once_with(result) - - -def test_get_workflow_execution_or_raise_error(workflow_cycle_manager, mock_workflow_execution_repository): - """Test _get_workflow_execution_or_raise_error method""" - # Create a real WorkflowExecution - - workflow_execution = WorkflowExecution( - id_="test-workflow-run-id", - workflow_id="test-workflow-id", - workflow_type=WorkflowType.WORKFLOW, - workflow_version="1.0", - graph={"nodes": [], "edges": []}, - inputs={"query": "test query"}, - started_at=naive_utc_now(), - ) - - # Pre-populate the cache with the workflow execution - workflow_cycle_manager._workflow_execution_cache["test-workflow-run-id"] = workflow_execution - - # Call the method - result = workflow_cycle_manager._get_workflow_execution_or_raise_error("test-workflow-run-id") - - # Verify the result - assert result == workflow_execution - - # Test error case - clear cache - workflow_cycle_manager._workflow_execution_cache.clear() - - # Expect an error when execution is not found - from core.app.task_pipeline.exc import WorkflowRunNotFoundError - - with pytest.raises(WorkflowRunNotFoundError): - workflow_cycle_manager._get_workflow_execution_or_raise_error("non-existent-id") - - -def test_handle_workflow_node_execution_success(workflow_cycle_manager): - """Test handle_workflow_node_execution_success method""" - # Create a mock event - event = MagicMock(spec=QueueNodeSucceededEvent) - event.node_execution_id = "test-node-execution-id" - event.inputs = {"input": "test input"} - event.process_data = {"process": "test process"} - event.outputs = {"output": 
"test output"} - event.execution_metadata = {WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: 100} - event.start_at = naive_utc_now() - - # Create a real node execution - - node_execution = WorkflowNodeExecution( - id="test-node-execution-record-id", - node_execution_id="test-node-execution-id", - workflow_id="test-workflow-id", - workflow_execution_id="test-workflow-run-id", - index=1, - node_id="test-node-id", - node_type=NodeType.LLM, - title="Test Node", - created_at=naive_utc_now(), - ) - - # Pre-populate the cache with the node execution - workflow_cycle_manager._node_execution_cache["test-node-execution-id"] = node_execution - - # Call the method - result = workflow_cycle_manager.handle_workflow_node_execution_success( - event=event, - ) - - # Verify the result - assert result == node_execution - assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED - - # Verify save was called - workflow_cycle_manager._workflow_node_execution_repository.save.assert_called_once_with(node_execution) - - -def test_handle_workflow_run_partial_success(workflow_cycle_manager, mock_workflow_execution_repository): - """Test handle_workflow_run_partial_success method""" - # Create a real WorkflowExecution - - workflow_execution = WorkflowExecution( - id_="test-workflow-run-id", - workflow_id="test-workflow-id", - workflow_type=WorkflowType.WORKFLOW, - workflow_version="1.0", - graph={"nodes": [], "edges": []}, - inputs={"query": "test query"}, - started_at=naive_utc_now(), - ) - - # Pre-populate the cache with the workflow execution - workflow_cycle_manager._workflow_execution_cache[workflow_execution.id_] = workflow_execution - - # Call the method - result = workflow_cycle_manager.handle_workflow_run_partial_success( - workflow_run_id="test-workflow-run-id", - total_tokens=75, - total_steps=4, - outputs={"partial_answer": "test partial answer"}, - exceptions_count=2, - ) - - # Verify the result - assert result == workflow_execution - assert result.status == WorkflowExecutionStatus.PARTIAL_SUCCEEDED - assert result.outputs == {"partial_answer": "test partial answer"} - assert result.total_tokens == 75 - assert result.total_steps == 4 - assert result.exceptions_count == 2 - assert result.finished_at is not None - - -def test_handle_workflow_node_execution_failed(workflow_cycle_manager): - """Test handle_workflow_node_execution_failed method""" - # Create a mock event - event = MagicMock(spec=QueueNodeFailedEvent) - event.node_execution_id = "test-node-execution-id" - event.inputs = {"input": "test input"} - event.process_data = {"process": "test process"} - event.outputs = {"output": "test output"} - event.execution_metadata = {WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: 100} - event.start_at = naive_utc_now() - event.error = "Test error message" - - # Create a real node execution - - node_execution = WorkflowNodeExecution( - id="test-node-execution-record-id", - node_execution_id="test-node-execution-id", - workflow_id="test-workflow-id", - workflow_execution_id="test-workflow-run-id", - index=1, - node_id="test-node-id", - node_type=NodeType.LLM, - title="Test Node", - created_at=naive_utc_now(), - ) - - # Pre-populate the cache with the node execution - workflow_cycle_manager._node_execution_cache["test-node-execution-id"] = node_execution - - # Call the method - result = workflow_cycle_manager.handle_workflow_node_execution_failed( - event=event, - ) - - # Verify the result - assert result == node_execution - assert result.status == WorkflowNodeExecutionStatus.FAILED - assert result.error == 
"Test error message" - - # Verify save was called - workflow_cycle_manager._workflow_node_execution_repository.save.assert_called_once_with(node_execution) diff --git a/api/tests/unit_tests/core/workflow/test_workflow_entry.py b/api/tests/unit_tests/core/workflow/test_workflow_entry.py index 324f58abf6..75de5c455f 100644 --- a/api/tests/unit_tests/core/workflow/test_workflow_entry.py +++ b/api/tests/unit_tests/core/workflow/test_workflow_entry.py @@ -7,7 +7,7 @@ from core.workflow.constants import ( CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID, ) -from core.workflow.entities.variable_pool import VariablePool +from core.workflow.runtime import VariablePool from core.workflow.system_variable import SystemVariable from core.workflow.workflow_entry import WorkflowEntry diff --git a/api/tests/unit_tests/core/workflow/test_workflow_entry_redis_channel.py b/api/tests/unit_tests/core/workflow/test_workflow_entry_redis_channel.py index c3d59aaf3f..bc55d3fccf 100644 --- a/api/tests/unit_tests/core/workflow/test_workflow_entry_redis_channel.py +++ b/api/tests/unit_tests/core/workflow/test_workflow_entry_redis_channel.py @@ -3,8 +3,8 @@ from unittest.mock import MagicMock, patch from core.app.entities.app_invoke_entities import InvokeFrom -from core.workflow.entities import GraphRuntimeState, VariablePool from core.workflow.graph_engine.command_channels.redis_channel import RedisChannel +from core.workflow.runtime import GraphRuntimeState, VariablePool from core.workflow.workflow_entry import WorkflowEntry from models.enums import UserFrom diff --git a/api/tests/unit_tests/factories/test_build_from_mapping.py b/api/tests/unit_tests/factories/test_build_from_mapping.py index 39280c9267..77c4956c04 100644 --- a/api/tests/unit_tests/factories/test_build_from_mapping.py +++ b/api/tests/unit_tests/factories/test_build_from_mapping.py @@ -150,6 +150,42 @@ def test_build_from_remote_url(mock_http_head): assert file.size == 2048 +@pytest.mark.parametrize( + ("file_type", "should_pass", "expected_error"), + [ + ("image", True, None), + ("document", False, "Detected file type does not match the specified type"), + ("video", False, "Detected file type does not match the specified type"), + ], +) +def test_build_from_remote_url_strict_validation(mock_http_head, file_type, should_pass, expected_error): + """Test strict type validation for remote_url.""" + mapping = { + "transfer_method": "remote_url", + "url": TEST_REMOTE_URL, + "type": file_type, + } + if should_pass: + file = build_from_mapping(mapping=mapping, tenant_id=TEST_TENANT_ID, strict_type_validation=True) + assert file.type == FileType(file_type) + else: + with pytest.raises(ValueError, match=expected_error): + build_from_mapping(mapping=mapping, tenant_id=TEST_TENANT_ID, strict_type_validation=True) + + +def test_build_from_remote_url_without_strict_validation(mock_http_head): + """Test that remote_url allows type mismatch when strict_type_validation is False.""" + mapping = { + "transfer_method": "remote_url", + "url": TEST_REMOTE_URL, + "type": "document", + } + file = build_from_mapping(mapping=mapping, tenant_id=TEST_TENANT_ID, strict_type_validation=False) + assert file.transfer_method == FileTransferMethod.REMOTE_URL + assert file.type == FileType.DOCUMENT + assert file.filename == "remote_test.jpg" + + def test_tool_file_not_found(): """Test ToolFile not found in database.""" with patch("factories.file_factory.db.session.scalar", return_value=None): diff --git a/api/tests/unit_tests/libs/test_custom_inputs.py 
b/api/tests/unit_tests/libs/test_custom_inputs.py new file mode 100644 index 0000000000..7e4c3b4ff0 --- /dev/null +++ b/api/tests/unit_tests/libs/test_custom_inputs.py @@ -0,0 +1,68 @@ +"""Unit tests for custom input types.""" + +import pytest + +from libs.custom_inputs import time_duration + + +class TestTimeDuration: + """Test time_duration input validator.""" + + def test_valid_days(self): + """Test valid days format.""" + result = time_duration("7d") + assert result == "7d" + + def test_valid_hours(self): + """Test valid hours format.""" + result = time_duration("4h") + assert result == "4h" + + def test_valid_minutes(self): + """Test valid minutes format.""" + result = time_duration("30m") + assert result == "30m" + + def test_valid_seconds(self): + """Test valid seconds format.""" + result = time_duration("30s") + assert result == "30s" + + def test_uppercase_conversion(self): + """Test uppercase units are converted to lowercase.""" + result = time_duration("7D") + assert result == "7d" + + result = time_duration("4H") + assert result == "4h" + + def test_invalid_format_no_unit(self): + """Test invalid format without unit.""" + with pytest.raises(ValueError, match="Invalid time duration format"): + time_duration("7") + + def test_invalid_format_wrong_unit(self): + """Test invalid format with wrong unit.""" + with pytest.raises(ValueError, match="Invalid time duration format"): + time_duration("7days") + + with pytest.raises(ValueError, match="Invalid time duration format"): + time_duration("7x") + + def test_invalid_format_no_number(self): + """Test invalid format without number.""" + with pytest.raises(ValueError, match="Invalid time duration format"): + time_duration("d") + + with pytest.raises(ValueError, match="Invalid time duration format"): + time_duration("abc") + + def test_empty_string(self): + """Test empty string.""" + with pytest.raises(ValueError, match="Time duration cannot be empty"): + time_duration("") + + def test_none(self): + """Test None value.""" + with pytest.raises(ValueError, match="Time duration cannot be empty"): + time_duration(None) diff --git a/api/tests/unit_tests/libs/test_external_api.py b/api/tests/unit_tests/libs/test_external_api.py index a9edb913ea..c4c376a070 100644 --- a/api/tests/unit_tests/libs/test_external_api.py +++ b/api/tests/unit_tests/libs/test_external_api.py @@ -2,7 +2,9 @@ from flask import Blueprint, Flask from flask_restx import Resource from werkzeug.exceptions import BadRequest, Unauthorized +from constants import COOKIE_NAME_ACCESS_TOKEN, COOKIE_NAME_CSRF_TOKEN, COOKIE_NAME_REFRESH_TOKEN from core.errors.error import AppInvokeQuotaExceededError +from libs.exception import BaseHTTPException from libs.external_api import ExternalApi @@ -120,3 +122,66 @@ def test_external_api_param_mapping_and_quota_and_exc_info_none(): assert res.status_code in (400, 429) finally: ext.sys.exc_info = orig_exc_info # type: ignore[assignment] + + +def test_unauthorized_and_force_logout_clears_cookies(): + """Test that UnauthorizedAndForceLogout error clears auth cookies""" + + class UnauthorizedAndForceLogout(BaseHTTPException): + error_code = "unauthorized_and_force_logout" + description = "Unauthorized and force logout." 
+        code = 401
+
+    app = Flask(__name__)
+    bp = Blueprint("test", __name__)
+    api = ExternalApi(bp)
+
+    @api.route("/force-logout")
+    class ForceLogout(Resource):  # type: ignore
+        def get(self):  # type: ignore
+            raise UnauthorizedAndForceLogout()
+
+    app.register_blueprint(bp, url_prefix="/api")
+    client = app.test_client()
+
+    # Set cookies first
+    client.set_cookie(COOKIE_NAME_ACCESS_TOKEN, "test_access_token")
+    client.set_cookie(COOKIE_NAME_CSRF_TOKEN, "test_csrf_token")
+    client.set_cookie(COOKIE_NAME_REFRESH_TOKEN, "test_refresh_token")
+
+    # Make request that should trigger cookie clearing
+    res = client.get("/api/force-logout")
+
+    # Verify response
+    assert res.status_code == 401
+    data = res.get_json()
+    assert data["code"] == "unauthorized_and_force_logout"
+    assert data["status"] == 401
+    assert "WWW-Authenticate" in res.headers
+
+    # Verify Set-Cookie headers are present to clear cookies
+    set_cookie_headers = res.headers.getlist("Set-Cookie")
+    assert len(set_cookie_headers) == 3, f"Expected 3 Set-Cookie headers, got {len(set_cookie_headers)}"
+
+    # Verify each cookie is being cleared (empty value and expired)
+    cookie_names_found = set()
+    for cookie_header in set_cookie_headers:
+        # Check for cookie names; the name=value pair precedes the first ";"
+        if COOKIE_NAME_ACCESS_TOKEN in cookie_header:
+            cookie_names_found.add(COOKIE_NAME_ACCESS_TOKEN)
+            assert cookie_header.split(";", 1)[0] in (f"{COOKIE_NAME_ACCESS_TOKEN}=", f'{COOKIE_NAME_ACCESS_TOKEN}=""')  # Empty value
+            assert "Expires=Thu, 01 Jan 1970" in cookie_header  # Expired
+        elif COOKIE_NAME_CSRF_TOKEN in cookie_header:
+            cookie_names_found.add(COOKIE_NAME_CSRF_TOKEN)
+            assert cookie_header.split(";", 1)[0] in (f"{COOKIE_NAME_CSRF_TOKEN}=", f'{COOKIE_NAME_CSRF_TOKEN}=""')
+            assert "Expires=Thu, 01 Jan 1970" in cookie_header
+        elif COOKIE_NAME_REFRESH_TOKEN in cookie_header:
+            cookie_names_found.add(COOKIE_NAME_REFRESH_TOKEN)
+            assert cookie_header.split(";", 1)[0] in (f"{COOKIE_NAME_REFRESH_TOKEN}=", f'{COOKIE_NAME_REFRESH_TOKEN}=""')
+            assert "Expires=Thu, 01 Jan 1970" in cookie_header
+
+    # Verify all three cookies are present
+    assert len(cookie_names_found) == 3
+    assert COOKIE_NAME_ACCESS_TOKEN in cookie_names_found
+    assert COOKIE_NAME_CSRF_TOKEN in cookie_names_found
+    assert COOKIE_NAME_REFRESH_TOKEN in cookie_names_found
diff --git a/api/tests/unit_tests/libs/test_json_in_md_parser.py b/api/tests/unit_tests/libs/test_json_in_md_parser.py
index 53fd0bea16..953f203e89 100644
--- a/api/tests/unit_tests/libs/test_json_in_md_parser.py
+++ b/api/tests/unit_tests/libs/test_json_in_md_parser.py
@@ -86,3 +86,24 @@ def test_parse_and_check_json_markdown_multiple_blocks_fails():
     # opening fence to the last closing fence, causing JSON decode failure.
with pytest.raises(OutputParserError): parse_and_check_json_markdown(src, []) + + +def test_parse_and_check_json_markdown_handles_think_fenced_and_raw_variants(): + expected = {"keywords": ["2"], "category_id": "2", "category_name": "2"} + cases = [ + """ + ```json + [{"keywords": ["2"], "category_id": "2", "category_name": "2"}] + ```, error: Expecting value: line 1 column 1 (char 0) + """, + """ + ```json + {"keywords": ["2"], "category_id": "2", "category_name": "2"} + ```, error: Extra data: line 2 column 5 (char 66) + """, + '{"keywords": ["2"], "category_id": "2", "category_name": "2"}', + '[{"keywords": ["2"], "category_id": "2", "category_name": "2"}]', + ] + for src in cases: + obj = parse_and_check_json_markdown(src, ["keywords", "category_id", "category_name"]) + assert obj == expected diff --git a/api/tests/unit_tests/libs/test_login.py b/api/tests/unit_tests/libs/test_login.py index 39671077d4..35155b4931 100644 --- a/api/tests/unit_tests/libs/test_login.py +++ b/api/tests/unit_tests/libs/test_login.py @@ -19,10 +19,15 @@ class MockUser(UserMixin): return self._is_authenticated +def mock_csrf_check(*args, **kwargs): + return + + class TestLoginRequired: """Test cases for login_required decorator.""" @pytest.fixture + @patch("libs.login.check_csrf_token", mock_csrf_check) def setup_app(self, app: Flask): """Set up Flask app with login manager.""" # Initialize login manager @@ -39,6 +44,7 @@ class TestLoginRequired: return app + @patch("libs.login.check_csrf_token", mock_csrf_check) def test_authenticated_user_can_access_protected_view(self, setup_app: Flask): """Test that authenticated users can access protected views.""" @@ -53,6 +59,7 @@ class TestLoginRequired: result = protected_view() assert result == "Protected content" + @patch("libs.login.check_csrf_token", mock_csrf_check) def test_unauthenticated_user_cannot_access_protected_view(self, setup_app: Flask): """Test that unauthenticated users are redirected.""" @@ -68,6 +75,7 @@ class TestLoginRequired: assert result == "Unauthorized" setup_app.login_manager.unauthorized.assert_called_once() + @patch("libs.login.check_csrf_token", mock_csrf_check) def test_login_disabled_allows_unauthenticated_access(self, setup_app: Flask): """Test that LOGIN_DISABLED config bypasses authentication.""" @@ -87,6 +95,7 @@ class TestLoginRequired: # Ensure unauthorized was not called setup_app.login_manager.unauthorized.assert_not_called() + @patch("libs.login.check_csrf_token", mock_csrf_check) def test_options_request_bypasses_authentication(self, setup_app: Flask): """Test that OPTIONS requests are exempt from authentication.""" @@ -103,6 +112,7 @@ class TestLoginRequired: # Ensure unauthorized was not called setup_app.login_manager.unauthorized.assert_not_called() + @patch("libs.login.check_csrf_token", mock_csrf_check) def test_flask_2_compatibility(self, setup_app: Flask): """Test Flask 2.x compatibility with ensure_sync.""" @@ -120,6 +130,7 @@ class TestLoginRequired: assert result == "Synced content" setup_app.ensure_sync.assert_called_once() + @patch("libs.login.check_csrf_token", mock_csrf_check) def test_flask_1_compatibility(self, setup_app: Flask): """Test Flask 1.x compatibility without ensure_sync.""" diff --git a/api/tests/unit_tests/libs/test_time_parser.py b/api/tests/unit_tests/libs/test_time_parser.py new file mode 100644 index 0000000000..83ff251272 --- /dev/null +++ b/api/tests/unit_tests/libs/test_time_parser.py @@ -0,0 +1,91 @@ +"""Unit tests for time parser utility.""" + +from datetime import UTC, datetime, 
timedelta
+
+from libs.time_parser import get_time_threshold, parse_time_duration
+
+
+class TestParseTimeDuration:
+    """Test parse_time_duration function."""
+
+    def test_parse_days(self):
+        """Test parsing days."""
+        result = parse_time_duration("7d")
+        assert result == timedelta(days=7)
+
+    def test_parse_hours(self):
+        """Test parsing hours."""
+        result = parse_time_duration("4h")
+        assert result == timedelta(hours=4)
+
+    def test_parse_minutes(self):
+        """Test parsing minutes."""
+        result = parse_time_duration("30m")
+        assert result == timedelta(minutes=30)
+
+    def test_parse_seconds(self):
+        """Test parsing seconds."""
+        result = parse_time_duration("30s")
+        assert result == timedelta(seconds=30)
+
+    def test_parse_uppercase(self):
+        """Test parsing uppercase units."""
+        result = parse_time_duration("7D")
+        assert result == timedelta(days=7)
+
+    def test_parse_invalid_format(self):
+        """Test parsing invalid format."""
+        result = parse_time_duration("7days")
+        assert result is None
+
+        result = parse_time_duration("abc")
+        assert result is None
+
+        result = parse_time_duration("7")
+        assert result is None
+
+    def test_parse_empty_string(self):
+        """Test parsing empty string."""
+        result = parse_time_duration("")
+        assert result is None
+
+    def test_parse_none(self):
+        """Test parsing None."""
+        result = parse_time_duration(None)
+        assert result is None
+
+
+class TestGetTimeThreshold:
+    """Test get_time_threshold function."""
+
+    def test_get_threshold_days(self):
+        """Test getting threshold for days."""
+        before = datetime.now(UTC)
+        result = get_time_threshold("7d")
+        after = datetime.now(UTC)
+
+        assert result is not None
+        # Result should be approximately 7 days ago; bound it between the
+        # before/after snapshots taken around the call
+        earliest = before - timedelta(days=7)
+        assert earliest <= result <= after - timedelta(days=7)
+
+    def test_get_threshold_hours(self):
+        """Test getting threshold for hours."""
+        before = datetime.now(UTC)
+        result = get_time_threshold("4h")
+        after = datetime.now(UTC)
+
+        assert result is not None
+        earliest = before - timedelta(hours=4)
+        assert earliest <= result <= after - timedelta(hours=4)
+
+    def test_get_threshold_invalid(self):
+        """Test getting threshold with invalid duration."""
+        result = get_time_threshold("invalid")
+        assert result is None
+
+    def test_get_threshold_none(self):
+        """Test getting threshold with None."""
+        result = get_time_threshold(None)
+        assert result is None
diff --git a/api/tests/unit_tests/libs/test_token.py b/api/tests/unit_tests/libs/test_token.py
new file mode 100644
index 0000000000..22790fa4a6
--- /dev/null
+++ b/api/tests/unit_tests/libs/test_token.py
@@ -0,0 +1,23 @@
+from constants import COOKIE_NAME_ACCESS_TOKEN
+from libs.token import extract_access_token
+
+
+class MockRequest:
+    def __init__(self, headers: dict[str, str], cookies: dict[str, str], args: dict[str, str]):
+        self.headers: dict[str, str] = headers
+        self.cookies: dict[str, str] = cookies
+        self.args: dict[str, str] = args
+
+
+def test_extract_access_token():
+    def _mock_request(headers: dict[str, str], cookies: dict[str, str], args: dict[str, str]):
+        return MockRequest(headers, cookies, args)
+
+    test_cases = [
+        (_mock_request({"Authorization": "Bearer 123"}, {}, {}), "123"),
+        (_mock_request({}, {COOKIE_NAME_ACCESS_TOKEN: "123"}, {}), "123"),
+        (_mock_request({}, {}, {}), None),
+        (_mock_request({"Authorization": "Bearer_aaa 123"}, {}, {}), None),
+    ]
+    for request, expected in test_cases:
+        assert extract_access_token(request) == expected  # pyright:
ignore[reportArgumentType] diff --git a/api/tests/unit_tests/repositories/test_workflow_run_repository.py b/api/tests/unit_tests/repositories/test_workflow_run_repository.py new file mode 100644 index 0000000000..8f47f0df48 --- /dev/null +++ b/api/tests/unit_tests/repositories/test_workflow_run_repository.py @@ -0,0 +1,251 @@ +"""Unit tests for workflow run repository with status filter.""" + +import uuid +from unittest.mock import MagicMock + +import pytest +from sqlalchemy.orm import sessionmaker + +from models import WorkflowRun, WorkflowRunTriggeredFrom +from repositories.sqlalchemy_api_workflow_run_repository import DifyAPISQLAlchemyWorkflowRunRepository + + +class TestDifyAPISQLAlchemyWorkflowRunRepository: + """Test workflow run repository with status filtering.""" + + @pytest.fixture + def mock_session_maker(self): + """Create a mock session maker.""" + return MagicMock(spec=sessionmaker) + + @pytest.fixture + def repository(self, mock_session_maker): + """Create repository instance with mock session.""" + return DifyAPISQLAlchemyWorkflowRunRepository(mock_session_maker) + + def test_get_paginated_workflow_runs_without_status(self, repository, mock_session_maker): + """Test getting paginated workflow runs without status filter.""" + # Arrange + tenant_id = str(uuid.uuid4()) + app_id = str(uuid.uuid4()) + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + + mock_runs = [MagicMock(spec=WorkflowRun) for _ in range(3)] + mock_session.scalars.return_value.all.return_value = mock_runs + + # Act + result = repository.get_paginated_workflow_runs( + tenant_id=tenant_id, + app_id=app_id, + triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, + limit=20, + last_id=None, + status=None, + ) + + # Assert + assert len(result.data) == 3 + assert result.limit == 20 + assert result.has_more is False + + def test_get_paginated_workflow_runs_with_status_filter(self, repository, mock_session_maker): + """Test getting paginated workflow runs with status filter.""" + # Arrange + tenant_id = str(uuid.uuid4()) + app_id = str(uuid.uuid4()) + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + + mock_runs = [MagicMock(spec=WorkflowRun, status="succeeded") for _ in range(2)] + mock_session.scalars.return_value.all.return_value = mock_runs + + # Act + result = repository.get_paginated_workflow_runs( + tenant_id=tenant_id, + app_id=app_id, + triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, + limit=20, + last_id=None, + status="succeeded", + ) + + # Assert + assert len(result.data) == 2 + assert all(run.status == "succeeded" for run in result.data) + + def test_get_workflow_runs_count_without_status(self, repository, mock_session_maker): + """Test getting workflow runs count without status filter.""" + # Arrange + tenant_id = str(uuid.uuid4()) + app_id = str(uuid.uuid4()) + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + + # Mock the GROUP BY query results + mock_results = [ + ("succeeded", 5), + ("failed", 2), + ("running", 1), + ] + mock_session.execute.return_value.all.return_value = mock_results + + # Act + result = repository.get_workflow_runs_count( + tenant_id=tenant_id, + app_id=app_id, + triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, + status=None, + ) + + # Assert + assert result["total"] == 8 + assert result["succeeded"] == 5 + assert result["failed"] == 2 + assert result["running"] == 1 + assert result["stopped"] == 0 + assert 
result["partial-succeeded"] == 0
+
+    def test_get_workflow_runs_count_with_status_filter(self, repository, mock_session_maker):
+        """Test getting workflow runs count with status filter."""
+        # Arrange
+        tenant_id = str(uuid.uuid4())
+        app_id = str(uuid.uuid4())
+        mock_session = MagicMock()
+        mock_session_maker.return_value.__enter__.return_value = mock_session
+
+        # Mock the count query for succeeded status
+        mock_session.scalar.return_value = 5
+
+        # Act
+        result = repository.get_workflow_runs_count(
+            tenant_id=tenant_id,
+            app_id=app_id,
+            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
+            status="succeeded",
+        )
+
+        # Assert
+        assert result["total"] == 5
+        assert result["succeeded"] == 5
+        assert result["running"] == 0
+        assert result["failed"] == 0
+        assert result["stopped"] == 0
+        assert result["partial-succeeded"] == 0
+
+    def test_get_workflow_runs_count_with_invalid_status(self, repository, mock_session_maker):
+        """Test that an invalid status filter matches no runs: total and every per-status count are zero."""
+        # Arrange
+        tenant_id = str(uuid.uuid4())
+        app_id = str(uuid.uuid4())
+        mock_session = MagicMock()
+        mock_session_maker.return_value.__enter__.return_value = mock_session
+
+        # Mock count query returning 0 for invalid status
+        mock_session.scalar.return_value = 0
+
+        # Act
+        result = repository.get_workflow_runs_count(
+            tenant_id=tenant_id,
+            app_id=app_id,
+            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
+            status="invalid_status",
+        )
+
+        # Assert
+        assert result["total"] == 0
+        assert all(result[status] == 0 for status in ["running", "succeeded", "failed", "stopped", "partial-succeeded"])
+
+    def test_get_workflow_runs_count_with_time_range(self, repository, mock_session_maker):
+        """Test that a time range filter is reflected in the constructed SQL query."""
+        # Arrange
+        tenant_id = str(uuid.uuid4())
+        app_id = str(uuid.uuid4())
+        mock_session = MagicMock()
+        mock_session_maker.return_value.__enter__.return_value = mock_session
+
+        # Mock the GROUP BY query results
+        mock_results = [
+            ("succeeded", 3),
+            ("running", 2),
+        ]
+        mock_session.execute.return_value.all.return_value = mock_results
+
+        # Act
+        result = repository.get_workflow_runs_count(
+            tenant_id=tenant_id,
+            app_id=app_id,
+            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
+            status=None,
+            time_range="1d",
+        )
+
+        # Assert results
+        assert result["total"] == 5
+        assert result["succeeded"] == 3
+        assert result["running"] == 2
+        assert result["failed"] == 0
+
+        # Verify that execute was called (which means GROUP BY query was used)
+        assert mock_session.execute.called, "execute should have been called for GROUP BY query"
+
+        # Verify SQL query includes time filter by checking the statement
+        call_args = mock_session.execute.call_args
+        assert call_args is not None, "execute should have been called with a statement"
+
+        # The first argument should be the SQL statement
+        stmt = call_args[0][0]
+        # Convert to string to inspect the query
+        query_str = str(stmt.compile(compile_kwargs={"literal_binds": True}))
+
+        # Verify the query includes created_at filter
+        # The query should have a WHERE clause with created_at comparison
+        assert "created_at" in query_str.lower() or "workflow_runs.created_at" in query_str.lower(), (
+            "Query should include created_at filter for time range"
+        )
+
+    def test_get_workflow_runs_count_with_status_and_time_range(self, repository, mock_session_maker):
+        """Test that combined status and time range filters are both reflected in the SQL query."""
+        # Arrange
+        
tenant_id = str(uuid.uuid4()) + app_id = str(uuid.uuid4()) + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + + # Mock the count query for running status within time range + mock_session.scalar.return_value = 2 + + # Act + result = repository.get_workflow_runs_count( + tenant_id=tenant_id, + app_id=app_id, + triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, + status="running", + time_range="1d", + ) + + # Assert results + assert result["total"] == 2 + assert result["running"] == 2 + assert result["succeeded"] == 0 + assert result["failed"] == 0 + + # Verify that scalar was called (which means COUNT query was used) + assert mock_session.scalar.called, "scalar should have been called for count query" + + # Verify SQL query includes both status and time filter + call_args = mock_session.scalar.call_args + assert call_args is not None, "scalar should have been called with a statement" + + # The first argument should be the SQL statement + stmt = call_args[0][0] + # Convert to string to inspect the query + query_str = str(stmt.compile(compile_kwargs={"literal_binds": True})) + + # Verify the query includes both filters + assert "created_at" in query_str.lower() or "workflow_runs.created_at" in query_str.lower(), ( + "Query should include created_at filter for time range" + ) + assert "status" in query_str.lower() or "workflow_runs.status" in query_str.lower(), ( + "Query should include status filter" + ) diff --git a/api/tests/unit_tests/services/auth/test_auth_integration.py b/api/tests/unit_tests/services/auth/test_auth_integration.py index acfc5cc526..3832a0b8b2 100644 --- a/api/tests/unit_tests/services/auth/test_auth_integration.py +++ b/api/tests/unit_tests/services/auth/test_auth_integration.py @@ -181,14 +181,11 @@ class TestAuthIntegration: ) def test_all_providers_factory_creation(self, provider, credentials): """Test factory creation for all supported providers""" - try: - auth_class = ApiKeyAuthFactory.get_apikey_auth_factory(provider) - assert auth_class is not None + auth_class = ApiKeyAuthFactory.get_apikey_auth_factory(provider) + assert auth_class is not None - factory = ApiKeyAuthFactory(provider, credentials) - assert factory.auth is not None - except ImportError: - pytest.skip(f"Provider {provider} not implemented yet") + factory = ApiKeyAuthFactory(provider, credentials) + assert factory.auth is not None def _create_success_response(self, status_code=200): """Create successful HTTP response mock""" diff --git a/api/tests/unit_tests/services/test_dataset_service_delete_dataset.py b/api/tests/unit_tests/services/test_dataset_service_delete_dataset.py new file mode 100644 index 0000000000..cc718c9997 --- /dev/null +++ b/api/tests/unit_tests/services/test_dataset_service_delete_dataset.py @@ -0,0 +1,216 @@ +from unittest.mock import Mock, patch + +import pytest + +from models.account import Account, TenantAccountRole +from models.dataset import Dataset +from services.dataset_service import DatasetService + + +class DatasetDeleteTestDataFactory: + """Factory class for creating test data and mock objects for dataset delete tests.""" + + @staticmethod + def create_dataset_mock( + dataset_id: str = "dataset-123", + tenant_id: str = "test-tenant-123", + created_by: str = "creator-456", + doc_form: str | None = None, + indexing_technique: str | None = "high_quality", + **kwargs, + ) -> Mock: + """Create a mock dataset with specified attributes.""" + dataset = Mock(spec=Dataset) + dataset.id = dataset_id + dataset.tenant_id = 
tenant_id + dataset.created_by = created_by + dataset.doc_form = doc_form + dataset.indexing_technique = indexing_technique + for key, value in kwargs.items(): + setattr(dataset, key, value) + return dataset + + @staticmethod + def create_user_mock( + user_id: str = "user-789", + tenant_id: str = "test-tenant-123", + role: TenantAccountRole = TenantAccountRole.ADMIN, + **kwargs, + ) -> Mock: + """Create a mock user with specified attributes.""" + user = Mock(spec=Account) + user.id = user_id + user.current_tenant_id = tenant_id + user.current_role = role + for key, value in kwargs.items(): + setattr(user, key, value) + return user + + +class TestDatasetServiceDeleteDataset: + """ + Comprehensive unit tests for DatasetService.delete_dataset method. + + This test suite covers all deletion scenarios including: + - Normal dataset deletion with documents + - Empty dataset deletion (no documents, doc_form is None) + - Dataset deletion with missing indexing_technique + - Permission checks + - Event handling + + This test suite provides regression protection for issue #27073. + """ + + @pytest.fixture + def mock_dataset_service_dependencies(self): + """Common mock setup for dataset service dependencies.""" + with ( + patch("services.dataset_service.DatasetService.get_dataset") as mock_get_dataset, + patch("services.dataset_service.DatasetService.check_dataset_permission") as mock_check_perm, + patch("extensions.ext_database.db.session") as mock_db, + patch("services.dataset_service.dataset_was_deleted") as mock_dataset_was_deleted, + ): + yield { + "get_dataset": mock_get_dataset, + "check_permission": mock_check_perm, + "db_session": mock_db, + "dataset_was_deleted": mock_dataset_was_deleted, + } + + def test_delete_dataset_with_documents_success(self, mock_dataset_service_dependencies): + """ + Test successful deletion of a dataset with documents. + + This test verifies: + - Dataset is retrieved correctly + - Permission check is performed + - dataset_was_deleted event is sent + - Dataset is deleted from database + - Method returns True + """ + # Arrange + dataset = DatasetDeleteTestDataFactory.create_dataset_mock( + doc_form="text_model", indexing_technique="high_quality" + ) + user = DatasetDeleteTestDataFactory.create_user_mock() + + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + # Act + result = DatasetService.delete_dataset(dataset.id, user) + + # Assert + assert result is True + mock_dataset_service_dependencies["get_dataset"].assert_called_once_with(dataset.id) + mock_dataset_service_dependencies["check_permission"].assert_called_once_with(dataset, user) + mock_dataset_service_dependencies["dataset_was_deleted"].send.assert_called_once_with(dataset) + mock_dataset_service_dependencies["db_session"].delete.assert_called_once_with(dataset) + mock_dataset_service_dependencies["db_session"].commit.assert_called_once() + + def test_delete_empty_dataset_success(self, mock_dataset_service_dependencies): + """ + Test successful deletion of an empty dataset (no documents, doc_form is None). + + This test verifies that: + - Empty datasets can be deleted without errors + - dataset_was_deleted event is sent (event handler will skip cleanup if doc_form is None) + - Dataset is deleted from database + - Method returns True + + This is the primary test for issue #27073 where deleting an empty dataset + caused internal server error due to assertion failure in event handlers. 
+ """ + # Arrange + dataset = DatasetDeleteTestDataFactory.create_dataset_mock(doc_form=None, indexing_technique=None) + user = DatasetDeleteTestDataFactory.create_user_mock() + + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + # Act + result = DatasetService.delete_dataset(dataset.id, user) + + # Assert - Verify complete deletion flow + assert result is True + mock_dataset_service_dependencies["get_dataset"].assert_called_once_with(dataset.id) + mock_dataset_service_dependencies["check_permission"].assert_called_once_with(dataset, user) + mock_dataset_service_dependencies["dataset_was_deleted"].send.assert_called_once_with(dataset) + mock_dataset_service_dependencies["db_session"].delete.assert_called_once_with(dataset) + mock_dataset_service_dependencies["db_session"].commit.assert_called_once() + + def test_delete_dataset_with_partial_none_values(self, mock_dataset_service_dependencies): + """ + Test deletion of dataset with partial None values. + + This test verifies that datasets with partial None values (e.g., doc_form exists + but indexing_technique is None) can be deleted successfully. The event handler + will skip cleanup if any required field is None. + + Improvement based on Gemini Code Assist suggestion: Added comprehensive assertions + to verify all core deletion operations are performed, not just event sending. + """ + # Arrange + dataset = DatasetDeleteTestDataFactory.create_dataset_mock(doc_form="text_model", indexing_technique=None) + user = DatasetDeleteTestDataFactory.create_user_mock() + + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + # Act + result = DatasetService.delete_dataset(dataset.id, user) + + # Assert - Verify complete deletion flow (Gemini suggestion implemented) + assert result is True + mock_dataset_service_dependencies["get_dataset"].assert_called_once_with(dataset.id) + mock_dataset_service_dependencies["check_permission"].assert_called_once_with(dataset, user) + mock_dataset_service_dependencies["dataset_was_deleted"].send.assert_called_once_with(dataset) + mock_dataset_service_dependencies["db_session"].delete.assert_called_once_with(dataset) + mock_dataset_service_dependencies["db_session"].commit.assert_called_once() + + def test_delete_dataset_with_doc_form_none_indexing_technique_exists(self, mock_dataset_service_dependencies): + """ + Test deletion of dataset where doc_form is None but indexing_technique exists. + + This edge case can occur in certain dataset configurations and should be handled + gracefully by the event handler's conditional check. 
+ """ + # Arrange + dataset = DatasetDeleteTestDataFactory.create_dataset_mock(doc_form=None, indexing_technique="high_quality") + user = DatasetDeleteTestDataFactory.create_user_mock() + + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + # Act + result = DatasetService.delete_dataset(dataset.id, user) + + # Assert - Verify complete deletion flow + assert result is True + mock_dataset_service_dependencies["get_dataset"].assert_called_once_with(dataset.id) + mock_dataset_service_dependencies["check_permission"].assert_called_once_with(dataset, user) + mock_dataset_service_dependencies["dataset_was_deleted"].send.assert_called_once_with(dataset) + mock_dataset_service_dependencies["db_session"].delete.assert_called_once_with(dataset) + mock_dataset_service_dependencies["db_session"].commit.assert_called_once() + + def test_delete_dataset_not_found(self, mock_dataset_service_dependencies): + """ + Test deletion attempt when dataset doesn't exist. + + This test verifies that: + - Method returns False when dataset is not found + - No deletion operations are performed + - No events are sent + """ + # Arrange + dataset_id = "non-existent-dataset" + user = DatasetDeleteTestDataFactory.create_user_mock() + + mock_dataset_service_dependencies["get_dataset"].return_value = None + + # Act + result = DatasetService.delete_dataset(dataset_id, user) + + # Assert + assert result is False + mock_dataset_service_dependencies["get_dataset"].assert_called_once_with(dataset_id) + mock_dataset_service_dependencies["check_permission"].assert_not_called() + mock_dataset_service_dependencies["dataset_was_deleted"].send.assert_not_called() + mock_dataset_service_dependencies["db_session"].delete.assert_not_called() + mock_dataset_service_dependencies["db_session"].commit.assert_not_called() diff --git a/api/tests/unit_tests/services/test_metadata_bug_complete.py b/api/tests/unit_tests/services/test_metadata_bug_complete.py index 31fe9b2868..bbfa9da15e 100644 --- a/api/tests/unit_tests/services/test_metadata_bug_complete.py +++ b/api/tests/unit_tests/services/test_metadata_bug_complete.py @@ -41,7 +41,10 @@ class TestMetadataBugCompleteValidation: mock_user.current_tenant_id = "tenant-123" mock_user.id = "user-456" - with patch("services.metadata_service.current_user", mock_user): + with patch( + "services.metadata_service.current_account_with_tenant", + return_value=(mock_user, mock_user.current_tenant_id), + ): # Should crash with TypeError with pytest.raises(TypeError, match="object of type 'NoneType' has no len"): MetadataService.create_metadata("dataset-123", mock_metadata_args) @@ -51,7 +54,10 @@ class TestMetadataBugCompleteValidation: mock_user.current_tenant_id = "tenant-123" mock_user.id = "user-456" - with patch("services.metadata_service.current_user", mock_user): + with patch( + "services.metadata_service.current_account_with_tenant", + return_value=(mock_user, mock_user.current_tenant_id), + ): with pytest.raises(TypeError, match="object of type 'NoneType' has no len"): MetadataService.update_metadata_name("dataset-123", "metadata-456", None) @@ -74,9 +80,11 @@ class TestMetadataBugCompleteValidation: def test_4_fixed_api_layer_rejects_null(self, app): """Test Layer 4: Fixed API configuration properly rejects null values.""" # Test Console API create endpoint (fixed) - parser = reqparse.RequestParser() - parser.add_argument("type", type=str, required=True, nullable=False, location="json") - parser.add_argument("name", type=str, required=True, nullable=False, location="json") 
+ parser = ( + reqparse.RequestParser() + .add_argument("type", type=str, required=True, nullable=False, location="json") + .add_argument("name", type=str, required=True, nullable=False, location="json") + ) with app.test_request_context(json={"type": None, "name": None}, content_type="application/json"): with pytest.raises(BadRequest): @@ -94,9 +102,11 @@ class TestMetadataBugCompleteValidation: def test_5_fixed_api_accepts_valid_values(self, app): """Test that fixed API still accepts valid non-null values.""" - parser = reqparse.RequestParser() - parser.add_argument("type", type=str, required=True, nullable=False, location="json") - parser.add_argument("name", type=str, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("type", type=str, required=True, nullable=False, location="json") + .add_argument("name", type=str, required=True, nullable=False, location="json") + ) with app.test_request_context(json={"type": "string", "name": "valid_name"}, content_type="application/json"): args = parser.parse_args() @@ -106,9 +116,11 @@ class TestMetadataBugCompleteValidation: def test_6_simulated_buggy_behavior(self, app): """Test simulating the original buggy behavior with nullable=True.""" # Simulate the old buggy configuration - buggy_parser = reqparse.RequestParser() - buggy_parser.add_argument("type", type=str, required=True, nullable=True, location="json") - buggy_parser.add_argument("name", type=str, required=True, nullable=True, location="json") + buggy_parser = ( + reqparse.RequestParser() + .add_argument("type", type=str, required=True, nullable=True, location="json") + .add_argument("name", type=str, required=True, nullable=True, location="json") + ) with app.test_request_context(json={"type": None, "name": None}, content_type="application/json"): # This would pass in the buggy version diff --git a/api/tests/unit_tests/services/test_metadata_nullable_bug.py b/api/tests/unit_tests/services/test_metadata_nullable_bug.py index c8cd7025c2..c8a1a70422 100644 --- a/api/tests/unit_tests/services/test_metadata_nullable_bug.py +++ b/api/tests/unit_tests/services/test_metadata_nullable_bug.py @@ -29,7 +29,10 @@ class TestMetadataNullableBug: mock_user.current_tenant_id = "tenant-123" mock_user.id = "user-456" - with patch("services.metadata_service.current_user", mock_user): + with patch( + "services.metadata_service.current_account_with_tenant", + return_value=(mock_user, mock_user.current_tenant_id), + ): # This should crash with TypeError when calling len(None) with pytest.raises(TypeError, match="object of type 'NoneType' has no len"): MetadataService.create_metadata("dataset-123", mock_metadata_args) @@ -40,7 +43,10 @@ class TestMetadataNullableBug: mock_user.current_tenant_id = "tenant-123" mock_user.id = "user-456" - with patch("services.metadata_service.current_user", mock_user): + with patch( + "services.metadata_service.current_account_with_tenant", + return_value=(mock_user, mock_user.current_tenant_id), + ): # This should crash with TypeError when calling len(None) with pytest.raises(TypeError, match="object of type 'NoneType' has no len"): MetadataService.update_metadata_name("dataset-123", "metadata-456", None) @@ -48,9 +54,11 @@ class TestMetadataNullableBug: def test_api_parser_accepts_null_values(self, app): """Test that API parser configuration incorrectly accepts null values.""" # Simulate the current API parser configuration - parser = reqparse.RequestParser() - parser.add_argument("type", type=str, required=True, 
nullable=True, location="json") - parser.add_argument("name", type=str, required=True, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("type", type=str, required=True, nullable=True, location="json") + .add_argument("name", type=str, required=True, nullable=True, location="json") + ) # Simulate request data with null values with app.test_request_context(json={"type": None, "name": None}, content_type="application/json"): @@ -66,9 +74,11 @@ class TestMetadataNullableBug: def test_integration_bug_scenario(self, app): """Test the complete bug scenario from API to service layer.""" # Step 1: API parser accepts null values (current buggy behavior) - parser = reqparse.RequestParser() - parser.add_argument("type", type=str, required=True, nullable=True, location="json") - parser.add_argument("name", type=str, required=True, nullable=True, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("type", type=str, required=True, nullable=True, location="json") + .add_argument("name", type=str, required=True, nullable=True, location="json") + ) with app.test_request_context(json={"type": None, "name": None}, content_type="application/json"): args = parser.parse_args() @@ -88,7 +98,10 @@ class TestMetadataNullableBug: mock_user.current_tenant_id = "tenant-123" mock_user.id = "user-456" - with patch("services.metadata_service.current_user", mock_user): + with patch( + "services.metadata_service.current_account_with_tenant", + return_value=(mock_user, mock_user.current_tenant_id), + ): # Step 4: Service layer crashes on len(None) with pytest.raises(TypeError, match="object of type 'NoneType' has no len"): MetadataService.create_metadata("dataset-123", mock_metadata_args) @@ -96,9 +109,11 @@ class TestMetadataNullableBug: def test_correct_nullable_false_configuration_works(self, app): """Test that the correct nullable=False configuration works as expected.""" # This tests the FIXED configuration - parser = reqparse.RequestParser() - parser.add_argument("type", type=str, required=True, nullable=False, location="json") - parser.add_argument("name", type=str, required=True, nullable=False, location="json") + parser = ( + reqparse.RequestParser() + .add_argument("type", type=str, required=True, nullable=False, location="json") + .add_argument("name", type=str, required=True, nullable=False, location="json") + ) with app.test_request_context(json={"type": None, "name": None}, content_type="application/json"): # This should fail with BadRequest due to nullable=False diff --git a/api/tests/unit_tests/tools/test_api_tool.py b/api/tests/unit_tests/tools/test_api_tool.py new file mode 100644 index 0000000000..4d5683dcbd --- /dev/null +++ b/api/tests/unit_tests/tools/test_api_tool.py @@ -0,0 +1,249 @@ +import json +import operator +from typing import TypeVar +from unittest.mock import Mock, patch + +import httpx +import pytest + +from core.tools.__base.tool_runtime import ToolRuntime +from core.tools.custom_tool.tool import ApiTool +from core.tools.entities.common_entities import I18nObject +from core.tools.entities.tool_bundle import ApiToolBundle +from core.tools.entities.tool_entities import ( + ToolEntity, + ToolIdentity, + ToolInvokeMessage, +) + +_T = TypeVar("_T") + + +def _get_message_by_type(msgs: list[ToolInvokeMessage], msg_type: type[_T]) -> ToolInvokeMessage | None: + return next((i for i in msgs if isinstance(i.message, msg_type)), None) + + +class TestApiToolInvoke: + """Test suite for ApiTool._invoke method to ensure JSON responses are 
properly serialized."""
+
+    def setup_method(self):
+        """Setup test fixtures."""
+        # Create a mock tool entity
+        self.mock_tool_identity = ToolIdentity(
+            author="test",
+            name="test_api_tool",
+            label=I18nObject(en_US="Test API Tool", zh_Hans="测试API工具"),
+            provider="test_provider",
+        )
+        self.mock_tool_entity = ToolEntity(identity=self.mock_tool_identity)
+
+        # Create a mock API bundle
+        self.mock_api_bundle = ApiToolBundle(
+            server_url="https://api.example.com/test",
+            method="GET",
+            openapi={},
+            operation_id="test_operation",
+            parameters=[],
+            author="test_author",
+        )
+
+        # Create a mock runtime
+        self.mock_runtime = Mock(spec=ToolRuntime)
+        self.mock_runtime.credentials = {"auth_type": "none"}
+
+        # Create the ApiTool instance
+        self.api_tool = ApiTool(
+            entity=self.mock_tool_entity,
+            api_bundle=self.mock_api_bundle,
+            runtime=self.mock_runtime,
+            provider_id="test_provider",
+        )
+
+    @patch("core.tools.custom_tool.tool.ssrf_proxy.get")
+    def test_invoke_with_json_response_creates_text_message_with_serialized_json(self, mock_get: Mock) -> None:
+        """Test that when upstream returns JSON, the output Text message contains the JSON-serialized string."""
+        # Setup mock response with JSON content
+        json_response_data = {
+            "key": "value",
+            "number": 123,
+            "nested": {"inner": "data"},
+        }
+        mock_response = Mock(spec=httpx.Response)
+        mock_response.status_code = 200
+        mock_response.content = json.dumps(json_response_data).encode("utf-8")
+        mock_response.json.return_value = json_response_data
+        mock_response.text = json.dumps(json_response_data)
+        mock_response.headers = {"content-type": "application/json"}
+        mock_get.return_value = mock_response
+
+        # Invoke the tool
+        result_generator = self.api_tool._invoke(user_id="test_user", tool_parameters={})
+
+        # Get the result from the generator
+        result = list(result_generator)
+        assert len(result) == 2
+
+        # Verify _invoke yields a text message
+        text_message = _get_message_by_type(result, ToolInvokeMessage.TextMessage)
+        assert text_message is not None, "_invoke should yield a text message"
+        assert isinstance(text_message, ToolInvokeMessage)
+        assert text_message.type == ToolInvokeMessage.MessageType.TEXT
+        assert text_message.message is not None
+        # Verify the text contains the JSON-serialized string
+        # Check if message is a TextMessage
+        assert isinstance(text_message.message, ToolInvokeMessage.TextMessage)
+        # Verify it's a valid JSON string that equals the mock response data
+        parsed_back = json.loads(text_message.message.text)
+        assert parsed_back == json_response_data
+
+        # Verify _invoke yields a JSON message
+        json_message = _get_message_by_type(result, ToolInvokeMessage.JsonMessage)
+        assert json_message is not None, "_invoke should yield a JSON message"
+        assert isinstance(json_message, ToolInvokeMessage)
+        assert json_message.type == ToolInvokeMessage.MessageType.JSON
+        assert json_message.message is not None
+
+        assert isinstance(json_message.message, ToolInvokeMessage.JsonMessage)
+        assert json_message.message.json_object == json_response_data
+
+    @patch("core.tools.custom_tool.tool.ssrf_proxy.get")
+    @pytest.mark.parametrize(
+        "test_case",
+        [
+            (
+                "array",
+                [
+                    {"id": 1, "name": "Item 1", "active": True},
+                    {"id": 2, "name": "Item 2", "active": False},
+                    {"id": 3, "name": "项目 3", "active": True},
+                ],
+            ),
+            (
+                "string",
+                "string",
+            ),
+            (
+                "number",
+                123.456,
+            ),
+            (
+                "boolean",
+                True,
+            ),
+            (
+                "null",
+                None,
+            ),
+        ],
+        ids=operator.itemgetter(0),
+    )
+    def test_invoke_with_non_dict_json_response_creates_text_message_with_serialized_json(
+        self, mock_get: Mock, test_case
+    ) -> None:
+        """Test that when upstream returns non-dict JSON, the output Text message contains the JSON-serialized string."""
+        # Setup mock response with non-dict JSON content
+        _, json_value = test_case
+        mock_response = Mock(spec=httpx.Response)
+        mock_response.status_code = 200
+        mock_response.content = json.dumps(json_value).encode("utf-8")
+        mock_response.json.return_value = json_value
+        mock_response.text = json.dumps(json_value)
+        mock_response.headers = {"content-type": "application/json"}
+        mock_get.return_value = mock_response
+
+        # Invoke the tool
+        result_generator = self.api_tool._invoke(user_id="test_user", tool_parameters={})
+
+        # Get the result from the generator
+        result = list(result_generator)
+        assert len(result) == 1
+
+        # Verify _invoke yields a text message
+        text_message = _get_message_by_type(result, ToolInvokeMessage.TextMessage)
+        assert text_message is not None, "_invoke should yield a text message containing the serialized JSON."
+        assert isinstance(text_message, ToolInvokeMessage)
+        assert text_message.type == ToolInvokeMessage.MessageType.TEXT
+        assert text_message.message is not None
+        # Verify the text contains the JSON-serialized string
+        # Check if message is a TextMessage
+        assert isinstance(text_message.message, ToolInvokeMessage.TextMessage)
+        # Verify it's a valid JSON string
+        parsed_back = json.loads(text_message.message.text)
+        assert parsed_back == json_value
+
+        # Verify _invoke does not yield a JSON message
+        json_message = _get_message_by_type(result, ToolInvokeMessage.JsonMessage)
+        assert json_message is None, "_invoke should not yield a JSON message for a non-dict JSON response"
+
+    @patch("core.tools.custom_tool.tool.ssrf_proxy.get")
+    def test_invoke_with_text_response_creates_text_message_with_original_text(self, mock_get: Mock) -> None:
+        """Test that when upstream returns plain text, the output Text message contains the original text."""
+        # Setup mock response with plain text content
+        text_response_data = "This is a plain text response"
+        mock_response = Mock(spec=httpx.Response)
+        mock_response.status_code = 200
+        mock_response.content = text_response_data.encode("utf-8")
+        mock_response.json.side_effect = json.JSONDecodeError("Expecting value", "doc", 0)
+        mock_response.text = text_response_data
+        mock_response.headers = {"content-type": "text/plain"}
+        mock_get.return_value = mock_response
+
+        # Invoke the tool
+        result_generator = self.api_tool._invoke(user_id="test_user", tool_parameters={})
+
+        # Get the result from the generator
+        result = list(result_generator)
+        assert len(result) == 1
+
+        # Verify it's a text message with the original text
+        message = result[0]
+        assert isinstance(message, ToolInvokeMessage)
+        assert message.type == ToolInvokeMessage.MessageType.TEXT
+        assert message.message is not None
+        # Check if message is a TextMessage
+        assert isinstance(message.message, ToolInvokeMessage.TextMessage)
+        assert message.message.text == text_response_data
+
+    @patch("core.tools.custom_tool.tool.ssrf_proxy.get")
+    def test_invoke_with_empty_response(self, mock_get: Mock) -> None:
+        """Test that empty responses are handled correctly."""
+        # Setup mock response with empty content
+        mock_response = Mock(spec=httpx.Response)
+        mock_response.status_code = 200
+        mock_response.content = b""
+        mock_response.headers = {"content-type": "application/json"}
+        mock_get.return_value = mock_response
+
+        # Invoke the tool
+        
result_generator = self.api_tool._invoke(user_id="test_user", tool_parameters={}) + + # Get the result from the generator + result = list(result_generator) + assert len(result) == 1 + + # Verify it's a text message with the empty response message + message = result[0] + assert isinstance(message, ToolInvokeMessage) + assert message.type == ToolInvokeMessage.MessageType.TEXT + assert message.message is not None + # Check if message is a TextMessage + assert isinstance(message.message, ToolInvokeMessage.TextMessage) + assert "Empty response from the tool" in message.message.text + + @patch("core.tools.custom_tool.tool.ssrf_proxy.get") + def test_invoke_with_error_response(self, mock_get: Mock) -> None: + """Test that error responses are handled correctly.""" + # Setup mock response with error status code + mock_response = Mock(spec=httpx.Response) + mock_response.status_code = 404 + mock_response.text = "Not Found" + mock_get.return_value = mock_response + + result_generator = self.api_tool._invoke(user_id="test_user", tool_parameters={}) + + # Invoke the tool and expect an error + with pytest.raises(Exception) as exc_info: + list(result_generator) # Consume the generator to trigger the error + + # Verify the error message + assert "Request failed with status code 404" in str(exc_info.value) diff --git a/api/uv.lock b/api/uv.lock index 050bd4ec1d..e7e51acedf 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -2,12 +2,18 @@ version = 1 revision = 3 requires-python = ">=3.11, <3.13" resolution-markers = [ - "python_full_version >= '3.12.4' and sys_platform == 'linux'", - "python_full_version >= '3.12.4' and sys_platform != 'linux'", - "python_full_version >= '3.12' and python_full_version < '3.12.4' and sys_platform == 'linux'", - "python_full_version >= '3.12' and python_full_version < '3.12.4' and sys_platform != 'linux'", - "python_full_version < '3.12' and sys_platform == 'linux'", - "python_full_version < '3.12' and sys_platform != 'linux'", + "python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'", + "python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform != 'linux'", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform != 'linux'", + "python_full_version >= '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'", + "python_full_version >= '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform != 'linux'", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'", + "python_full_version >= '3.12' and python_full_version < '3.12.4' and platform_python_implementation == 'PyPy' and sys_platform != 'linux'", + "python_full_version < '3.12' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'", + "python_full_version < '3.12' and platform_python_implementation != 'PyPy' and sys_platform != 'linux'", + "python_full_version < '3.12' and platform_python_implementation == 'PyPy' and sys_platform == 'linux'", + "python_full_version < '3.12' and platform_python_implementation == 'PyPy' and sys_platform != 'linux'", ] [[package]] @@ -42,7 +48,7 @@ wheels = [ [[package]] name = "aiohttp" -version = "3.12.15" +version = "3.13.0" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs" }, @@ -53,42 +59,42 @@ dependencies = [ { name = "propcache" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9b/e7/d92a237d8802ca88483906c388f7c201bbe96cd80a165ffd0ac2f6a8d59f/aiohttp-3.12.15.tar.gz", hash = "sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2", size = 7823716, upload-time = "2025-07-29T05:52:32.215Z" } +sdist = { url = "https://files.pythonhosted.org/packages/62/f1/8515650ac3121a9e55c7b217c60e7fae3e0134b5acfe65691781b5356929/aiohttp-3.13.0.tar.gz", hash = "sha256:378dbc57dd8cf341ce243f13fa1fa5394d68e2e02c15cd5f28eae35a70ec7f67", size = 7832348, upload-time = "2025-10-06T19:58:48.089Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/19/9e86722ec8e835959bd97ce8c1efa78cf361fa4531fca372551abcc9cdd6/aiohttp-3.12.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d3ce17ce0220383a0f9ea07175eeaa6aa13ae5a41f30bc61d84df17f0e9b1117", size = 711246, upload-time = "2025-07-29T05:50:15.937Z" }, - { url = "https://files.pythonhosted.org/packages/71/f9/0a31fcb1a7d4629ac9d8f01f1cb9242e2f9943f47f5d03215af91c3c1a26/aiohttp-3.12.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:010cc9bbd06db80fe234d9003f67e97a10fe003bfbedb40da7d71c1008eda0fe", size = 483515, upload-time = "2025-07-29T05:50:17.442Z" }, - { url = "https://files.pythonhosted.org/packages/62/6c/94846f576f1d11df0c2e41d3001000527c0fdf63fce7e69b3927a731325d/aiohttp-3.12.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f9d7c55b41ed687b9d7165b17672340187f87a773c98236c987f08c858145a9", size = 471776, upload-time = "2025-07-29T05:50:19.568Z" }, - { url = "https://files.pythonhosted.org/packages/f8/6c/f766d0aaafcee0447fad0328da780d344489c042e25cd58fde566bf40aed/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc4fbc61bb3548d3b482f9ac7ddd0f18c67e4225aaa4e8552b9f1ac7e6bda9e5", size = 1741977, upload-time = "2025-07-29T05:50:21.665Z" }, - { url = "https://files.pythonhosted.org/packages/17/e5/fb779a05ba6ff44d7bc1e9d24c644e876bfff5abe5454f7b854cace1b9cc/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7fbc8a7c410bb3ad5d595bb7118147dfbb6449d862cc1125cf8867cb337e8728", size = 1690645, upload-time = "2025-07-29T05:50:23.333Z" }, - { url = "https://files.pythonhosted.org/packages/37/4e/a22e799c2035f5d6a4ad2cf8e7c1d1bd0923192871dd6e367dafb158b14c/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74dad41b3458dbb0511e760fb355bb0b6689e0630de8a22b1b62a98777136e16", size = 1789437, upload-time = "2025-07-29T05:50:25.007Z" }, - { url = "https://files.pythonhosted.org/packages/28/e5/55a33b991f6433569babb56018b2fb8fb9146424f8b3a0c8ecca80556762/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b6f0af863cf17e6222b1735a756d664159e58855da99cfe965134a3ff63b0b0", size = 1828482, upload-time = "2025-07-29T05:50:26.693Z" }, - { url = "https://files.pythonhosted.org/packages/c6/82/1ddf0ea4f2f3afe79dffed5e8a246737cff6cbe781887a6a170299e33204/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b7fe4972d48a4da367043b8e023fb70a04d1490aa7d68800e465d1b97e493b", size = 1730944, upload-time = "2025-07-29T05:50:28.382Z" }, - { url = 
"https://files.pythonhosted.org/packages/1b/96/784c785674117b4cb3877522a177ba1b5e4db9ce0fd519430b5de76eec90/aiohttp-3.12.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6443cca89553b7a5485331bc9bedb2342b08d073fa10b8c7d1c60579c4a7b9bd", size = 1668020, upload-time = "2025-07-29T05:50:30.032Z" }, - { url = "https://files.pythonhosted.org/packages/12/8a/8b75f203ea7e5c21c0920d84dd24a5c0e971fe1e9b9ebbf29ae7e8e39790/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c5f40ec615e5264f44b4282ee27628cea221fcad52f27405b80abb346d9f3f8", size = 1716292, upload-time = "2025-07-29T05:50:31.983Z" }, - { url = "https://files.pythonhosted.org/packages/47/0b/a1451543475bb6b86a5cfc27861e52b14085ae232896a2654ff1231c0992/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2abbb216a1d3a2fe86dbd2edce20cdc5e9ad0be6378455b05ec7f77361b3ab50", size = 1711451, upload-time = "2025-07-29T05:50:33.989Z" }, - { url = "https://files.pythonhosted.org/packages/55/fd/793a23a197cc2f0d29188805cfc93aa613407f07e5f9da5cd1366afd9d7c/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:db71ce547012a5420a39c1b744d485cfb823564d01d5d20805977f5ea1345676", size = 1691634, upload-time = "2025-07-29T05:50:35.846Z" }, - { url = "https://files.pythonhosted.org/packages/ca/bf/23a335a6670b5f5dfc6d268328e55a22651b440fca341a64fccf1eada0c6/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ced339d7c9b5030abad5854aa5413a77565e5b6e6248ff927d3e174baf3badf7", size = 1785238, upload-time = "2025-07-29T05:50:37.597Z" }, - { url = "https://files.pythonhosted.org/packages/57/4f/ed60a591839a9d85d40694aba5cef86dde9ee51ce6cca0bb30d6eb1581e7/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7c7dd29c7b5bda137464dc9bfc738d7ceea46ff70309859ffde8c022e9b08ba7", size = 1805701, upload-time = "2025-07-29T05:50:39.591Z" }, - { url = "https://files.pythonhosted.org/packages/85/e0/444747a9455c5de188c0f4a0173ee701e2e325d4b2550e9af84abb20cdba/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:421da6fd326460517873274875c6c5a18ff225b40da2616083c5a34a7570b685", size = 1718758, upload-time = "2025-07-29T05:50:41.292Z" }, - { url = "https://files.pythonhosted.org/packages/36/ab/1006278d1ffd13a698e5dd4bfa01e5878f6bddefc296c8b62649753ff249/aiohttp-3.12.15-cp311-cp311-win32.whl", hash = "sha256:4420cf9d179ec8dfe4be10e7d0fe47d6d606485512ea2265b0d8c5113372771b", size = 428868, upload-time = "2025-07-29T05:50:43.063Z" }, - { url = "https://files.pythonhosted.org/packages/10/97/ad2b18700708452400278039272032170246a1bf8ec5d832772372c71f1a/aiohttp-3.12.15-cp311-cp311-win_amd64.whl", hash = "sha256:edd533a07da85baa4b423ee8839e3e91681c7bfa19b04260a469ee94b778bf6d", size = 453273, upload-time = "2025-07-29T05:50:44.613Z" }, - { url = "https://files.pythonhosted.org/packages/63/97/77cb2450d9b35f517d6cf506256bf4f5bda3f93a66b4ad64ba7fc917899c/aiohttp-3.12.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7", size = 702333, upload-time = "2025-07-29T05:50:46.507Z" }, - { url = "https://files.pythonhosted.org/packages/83/6d/0544e6b08b748682c30b9f65640d006e51f90763b41d7c546693bc22900d/aiohttp-3.12.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444", size = 476948, upload-time = "2025-07-29T05:50:48.067Z" }, - { url = 
"https://files.pythonhosted.org/packages/3a/1d/c8c40e611e5094330284b1aea8a4b02ca0858f8458614fa35754cab42b9c/aiohttp-3.12.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d", size = 469787, upload-time = "2025-07-29T05:50:49.669Z" }, - { url = "https://files.pythonhosted.org/packages/38/7d/b76438e70319796bfff717f325d97ce2e9310f752a267bfdf5192ac6082b/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c", size = 1716590, upload-time = "2025-07-29T05:50:51.368Z" }, - { url = "https://files.pythonhosted.org/packages/79/b1/60370d70cdf8b269ee1444b390cbd72ce514f0d1cd1a715821c784d272c9/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0", size = 1699241, upload-time = "2025-07-29T05:50:53.628Z" }, - { url = "https://files.pythonhosted.org/packages/a3/2b/4968a7b8792437ebc12186db31523f541943e99bda8f30335c482bea6879/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab", size = 1754335, upload-time = "2025-07-29T05:50:55.394Z" }, - { url = "https://files.pythonhosted.org/packages/fb/c1/49524ed553f9a0bec1a11fac09e790f49ff669bcd14164f9fab608831c4d/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb", size = 1800491, upload-time = "2025-07-29T05:50:57.202Z" }, - { url = "https://files.pythonhosted.org/packages/de/5e/3bf5acea47a96a28c121b167f5ef659cf71208b19e52a88cdfa5c37f1fcc/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545", size = 1719929, upload-time = "2025-07-29T05:50:59.192Z" }, - { url = "https://files.pythonhosted.org/packages/39/94/8ae30b806835bcd1cba799ba35347dee6961a11bd507db634516210e91d8/aiohttp-3.12.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c", size = 1635733, upload-time = "2025-07-29T05:51:01.394Z" }, - { url = "https://files.pythonhosted.org/packages/7a/46/06cdef71dd03acd9da7f51ab3a9107318aee12ad38d273f654e4f981583a/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd", size = 1696790, upload-time = "2025-07-29T05:51:03.657Z" }, - { url = "https://files.pythonhosted.org/packages/02/90/6b4cfaaf92ed98d0ec4d173e78b99b4b1a7551250be8937d9d67ecb356b4/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f", size = 1718245, upload-time = "2025-07-29T05:51:05.911Z" }, - { url = "https://files.pythonhosted.org/packages/2e/e6/2593751670fa06f080a846f37f112cbe6f873ba510d070136a6ed46117c6/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d", size = 1658899, upload-time = "2025-07-29T05:51:07.753Z" }, - { url = "https://files.pythonhosted.org/packages/8f/28/c15bacbdb8b8eb5bf39b10680d129ea7410b859e379b03190f02fa104ffd/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519", size = 1738459, upload-time = "2025-07-29T05:51:09.56Z" }, - { url = "https://files.pythonhosted.org/packages/00/de/c269cbc4faa01fb10f143b1670633a8ddd5b2e1ffd0548f7aa49cb5c70e2/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea", size = 1766434, upload-time = "2025-07-29T05:51:11.423Z" }, - { url = "https://files.pythonhosted.org/packages/52/b0/4ff3abd81aa7d929b27d2e1403722a65fc87b763e3a97b3a2a494bfc63bc/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3", size = 1726045, upload-time = "2025-07-29T05:51:13.689Z" }, - { url = "https://files.pythonhosted.org/packages/71/16/949225a6a2dd6efcbd855fbd90cf476052e648fb011aa538e3b15b89a57a/aiohttp-3.12.15-cp312-cp312-win32.whl", hash = "sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1", size = 423591, upload-time = "2025-07-29T05:51:15.452Z" }, - { url = "https://files.pythonhosted.org/packages/2b/d8/fa65d2a349fe938b76d309db1a56a75c4fb8cc7b17a398b698488a939903/aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34", size = 450266, upload-time = "2025-07-29T05:51:17.239Z" }, + { url = "https://files.pythonhosted.org/packages/b1/db/df80cacac46cd548a736c5535b13cc18925cf6f9f83cd128cf3839842219/aiohttp-3.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:99eb94e97a42367fef5fc11e28cb2362809d3e70837f6e60557816c7106e2e20", size = 741374, upload-time = "2025-10-06T19:55:13.095Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f9/2d6d93fd57ab4726e18a7cdab083772eda8302d682620fbf2aef48322351/aiohttp-3.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4696665b2713021c6eba3e2b882a86013763b442577fe5d2056a42111e732eca", size = 494956, upload-time = "2025-10-06T19:55:14.687Z" }, + { url = "https://files.pythonhosted.org/packages/89/a6/e1c061b079fed04ffd6777950c82f2e8246fd08b7b3c4f56fdd47f697e5a/aiohttp-3.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3e6a38366f7f0d0f6ed7a1198055150c52fda552b107dad4785c0852ad7685d1", size = 491154, upload-time = "2025-10-06T19:55:16.661Z" }, + { url = "https://files.pythonhosted.org/packages/fe/4d/ee8913c0d2c7da37fdc98673a342b51611eaa0871682b37b8430084e35b5/aiohttp-3.13.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aab715b1a0c37f7f11f9f1f579c6fbaa51ef569e47e3c0a4644fba46077a9409", size = 1745707, upload-time = "2025-10-06T19:55:18.376Z" }, + { url = "https://files.pythonhosted.org/packages/f9/70/26b2c97e8fa68644aec43d788940984c5f3b53a8d1468d5baaa328f809c9/aiohttp-3.13.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7972c82bed87d7bd8e374b60a6b6e816d75ba4f7c2627c2d14eed216e62738e1", size = 1702404, upload-time = "2025-10-06T19:55:20.098Z" }, + { url = "https://files.pythonhosted.org/packages/65/1e/c8aa3c293a0e8b18968b1b88e9bd8fb269eb67eb7449f504a4c3e175b159/aiohttp-3.13.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca8313cb852af788c78d5afdea24c40172cbfff8b35e58b407467732fde20390", size = 1805519, upload-time = "2025-10-06T19:55:21.811Z" }, + { url = 
"https://files.pythonhosted.org/packages/51/b6/a3753fe86249eb441768658cfc00f8c4e0913b255c13be00ddb8192775e1/aiohttp-3.13.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c333a2385d2a6298265f4b3e960590f787311b87f6b5e6e21bb8375914ef504", size = 1893904, upload-time = "2025-10-06T19:55:23.462Z" }, + { url = "https://files.pythonhosted.org/packages/51/6d/7b1e020fe1d2a2be7cf0ce5e35922f345e3507cf337faa1a6563c42065c1/aiohttp-3.13.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cc6d5fc5edbfb8041d9607f6a417997fa4d02de78284d386bea7ab767b5ea4f3", size = 1745043, upload-time = "2025-10-06T19:55:25.208Z" }, + { url = "https://files.pythonhosted.org/packages/e6/df/aad5dce268f9d4f29759c3eeb5fb5995c569d76abb267468dc1075218d5b/aiohttp-3.13.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ddedba3d0043349edc79df3dc2da49c72b06d59a45a42c1c8d987e6b8d175b8", size = 1604765, upload-time = "2025-10-06T19:55:27.157Z" }, + { url = "https://files.pythonhosted.org/packages/1c/19/a84a0e97b2da2224c8b85e1aef5cac834d07b2903c17bff1a6bdbc7041d2/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23ca762140159417a6bbc959ca1927f6949711851e56f2181ddfe8d63512b5ad", size = 1721737, upload-time = "2025-10-06T19:55:28.854Z" }, + { url = "https://files.pythonhosted.org/packages/6c/61/ca6ad390128d964a08554fd63d6df5810fb5fbc7e599cb9e617f1729ae19/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bfe824d6707a5dc3c5676685f624bc0c63c40d79dc0239a7fd6c034b98c25ebe", size = 1716052, upload-time = "2025-10-06T19:55:30.563Z" }, + { url = "https://files.pythonhosted.org/packages/2a/71/769e249e6625372c7d14be79b8b8c3b0592963a09793fb3d36758e60952c/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3c11fa5dd2ef773a8a5a6daa40243d83b450915992eab021789498dc87acc114", size = 1783532, upload-time = "2025-10-06T19:55:32.798Z" }, + { url = "https://files.pythonhosted.org/packages/66/64/b9cd03cdbb629bc492e4a744fbe96550a8340b0cd7a0cc4a9c90cfecd8d3/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00fdfe370cffede3163ba9d3f190b32c0cfc8c774f6f67395683d7b0e48cdb8a", size = 1593072, upload-time = "2025-10-06T19:55:34.686Z" }, + { url = "https://files.pythonhosted.org/packages/24/0e/87922c8cfdbd09f5e2197e9d87714a98c99c423560d44739e3af55400fe3/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6475e42ef92717a678bfbf50885a682bb360a6f9c8819fb1a388d98198fdcb80", size = 1798613, upload-time = "2025-10-06T19:55:36.393Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bb/a3adfe2af76e1ee9e3b5464522004b148b266bc99d7ec424ca7843d64a3c/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:77da5305a410910218b99f2a963092f4277d8a9c1f429c1ff1b026d1826bd0b6", size = 1737480, upload-time = "2025-10-06T19:55:38.043Z" }, + { url = "https://files.pythonhosted.org/packages/ad/53/e124dcbd64e6365602f3493fe37a11ca5b7ac0a40822a6e2bc8260cd08e0/aiohttp-3.13.0-cp311-cp311-win32.whl", hash = "sha256:2f9d9ea547618d907f2ee6670c9a951f059c5994e4b6de8dcf7d9747b420c820", size = 429824, upload-time = "2025-10-06T19:55:39.595Z" }, + { url = "https://files.pythonhosted.org/packages/3e/bd/485d98b372a2cd6998484a93ddd401ec6b6031657661c36846a10e2a1f6e/aiohttp-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f19f7798996d4458c669bd770504f710014926e9970f4729cf55853ae200469", size = 454137, upload-time = "2025-10-06T19:55:41.617Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/95/7e8bdfa6e79099a086d59d42589492f1fe9d29aae3cefb58b676015ce278/aiohttp-3.13.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1c272a9a18a5ecc48a7101882230046b83023bb2a662050ecb9bfcb28d9ab53a", size = 735585, upload-time = "2025-10-06T19:55:43.401Z" }, + { url = "https://files.pythonhosted.org/packages/9f/20/2f1d3ee06ee94eafe516810705219bff234d09f135d6951661661d5595ae/aiohttp-3.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:97891a23d7fd4e1afe9c2f4473e04595e4acb18e4733b910b6577b74e7e21985", size = 490613, upload-time = "2025-10-06T19:55:45.237Z" }, + { url = "https://files.pythonhosted.org/packages/74/15/ab8600ef6dc1dcd599009a81acfed2ea407037e654d32e47e344e0b08c34/aiohttp-3.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:475bd56492ce5f4cffe32b5533c6533ee0c406d1d0e6924879f83adcf51da0ae", size = 489750, upload-time = "2025-10-06T19:55:46.937Z" }, + { url = "https://files.pythonhosted.org/packages/33/59/752640c2b86ca987fe5703a01733b00d375e6cd2392bc7574489934e64e5/aiohttp-3.13.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c32ada0abb4bc94c30be2b681c42f058ab104d048da6f0148280a51ce98add8c", size = 1736812, upload-time = "2025-10-06T19:55:48.917Z" }, + { url = "https://files.pythonhosted.org/packages/3d/c6/dd6b86ddb852a7fdbcdc7a45b6bdc80178aef713c08279afcaee7a5a9f07/aiohttp-3.13.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4af1f8877ca46ecdd0bc0d4a6b66d4b2bddc84a79e2e8366bc0d5308e76bceb8", size = 1698535, upload-time = "2025-10-06T19:55:50.75Z" }, + { url = "https://files.pythonhosted.org/packages/33/e2/27c92d205b9e8cee7661670e8e9f187931b71e26d42796b153d2a0ba6949/aiohttp-3.13.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e04ab827ec4f775817736b20cdc8350f40327f9b598dec4e18c9ffdcbea88a93", size = 1766573, upload-time = "2025-10-06T19:55:53.106Z" }, + { url = "https://files.pythonhosted.org/packages/df/6a/1fc1ad71d130a30f7a207d8d958a41224c29b834463b5185efb2dbff6ad4/aiohttp-3.13.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a6d9487b9471ec36b0faedf52228cd732e89be0a2bbd649af890b5e2ce422353", size = 1865229, upload-time = "2025-10-06T19:55:55.01Z" }, + { url = "https://files.pythonhosted.org/packages/14/51/d0c1701a79fcb0109cff5304da16226581569b89a282d8e7f1549a7e3ec0/aiohttp-3.13.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e66c57416352f36bf98f6641ddadd47c93740a22af7150d3e9a1ef6e983f9a8", size = 1750379, upload-time = "2025-10-06T19:55:57.219Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3d/2ec4b934f85856de1c0c18e90adc8902adadbfac2b3c0b831bfeb7214fc8/aiohttp-3.13.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:469167d5372f5bb3aedff4fc53035d593884fff2617a75317740e885acd48b04", size = 1560798, upload-time = "2025-10-06T19:55:58.888Z" }, + { url = "https://files.pythonhosted.org/packages/38/56/e23d9c3e13006e599fdce3851517c70279e177871e3e567d22cf3baf5d6c/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a9f3546b503975a69b547c9fd1582cad10ede1ce6f3e313a2f547c73a3d7814f", size = 1697552, upload-time = "2025-10-06T19:56:01.172Z" }, + { url = "https://files.pythonhosted.org/packages/56/cb/caa32c2ccaeca0a3dc39129079fd2ad02f9406c3a5f7924340435b87d4cd/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = 
"sha256:6b4174fcec98601f0cfdf308ee29a6ae53c55f14359e848dab4e94009112ee7d", size = 1718609, upload-time = "2025-10-06T19:56:03.102Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c0/5911856fef9e40fd1ccbb8c54a90116875d5753a92c1cac66ce2059b390d/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a533873a7a4ec2270fb362ee5a0d3b98752e4e1dc9042b257cd54545a96bd8ed", size = 1735887, upload-time = "2025-10-06T19:56:04.841Z" }, + { url = "https://files.pythonhosted.org/packages/0e/48/8d6f4757a24c02f0a454c043556593a00645d10583859f7156db44d8b7d3/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ce887c5e54411d607ee0959cac15bb31d506d86a9bcaddf0b7e9d63325a7a802", size = 1553079, upload-time = "2025-10-06T19:56:07.197Z" }, + { url = "https://files.pythonhosted.org/packages/39/fa/e82c9445e40b50e46770702b5b6ca2f767966d53e1a5eef03583ceac6df6/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d871f6a30d43e32fc9252dc7b9febe1a042b3ff3908aa83868d7cf7c9579a59b", size = 1762750, upload-time = "2025-10-06T19:56:09.376Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e6/9d30554e7f1e700bfeae4ab6b153d5dc7441606a9ec5e929288fa93a1477/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:222c828243b4789d79a706a876910f656fad4381661691220ba57b2ab4547865", size = 1717461, upload-time = "2025-10-06T19:56:11.551Z" }, + { url = "https://files.pythonhosted.org/packages/1f/e5/29cca547990a59ea54f0674fc01de98519fc628cfceeab6175711750eca7/aiohttp-3.13.0-cp312-cp312-win32.whl", hash = "sha256:682d2e434ff2f1108314ff7f056ce44e457f12dbed0249b24e106e385cf154b9", size = 424633, upload-time = "2025-10-06T19:56:13.316Z" }, + { url = "https://files.pythonhosted.org/packages/8b/68/46dd042d7bc62eab30bafdb8569f55ef125c3a88bb174270324224f8df56/aiohttp-3.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:0a2be20eb23888df130214b91c262a90e2de1553d6fb7de9e9010cec994c0ff2", size = 451401, upload-time = "2025-10-06T19:56:15.188Z" }, ] [[package]] @@ -118,16 +124,16 @@ wheels = [ [[package]] name = "alembic" -version = "1.16.5" +version = "1.17.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mako" }, { name = "sqlalchemy" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9a/ca/4dc52902cf3491892d464f5265a81e9dff094692c8a049a3ed6a05fe7ee8/alembic-1.16.5.tar.gz", hash = "sha256:a88bb7f6e513bd4301ecf4c7f2206fe93f9913f9b48dac3b78babde2d6fe765e", size = 1969868, upload-time = "2025-08-27T18:02:05.668Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/45/6f4555f2039f364c3ce31399529dcf48dd60726ff3715ad67f547d87dfd2/alembic-1.17.0.tar.gz", hash = "sha256:4652a0b3e19616b57d652b82bfa5e38bf5dbea0813eed971612671cb9e90c0fe", size = 1975526, upload-time = "2025-10-11T18:40:13.585Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/4a/4c61d4c84cfd9befb6fa08a702535b27b21fff08c946bc2f6139decbf7f7/alembic-1.16.5-py3-none-any.whl", hash = "sha256:e845dfe090c5ffa7b92593ae6687c5cb1a101e91fa53868497dbd79847f9dbe3", size = 247355, upload-time = "2025-08-27T18:02:07.37Z" }, + { url = "https://files.pythonhosted.org/packages/44/1f/38e29b06bfed7818ebba1f84904afdc8153ef7b6c7e0d8f3bc6643f5989c/alembic-1.17.0-py3-none-any.whl", hash = "sha256:80523bc437d41b35c5db7e525ad9d908f79de65c27d6a5a5eab6df348a352d99", size = 247449, upload-time = "2025-10-11T18:40:16.288Z" }, ] [[package]] @@ -333,16 +339,16 @@ wheels = [ [[package]] name = "anyio" -version = "4.10.0" +version = "4.11.0" source = { 
registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "sniffio" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252, upload-time = "2025-08-04T08:54:26.451Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213, upload-time = "2025-08-04T08:54:24.882Z" }, + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, ] [[package]] @@ -377,11 +383,11 @@ wheels = [ [[package]] name = "asgiref" -version = "3.9.1" +version = "3.10.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/90/61/0aa957eec22ff70b830b22ff91f825e70e1ef732c06666a805730f28b36b/asgiref-3.9.1.tar.gz", hash = "sha256:a5ab6582236218e5ef1648f242fd9f10626cfd4de8dc377db215d5d5098e3142", size = 36870, upload-time = "2025-07-08T09:07:43.344Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/08/4dfec9b90758a59acc6be32ac82e98d1fbfc321cb5cfa410436dbacf821c/asgiref-3.10.0.tar.gz", hash = "sha256:d89f2d8cd8b56dada7d52fa7dc8075baa08fb836560710d38c292a7a3f78c04e", size = 37483, upload-time = "2025-10-05T09:15:06.557Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/3c/0464dcada90d5da0e71018c04a140ad6349558afb30b3051b4264cc5b965/asgiref-3.9.1-py3-none-any.whl", hash = "sha256:f3bba7092a48005b5f5bacd747d36ee4a5a61f4a269a6df590b43144355ebd2c", size = 23790, upload-time = "2025-07-08T09:07:41.548Z" }, + { url = "https://files.pythonhosted.org/packages/17/9c/fc2331f538fbf7eedba64b2052e99ccf9ba9d6888e2f41441ee28847004b/asgiref-3.10.0-py3-none-any.whl", hash = "sha256:aef8a81283a34d0ab31630c9b7dfe70c812c95eba78171367ca8745e88124734", size = 24050, upload-time = "2025-10-05T09:15:05.11Z" }, ] [[package]] @@ -395,23 +401,23 @@ wheels = [ [[package]] name = "attrs" -version = "25.3.0" +version = "25.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = 
"sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, ] [[package]] name = "authlib" -version = "1.6.4" +version = "1.6.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ce/bb/73a1f1c64ee527877f64122422dafe5b87a846ccf4ac933fe21bcbb8fee8/authlib-1.6.4.tar.gz", hash = "sha256:104b0442a43061dc8bc23b133d1d06a2b0a9c2e3e33f34c4338929e816287649", size = 164046, upload-time = "2025-09-17T09:59:23.897Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/3f/1d3bbd0bf23bdd99276d4def22f29c27a914067b4cf66f753ff9b8bbd0f3/authlib-1.6.5.tar.gz", hash = "sha256:6aaf9c79b7cc96c900f0b284061691c5d4e61221640a948fe690b556a6d6d10b", size = 164553, upload-time = "2025-10-02T13:36:09.489Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/aa/91355b5f539caf1b94f0e66ff1e4ee39373b757fce08204981f7829ede51/authlib-1.6.4-py2.py3-none-any.whl", hash = "sha256:39313d2a2caac3ecf6d8f95fbebdfd30ae6ea6ae6a6db794d976405fdd9aa796", size = 243076, upload-time = "2025-09-17T09:59:22.259Z" }, + { url = "https://files.pythonhosted.org/packages/f8/aa/5082412d1ee302e9e7d80b6949bc4d2a8fa1149aaab610c5fc24709605d6/authlib-1.6.5-py2.py3-none-any.whl", hash = "sha256:3e0e0507807f842b02175507bdee8957a1d5707fd4afb17c32fb43fee90b6e3a", size = 243608, upload-time = "2025-10-02T13:36:07.637Z" }, ] [[package]] @@ -469,68 +475,70 @@ wheels = [ [[package]] name = "basedpyright" -version = "1.31.4" +version = "1.31.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nodejs-wheel-binaries" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0b/53/570b03ec0445a9b2cc69788482c1d12902a9b88a9b159e449c4c537c4e3a/basedpyright-1.31.4.tar.gz", hash = "sha256:2450deb16530f7c88c1a7da04530a079f9b0b18ae1c71cb6f812825b3b82d0b1", size = 22494467, upload-time = "2025-09-03T13:05:55.817Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/ba/ed69e8df732a09c8ca469f592c8e08707fe29149735b834c276d94d4a3da/basedpyright-1.31.7.tar.gz", hash = "sha256:394f334c742a19bcc5905b2455c9f5858182866b7679a6f057a70b44b049bceb", size = 22710948, upload-time = "2025-10-11T05:12:48.3Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/40/d1047a5addcade9291685d06ef42a63c1347517018bafd82747af9da0294/basedpyright-1.31.4-py3-none-any.whl", hash = "sha256:055e4a38024bd653be12d6216c1cfdbee49a1096d342b4d5f5b4560f7714b6fc", size = 11731440, upload-time = "2025-09-03T13:05:52.308Z" }, + { url = "https://files.pythonhosted.org/packages/f8/90/ce01ad2d0afdc1b82b8b5aaba27e60d2e138e39d887e71c35c55d8f1bfcd/basedpyright-1.31.7-py3-none-any.whl", hash = "sha256:7c54beb7828c9ed0028630aaa6904f395c27e5a9f5a313aa9e91fc1d11170831", size = 11817571, upload-time = "2025-10-11T05:12:45.432Z" }, ] [[package]] name = "bce-python-sdk" -version = "0.9.45" +version = "0.9.46" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "future" }, { name = "pycryptodome" }, { name = "six" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/53/19/0f23aedecb980288e663ba9ce81fa1545d6331d62bd75262fca49678052d/bce_python_sdk-0.9.45.tar.gz", hash = "sha256:ba60d66e80fcd012a6362bf011fee18bca616b0005814d261aba3aa202f7025f", size = 252769, upload-time = "2025-08-28T10:24:54.303Z" } +sdist = { url = "https://files.pythonhosted.org/packages/53/57/f98bc15c12cc022ef195f689ee57ed61d8a8677bda3089c4d58fb1872d45/bce_python_sdk-0.9.46.tar.gz", hash = "sha256:4bf01b22e6d172ccd94aa201f8bc6f2a98d0da4784160e77cfacfcc71c2686be", size = 253806, upload-time = "2025-09-15T06:51:52.753Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cf/1f/d3fd91808a1f4881b4072424390d38e85707edd75ed5d9cea2a0299a7a7a/bce_python_sdk-0.9.45-py3-none-any.whl", hash = "sha256:cce3ca7ad4de8be2cc0722c1d6a7db7be6f2833f8d9ca7f892c572e6ff78a959", size = 352012, upload-time = "2025-08-28T10:24:52.387Z" }, + { url = "https://files.pythonhosted.org/packages/c0/f5/20e9ab324b22a77970c57bc8267e586e85e2aa1277d80f2c58ca8a39a13e/bce_python_sdk-0.9.46-py3-none-any.whl", hash = "sha256:655074da6592ce8b036f605d9a272bfdcd1f515eb2f8e3f0333bb7cc62f700cb", size = 352622, upload-time = "2025-09-15T06:51:50.811Z" }, ] [[package]] name = "bcrypt" -version = "4.3.0" +version = "5.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bb/5d/6d7433e0f3cd46ce0b43cd65e1db465ea024dbb8216fb2404e919c2ad77b/bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18", size = 25697, upload-time = "2025-02-28T01:24:09.174Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/36/3329e2518d70ad8e2e5817d5a4cac6bba05a47767ec416c7d020a965f408/bcrypt-5.0.0.tar.gz", hash = "sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd", size = 25386, upload-time = "2025-09-25T19:50:47.829Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/22/5ada0b9af72b60cbc4c9a399fdde4af0feaa609d27eb0adc61607997a3fa/bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d", size = 498019, upload-time = "2025-02-28T01:23:05.838Z" }, - { url = "https://files.pythonhosted.org/packages/b8/8c/252a1edc598dc1ce57905be173328eda073083826955ee3c97c7ff5ba584/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b", size = 279174, upload-time = "2025-02-28T01:23:07.274Z" }, - { url = "https://files.pythonhosted.org/packages/29/5b/4547d5c49b85f0337c13929f2ccbe08b7283069eea3550a457914fc078aa/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e", size = 283870, upload-time = "2025-02-28T01:23:09.151Z" }, - { url = "https://files.pythonhosted.org/packages/be/21/7dbaf3fa1745cb63f776bb046e481fbababd7d344c5324eab47f5ca92dd2/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59", size = 279601, upload-time = "2025-02-28T01:23:11.461Z" }, - { url = "https://files.pythonhosted.org/packages/6d/64/e042fc8262e971347d9230d9abbe70d68b0a549acd8611c83cebd3eaec67/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753", size = 297660, upload-time = "2025-02-28T01:23:12.989Z" }, - { url = 
"https://files.pythonhosted.org/packages/50/b8/6294eb84a3fef3b67c69b4470fcdd5326676806bf2519cda79331ab3c3a9/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761", size = 284083, upload-time = "2025-02-28T01:23:14.5Z" }, - { url = "https://files.pythonhosted.org/packages/62/e6/baff635a4f2c42e8788fe1b1633911c38551ecca9a749d1052d296329da6/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb", size = 279237, upload-time = "2025-02-28T01:23:16.686Z" }, - { url = "https://files.pythonhosted.org/packages/39/48/46f623f1b0c7dc2e5de0b8af5e6f5ac4cc26408ac33f3d424e5ad8da4a90/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d", size = 283737, upload-time = "2025-02-28T01:23:18.897Z" }, - { url = "https://files.pythonhosted.org/packages/49/8b/70671c3ce9c0fca4a6cc3cc6ccbaa7e948875a2e62cbd146e04a4011899c/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f", size = 312741, upload-time = "2025-02-28T01:23:21.041Z" }, - { url = "https://files.pythonhosted.org/packages/27/fb/910d3a1caa2d249b6040a5caf9f9866c52114d51523ac2fb47578a27faee/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732", size = 316472, upload-time = "2025-02-28T01:23:23.183Z" }, - { url = "https://files.pythonhosted.org/packages/dc/cf/7cf3a05b66ce466cfb575dbbda39718d45a609daa78500f57fa9f36fa3c0/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef", size = 343606, upload-time = "2025-02-28T01:23:25.361Z" }, - { url = "https://files.pythonhosted.org/packages/e3/b8/e970ecc6d7e355c0d892b7f733480f4aa8509f99b33e71550242cf0b7e63/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304", size = 362867, upload-time = "2025-02-28T01:23:26.875Z" }, - { url = "https://files.pythonhosted.org/packages/a9/97/8d3118efd8354c555a3422d544163f40d9f236be5b96c714086463f11699/bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51", size = 160589, upload-time = "2025-02-28T01:23:28.381Z" }, - { url = "https://files.pythonhosted.org/packages/29/07/416f0b99f7f3997c69815365babbc2e8754181a4b1899d921b3c7d5b6f12/bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62", size = 152794, upload-time = "2025-02-28T01:23:30.187Z" }, - { url = "https://files.pythonhosted.org/packages/6e/c1/3fa0e9e4e0bfd3fd77eb8b52ec198fd6e1fd7e9402052e43f23483f956dd/bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3", size = 498969, upload-time = "2025-02-28T01:23:31.945Z" }, - { url = "https://files.pythonhosted.org/packages/ce/d4/755ce19b6743394787fbd7dff6bf271b27ee9b5912a97242e3caf125885b/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24", size = 279158, upload-time = "2025-02-28T01:23:34.161Z" }, - { url = 
"https://files.pythonhosted.org/packages/9b/5d/805ef1a749c965c46b28285dfb5cd272a7ed9fa971f970435a5133250182/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef", size = 284285, upload-time = "2025-02-28T01:23:35.765Z" }, - { url = "https://files.pythonhosted.org/packages/ab/2b/698580547a4a4988e415721b71eb45e80c879f0fb04a62da131f45987b96/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b", size = 279583, upload-time = "2025-02-28T01:23:38.021Z" }, - { url = "https://files.pythonhosted.org/packages/f2/87/62e1e426418204db520f955ffd06f1efd389feca893dad7095bf35612eec/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676", size = 297896, upload-time = "2025-02-28T01:23:39.575Z" }, - { url = "https://files.pythonhosted.org/packages/cb/c6/8fedca4c2ada1b6e889c52d2943b2f968d3427e5d65f595620ec4c06fa2f/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1", size = 284492, upload-time = "2025-02-28T01:23:40.901Z" }, - { url = "https://files.pythonhosted.org/packages/4d/4d/c43332dcaaddb7710a8ff5269fcccba97ed3c85987ddaa808db084267b9a/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe", size = 279213, upload-time = "2025-02-28T01:23:42.653Z" }, - { url = "https://files.pythonhosted.org/packages/dc/7f/1e36379e169a7df3a14a1c160a49b7b918600a6008de43ff20d479e6f4b5/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0", size = 284162, upload-time = "2025-02-28T01:23:43.964Z" }, - { url = "https://files.pythonhosted.org/packages/1c/0a/644b2731194b0d7646f3210dc4d80c7fee3ecb3a1f791a6e0ae6bb8684e3/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f", size = 312856, upload-time = "2025-02-28T01:23:46.011Z" }, - { url = "https://files.pythonhosted.org/packages/dc/62/2a871837c0bb6ab0c9a88bf54de0fc021a6a08832d4ea313ed92a669d437/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23", size = 316726, upload-time = "2025-02-28T01:23:47.575Z" }, - { url = "https://files.pythonhosted.org/packages/0c/a1/9898ea3faac0b156d457fd73a3cb9c2855c6fd063e44b8522925cdd8ce46/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe", size = 343664, upload-time = "2025-02-28T01:23:49.059Z" }, - { url = "https://files.pythonhosted.org/packages/40/f2/71b4ed65ce38982ecdda0ff20c3ad1b15e71949c78b2c053df53629ce940/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505", size = 363128, upload-time = "2025-02-28T01:23:50.399Z" }, - { url = "https://files.pythonhosted.org/packages/11/99/12f6a58eca6dea4be992d6c681b7ec9410a1d9f5cf368c61437e31daa879/bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a", size = 160598, upload-time = "2025-02-28T01:23:51.775Z" }, - { url = 
"https://files.pythonhosted.org/packages/a9/cf/45fb5261ece3e6b9817d3d82b2f343a505fd58674a92577923bc500bd1aa/bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b", size = 152799, upload-time = "2025-02-28T01:23:53.139Z" }, - { url = "https://files.pythonhosted.org/packages/4c/b1/1289e21d710496b88340369137cc4c5f6ee036401190ea116a7b4ae6d32a/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a", size = 275103, upload-time = "2025-02-28T01:24:00.764Z" }, - { url = "https://files.pythonhosted.org/packages/94/41/19be9fe17e4ffc5d10b7b67f10e459fc4eee6ffe9056a88de511920cfd8d/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce", size = 280513, upload-time = "2025-02-28T01:24:02.243Z" }, - { url = "https://files.pythonhosted.org/packages/aa/73/05687a9ef89edebdd8ad7474c16d8af685eb4591c3c38300bb6aad4f0076/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8", size = 274685, upload-time = "2025-02-28T01:24:04.512Z" }, - { url = "https://files.pythonhosted.org/packages/63/13/47bba97924ebe86a62ef83dc75b7c8a881d53c535f83e2c54c4bd701e05c/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938", size = 280110, upload-time = "2025-02-28T01:24:05.896Z" }, + { url = "https://files.pythonhosted.org/packages/84/29/6237f151fbfe295fe3e074ecc6d44228faa1e842a81f6d34a02937ee1736/bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b", size = 494553, upload-time = "2025-09-25T19:49:49.006Z" }, + { url = "https://files.pythonhosted.org/packages/45/b6/4c1205dde5e464ea3bd88e8742e19f899c16fa8916fb8510a851fae985b5/bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb", size = 275009, upload-time = "2025-09-25T19:49:50.581Z" }, + { url = "https://files.pythonhosted.org/packages/3b/71/427945e6ead72ccffe77894b2655b695ccf14ae1866cd977e185d606dd2f/bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef", size = 278029, upload-time = "2025-09-25T19:49:52.533Z" }, + { url = "https://files.pythonhosted.org/packages/17/72/c344825e3b83c5389a369c8a8e58ffe1480b8a699f46c127c34580c4666b/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd", size = 275907, upload-time = "2025-09-25T19:49:54.709Z" }, + { url = "https://files.pythonhosted.org/packages/0b/7e/d4e47d2df1641a36d1212e5c0514f5291e1a956a7749f1e595c07a972038/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd", size = 296500, upload-time = "2025-09-25T19:49:56.013Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c3/0ae57a68be2039287ec28bc463b82e4b8dc23f9d12c0be331f4782e19108/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464", size = 278412, upload-time = "2025-09-25T19:49:57.356Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/2b/77424511adb11e6a99e3a00dcc7745034bee89036ad7d7e255a7e47be7d8/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75", size = 275486, upload-time = "2025-09-25T19:49:59.116Z" }, + { url = "https://files.pythonhosted.org/packages/43/0a/405c753f6158e0f3f14b00b462d8bca31296f7ecfc8fc8bc7919c0c7d73a/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff", size = 277940, upload-time = "2025-09-25T19:50:00.869Z" }, + { url = "https://files.pythonhosted.org/packages/62/83/b3efc285d4aadc1fa83db385ec64dcfa1707e890eb42f03b127d66ac1b7b/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4", size = 310776, upload-time = "2025-09-25T19:50:02.393Z" }, + { url = "https://files.pythonhosted.org/packages/95/7d/47ee337dacecde6d234890fe929936cb03ebc4c3a7460854bbd9c97780b8/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb", size = 312922, upload-time = "2025-09-25T19:50:04.232Z" }, + { url = "https://files.pythonhosted.org/packages/d6/3a/43d494dfb728f55f4e1cf8fd435d50c16a2d75493225b54c8d06122523c6/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c", size = 341367, upload-time = "2025-09-25T19:50:05.559Z" }, + { url = "https://files.pythonhosted.org/packages/55/ab/a0727a4547e383e2e22a630e0f908113db37904f58719dc48d4622139b5c/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb", size = 359187, upload-time = "2025-09-25T19:50:06.916Z" }, + { url = "https://files.pythonhosted.org/packages/1b/bb/461f352fdca663524b4643d8b09e8435b4990f17fbf4fea6bc2a90aa0cc7/bcrypt-5.0.0-cp38-abi3-win32.whl", hash = "sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538", size = 153752, upload-time = "2025-09-25T19:50:08.515Z" }, + { url = "https://files.pythonhosted.org/packages/41/aa/4190e60921927b7056820291f56fc57d00d04757c8b316b2d3c0d1d6da2c/bcrypt-5.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9", size = 150881, upload-time = "2025-09-25T19:50:09.742Z" }, + { url = "https://files.pythonhosted.org/packages/54/12/cd77221719d0b39ac0b55dbd39358db1cd1246e0282e104366ebbfb8266a/bcrypt-5.0.0-cp38-abi3-win_arm64.whl", hash = "sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980", size = 144931, upload-time = "2025-09-25T19:50:11.016Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a", size = 495313, upload-time = "2025-09-25T19:50:12.309Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ee/2f4985dbad090ace5ad1f7dd8ff94477fe089b5fab2040bd784a3d5f187b/bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191", size = 275290, upload-time = "2025-09-25T19:50:13.673Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/6e/b77ade812672d15cf50842e167eead80ac3514f3beacac8902915417f8b7/bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254", size = 278253, upload-time = "2025-09-25T19:50:15.089Z" }, + { url = "https://files.pythonhosted.org/packages/36/c4/ed00ed32f1040f7990dac7115f82273e3c03da1e1a1587a778d8cea496d8/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db", size = 276084, upload-time = "2025-09-25T19:50:16.699Z" }, + { url = "https://files.pythonhosted.org/packages/e7/c4/fa6e16145e145e87f1fa351bbd54b429354fd72145cd3d4e0c5157cf4c70/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac", size = 297185, upload-time = "2025-09-25T19:50:18.525Z" }, + { url = "https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822", size = 278656, upload-time = "2025-09-25T19:50:19.809Z" }, + { url = "https://files.pythonhosted.org/packages/ac/31/79f11865f8078e192847d2cb526e3fa27c200933c982c5b2869720fa5fce/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8", size = 275662, upload-time = "2025-09-25T19:50:21.567Z" }, + { url = "https://files.pythonhosted.org/packages/d4/8d/5e43d9584b3b3591a6f9b68f755a4da879a59712981ef5ad2a0ac1379f7a/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a", size = 278240, upload-time = "2025-09-25T19:50:23.305Z" }, + { url = "https://files.pythonhosted.org/packages/89/48/44590e3fc158620f680a978aafe8f87a4c4320da81ed11552f0323aa9a57/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1", size = 311152, upload-time = "2025-09-25T19:50:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/e4fbfc46f14f47b0d20493669a625da5827d07e8a88ee460af6cd9768b44/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42", size = 313284, upload-time = "2025-09-25T19:50:26.268Z" }, + { url = "https://files.pythonhosted.org/packages/25/ae/479f81d3f4594456a01ea2f05b132a519eff9ab5768a70430fa1132384b1/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10", size = 341643, upload-time = "2025-09-25T19:50:28.02Z" }, + { url = "https://files.pythonhosted.org/packages/df/d2/36a086dee1473b14276cd6ea7f61aef3b2648710b5d7f1c9e032c29b859f/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172", size = 359698, upload-time = "2025-09-25T19:50:31.347Z" }, + { url = "https://files.pythonhosted.org/packages/c0/f6/688d2cd64bfd0b14d805ddb8a565e11ca1fb0fd6817175d58b10052b6d88/bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683", size = 153725, upload-time = "2025-09-25T19:50:34.384Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/b9/9d9a641194a730bda138b3dfe53f584d61c58cd5230e37566e83ec2ffa0d/bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2", size = 150912, upload-time = "2025-09-25T19:50:35.69Z" }, + { url = "https://files.pythonhosted.org/packages/27/44/d2ef5e87509158ad2187f4dd0852df80695bb1ee0cfe0a684727b01a69e0/bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = "sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927", size = 144953, upload-time = "2025-09-25T19:50:37.32Z" }, + { url = "https://files.pythonhosted.org/packages/8a/75/4aa9f5a4d40d762892066ba1046000b329c7cd58e888a6db878019b282dc/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7edda91d5ab52b15636d9c30da87d2cc84f426c72b9dba7a9b4fe142ba11f534", size = 271180, upload-time = "2025-09-25T19:50:38.575Z" }, + { url = "https://files.pythonhosted.org/packages/54/79/875f9558179573d40a9cc743038ac2bf67dfb79cecb1e8b5d70e88c94c3d/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:046ad6db88edb3c5ece4369af997938fb1c19d6a699b9c1b27b0db432faae4c4", size = 273791, upload-time = "2025-09-25T19:50:39.913Z" }, + { url = "https://files.pythonhosted.org/packages/bc/fe/975adb8c216174bf70fc17535f75e85ac06ed5252ea077be10d9cff5ce24/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dcd58e2b3a908b5ecc9b9df2f0085592506ac2d5110786018ee5e160f28e0911", size = 270746, upload-time = "2025-09-25T19:50:43.306Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f8/972c96f5a2b6c4b3deca57009d93e946bbdbe2241dca9806d502f29dd3ee/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:6b8f520b61e8781efee73cba14e3e8c9556ccfb375623f4f97429544734545b4", size = 273375, upload-time = "2025-09-25T19:50:45.43Z" }, ] [[package]] @@ -547,11 +555,11 @@ wheels = [ [[package]] name = "billiard" -version = "4.2.1" +version = "4.2.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7c/58/1546c970afcd2a2428b1bfafecf2371d8951cc34b46701bea73f4280989e/billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f", size = 155031, upload-time = "2024-09-21T13:40:22.491Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/6a/1405343016bce8354b29d90aad6b0bf6485b5e60404516e4b9a3a9646cf0/billiard-4.2.2.tar.gz", hash = "sha256:e815017a062b714958463e07ba15981d802dc53d41c5b69d28c5a7c238f8ecf3", size = 155592, upload-time = "2025-09-20T14:44:40.456Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/30/da/43b15f28fe5f9e027b41c539abc5469052e9d48fd75f8ff094ba2a0ae767/billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb", size = 86766, upload-time = "2024-09-21T13:40:20.188Z" }, + { url = "https://files.pythonhosted.org/packages/a6/80/ef8dff49aae0e4430f81842f7403e14e0ca59db7bbaf7af41245b67c6b25/billiard-4.2.2-py3-none-any.whl", hash = "sha256:4bc05dcf0d1cc6addef470723aac2a6232f3c7ed7475b0b580473a9145829457", size = 86896, upload-time = "2025-09-20T14:44:39.157Z" }, ] [[package]] @@ -579,16 +587,16 @@ wheels = [ [[package]] name = "boto3-stubs" -version = "1.40.35" +version = "1.40.51" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore-stubs" }, { name = "types-s3transfer" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/24/18/6a64ff9603845d635f6167b6d9a3f9a6e658d8a28eef36f8423eb5a99ae1/boto3_stubs-1.40.35.tar.gz", hash = "sha256:2d6f2dbe6e9b42deb7b8fbeed051461e7906903f26e99634d00be45cc40db41a", size = 100819, upload-time = "2025-09-19T19:42:36.372Z" } +sdist = { url = "https://files.pythonhosted.org/packages/82/4d/b07f9ee0fe432fa8ec6dc368ee7a0409e2b6d9df2c5a2a88265c9b6fd878/boto3_stubs-1.40.51.tar.gz", hash = "sha256:0281e820813a310954e15fb7c1d470c24c34c1cccc7b1ddad977fa293a1080a9", size = 100890, upload-time = "2025-10-13T19:25:36.126Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/d4/d744260908ad55903baefa086a3c9cabc50bfafd63c3f2d0e05688378013/boto3_stubs-1.40.35-py3-none-any.whl", hash = "sha256:2bb44e6c17831650a28e3e00bf5be0a6ba771fce08724ba978ffcd06a7bca7e3", size = 69689, upload-time = "2025-09-19T19:42:30.08Z" }, + { url = "https://files.pythonhosted.org/packages/d3/2e/4476431f11fc3bf7a7e0f4f5c275f17607aa127da7c0d8685a4dc6bf6291/boto3_stubs-1.40.51-py3-none-any.whl", hash = "sha256:896d0ffaa298ce1749eea1a54946320a0f4e07c6912f8e1f8c0744a708ee25a4", size = 69709, upload-time = "2025-10-13T19:25:23.116Z" }, ] [package.optional-dependencies] @@ -612,14 +620,14 @@ wheels = [ [[package]] name = "botocore-stubs" -version = "1.40.29" +version = "1.40.51" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-awscrt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/32/5c/49b2860e2a26b7383d5915374e61d962a3853e3fd569e4370444f0b902c0/botocore_stubs-1.40.29.tar.gz", hash = "sha256:324669d5ed7b5f7271bf3c3ea7208191b1d183f17d7e73398f11fef4a31fdf6b", size = 42742, upload-time = "2025-09-11T20:22:35.451Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/ca/429fadb6e037cb7b300d508a0b24b59a71961db12539e21749cbec7e7422/botocore_stubs-1.40.51.tar.gz", hash = "sha256:8ddbeb1f68e39382533bb53f3b968d29e640406016af00ad8bbd6e1a2bd59536", size = 42249, upload-time = "2025-10-13T20:26:57.777Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/3c/f901ca6c4d66e0bebbfc56e614fc214416db72c613f768ee2fc84ffdbff4/botocore_stubs-1.40.29-py3-none-any.whl", hash = "sha256:84cbcc6328dddaa1f825830f7dec8fa0dcd3bac8002211322e8529cbfb5eaddd", size = 66843, upload-time = "2025-09-11T20:22:32.576Z" }, + { url = "https://files.pythonhosted.org/packages/c9/b9/5f1296bc46f293f284a1a6259f3c1f21f4161088dc6f70428698841b56a7/botocore_stubs-1.40.51-py3-none-any.whl", hash = "sha256:9a028104979205c9be0b68bb59ba679e4fe452e017eec3d40f6c2b41c590a73c", size = 66541, upload-time = "2025-10-13T20:26:55.559Z" }, ] [[package]] @@ -694,7 +702,7 @@ name = "brotlicffi" version = "1.1.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cffi" }, + { name = "cffi", marker = "platform_python_implementation == 'PyPy'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/95/9d/70caa61192f570fcf0352766331b735afa931b4c6bc9a348a0925cc13288/brotlicffi-1.1.0.0.tar.gz", hash = "sha256:b77827a689905143f87915310b93b273ab17888fd43ef350d4832c4a71083c13", size = 465192, upload-time = "2023-09-14T14:22:40.707Z" } wheels = [ @@ -774,11 +782,11 @@ wheels = [ [[package]] name = "certifi" -version = "2025.8.3" +version = "2025.10.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size 
= 162386, upload-time = "2025-08-03T03:07:47.08Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, + { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" }, ] [[package]] @@ -927,14 +935,14 @@ wheels = [ [[package]] name = "click" -version = "8.2.1" +version = "8.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, + { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, ] [[package]] @@ -1145,32 +1153,33 @@ toml = [ [[package]] name = "crc32c" -version = "2.7.1" +version = "2.7.1.post0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7f/4c/4e40cc26347ac8254d3f25b9f94710b8e8df24ee4dddc1ba41907a88a94d/crc32c-2.7.1.tar.gz", hash = "sha256:f91b144a21eef834d64178e01982bb9179c354b3e9e5f4c803b0e5096384968c", size = 45712, upload-time = "2024-09-24T06:20:17.553Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/02/5e49cc17a5f6f8cb78b55dd57d50b36416e69051c29bba1eab3e86a01927/crc32c-2.7.1.post0.tar.gz", hash = "sha256:dcaa776413af5790cc55561469cd76306e97b325fe4aa195db535fb3f328e709", size = 46574, upload-time = "2025-10-13T02:06:16.898Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/8e/2f37f46368bbfd50edfc11b96f0aa135699034b1b020966c70ebaff3463b/crc32c-2.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:19e03a50545a3ef400bd41667d5525f71030488629c57d819e2dd45064f16192", size = 49672, upload-time = "2024-09-24T06:18:18.032Z" }, - { url = 
"https://files.pythonhosted.org/packages/ed/b8/e52f7c4b045b871c2984d70f37c31d4861b533a8082912dfd107a96cf7c1/crc32c-2.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c03286b1e5ce9bed7090084f206aacd87c5146b4b10de56fe9e86cbbbf851cf", size = 37155, upload-time = "2024-09-24T06:18:19.373Z" }, - { url = "https://files.pythonhosted.org/packages/25/ee/0cfa82a68736697f3c7e435ba658c2ef8c997f42b89f6ab4545efe1b2649/crc32c-2.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:80ebbf144a1a56a532b353e81fa0f3edca4f4baa1bf92b1dde2c663a32bb6a15", size = 35372, upload-time = "2024-09-24T06:18:20.983Z" }, - { url = "https://files.pythonhosted.org/packages/aa/92/c878aaba81c431fcd93a059e9f6c90db397c585742793f0bf6e0c531cc67/crc32c-2.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96b794fd11945298fdd5eb1290a812efb497c14bc42592c5c992ca077458eeba", size = 54879, upload-time = "2024-09-24T06:18:23.085Z" }, - { url = "https://files.pythonhosted.org/packages/5b/f5/ab828ab3907095e06b18918408748950a9f726ee2b37be1b0839fb925ee1/crc32c-2.7.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9df7194dd3c0efb5a21f5d70595b7a8b4fd9921fbbd597d6d8e7a11eca3e2d27", size = 52588, upload-time = "2024-09-24T06:18:24.463Z" }, - { url = "https://files.pythonhosted.org/packages/6a/2b/9e29e9ac4c4213d60491db09487125db358cd9263490fbadbd55e48fbe03/crc32c-2.7.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d698eec444b18e296a104d0b9bb6c596c38bdcb79d24eba49604636e9d747305", size = 53674, upload-time = "2024-09-24T06:18:25.624Z" }, - { url = "https://files.pythonhosted.org/packages/79/ed/df3c4c14bf1b29f5c9b52d51fb6793e39efcffd80b2941d994e8f7f5f688/crc32c-2.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e07cf10ef852d219d179333fd706d1c415626f1f05e60bd75acf0143a4d8b225", size = 54691, upload-time = "2024-09-24T06:18:26.578Z" }, - { url = "https://files.pythonhosted.org/packages/0c/47/4917af3c9c1df2fff28bbfa6492673c9adeae5599dcc207bbe209847489c/crc32c-2.7.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d2a051f296e6e92e13efee3b41db388931cdb4a2800656cd1ed1d9fe4f13a086", size = 52896, upload-time = "2024-09-24T06:18:28.174Z" }, - { url = "https://files.pythonhosted.org/packages/1b/6f/26fc3dda5835cda8f6cd9d856afe62bdeae428de4c34fea200b0888e8835/crc32c-2.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1738259802978cdf428f74156175da6a5fdfb7256f647fdc0c9de1bc6cd7173", size = 53554, upload-time = "2024-09-24T06:18:29.104Z" }, - { url = "https://files.pythonhosted.org/packages/56/3e/6f39127f7027c75d130c0ba348d86a6150dff23761fbc6a5f71659f4521e/crc32c-2.7.1-cp311-cp311-win32.whl", hash = "sha256:f7786d219a1a1bf27d0aa1869821d11a6f8e90415cfffc1e37791690d4a848a1", size = 38370, upload-time = "2024-09-24T06:18:30.013Z" }, - { url = "https://files.pythonhosted.org/packages/c9/fb/1587c2705a3a47a3d0067eecf9a6fec510761c96dec45c7b038fb5c8ff46/crc32c-2.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:887f6844bb3ad35f0778cd10793ad217f7123a5422e40041231b8c4c7329649d", size = 39795, upload-time = "2024-09-24T06:18:31.324Z" }, - { url = "https://files.pythonhosted.org/packages/1d/02/998dc21333413ce63fe4c1ca70eafe61ca26afc7eb353f20cecdb77d614e/crc32c-2.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f7d1c4e761fe42bf856130daf8b2658df33fe0ced3c43dadafdfeaa42b57b950", size = 49568, upload-time = "2024-09-24T06:18:32.425Z" }, - { url = 
"https://files.pythonhosted.org/packages/9c/3e/e3656bfa76e50ef87b7136fef2dbf3c46e225629432fc9184fdd7fd187ff/crc32c-2.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:73361c79a6e4605204457f19fda18b042a94508a52e53d10a4239da5fb0f6a34", size = 37019, upload-time = "2024-09-24T06:18:34.097Z" }, - { url = "https://files.pythonhosted.org/packages/0b/7d/5ff9904046ad15a08772515db19df43107bf5e3901a89c36a577b5f40ba0/crc32c-2.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:afd778fc8ac0ed2ffbfb122a9aa6a0e409a8019b894a1799cda12c01534493e0", size = 35373, upload-time = "2024-09-24T06:18:35.02Z" }, - { url = "https://files.pythonhosted.org/packages/4d/41/4aedc961893f26858ab89fc772d0eaba91f9870f19eaa933999dcacb94ec/crc32c-2.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56ef661b34e9f25991fface7f9ad85e81bbc1b3fe3b916fd58c893eabe2fa0b8", size = 54675, upload-time = "2024-09-24T06:18:35.954Z" }, - { url = "https://files.pythonhosted.org/packages/d6/63/8cabf09b7e39b9fec8f7010646c8b33057fc8d67e6093b3cc15563d23533/crc32c-2.7.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:571aa4429444b5d7f588e4377663592145d2d25eb1635abb530f1281794fc7c9", size = 52386, upload-time = "2024-09-24T06:18:36.896Z" }, - { url = "https://files.pythonhosted.org/packages/79/13/13576941bf7cf95026abae43d8427c812c0054408212bf8ed490eda846b0/crc32c-2.7.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c02a3bd67dea95cdb25844aaf44ca2e1b0c1fd70b287ad08c874a95ef4bb38db", size = 53495, upload-time = "2024-09-24T06:18:38.099Z" }, - { url = "https://files.pythonhosted.org/packages/3d/b6/55ffb26d0517d2d6c6f430ce2ad36ae7647c995c5bfd7abce7f32bb2bad1/crc32c-2.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99d17637c4867672cb8adeea007294e3c3df9d43964369516cfe2c1f47ce500a", size = 54456, upload-time = "2024-09-24T06:18:39.051Z" }, - { url = "https://files.pythonhosted.org/packages/c2/1a/5562e54cb629ecc5543d3604dba86ddfc7c7b7bf31d64005b38a00d31d31/crc32c-2.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f4a400ac3c69a32e180d8753fd7ec7bccb80ade7ab0812855dce8a208e72495f", size = 52647, upload-time = "2024-09-24T06:18:40.021Z" }, - { url = "https://files.pythonhosted.org/packages/48/ec/ce4138eaf356cd9aae60bbe931755e5e0151b3eca5f491fce6c01b97fd59/crc32c-2.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:588587772e55624dd9c7a906ec9e8773ae0b6ac5e270fc0bc84ee2758eba90d5", size = 53332, upload-time = "2024-09-24T06:18:40.925Z" }, - { url = "https://files.pythonhosted.org/packages/5e/b5/144b42cd838a901175a916078781cb2c3c9f977151c9ba085aebd6d15b22/crc32c-2.7.1-cp312-cp312-win32.whl", hash = "sha256:9f14b60e5a14206e8173dd617fa0c4df35e098a305594082f930dae5488da428", size = 38371, upload-time = "2024-09-24T06:18:42.711Z" }, - { url = "https://files.pythonhosted.org/packages/ae/c4/7929dcd5d9b57db0cce4fe6f6c191049380fc6d8c9b9f5581967f4ec018e/crc32c-2.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:7c810a246660a24dc818047dc5f89c7ce7b2814e1e08a8e99993f4103f7219e8", size = 39805, upload-time = "2024-09-24T06:18:43.6Z" }, + { url = "https://files.pythonhosted.org/packages/77/e6/5723311e6320b35d7755fef8ab10c5d4b55173ce11888e8ddaf62b63091f/crc32c-2.7.1.post0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:65124edce1903484b19109da542d8671a814bdd2cc4006847701449a1f137869", size = 64759, upload-time = "2025-10-13T02:05:01.212Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/ac/f9550d21a4434b5dad9124ccd6b7cee97ce66bc0cb91a605bf01d9c2475d/crc32c-2.7.1.post0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a177ee47782f9b83002b08c4d4ba57a6e31dcd96be89d1c6b71f599d9c06bba6", size = 61419, upload-time = "2025-10-13T02:05:02.063Z" }, + { url = "https://files.pythonhosted.org/packages/ef/39/82874155870dc42c917ff842073c2714955c3c3d0629579a7ca3db1bbcb1/crc32c-2.7.1.post0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65ce2c3f1938b1310c1d592ac97f5e32b2cb67b67ae54ec89e710605f01e91cb", size = 59951, upload-time = "2025-10-13T02:05:03.268Z" }, + { url = "https://files.pythonhosted.org/packages/b9/24/42aa97aac254adeafaa44297654a520db1922dcab4a07bbb965b41d52b66/crc32c-2.7.1.post0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c57ac2129a4adc56b8898c524a33525f008a346edc5df2b1ab7b7bfc4e80bbe", size = 78633, upload-time = "2025-10-13T02:05:04.429Z" }, + { url = "https://files.pythonhosted.org/packages/ac/76/a0c8683b9c38e260d23eb8d419d3ca39277e5e640521e9cc7ca407633ee4/crc32c-2.7.1.post0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3dcdcc73ea9f5e5e32cf1d0868315c62274f8f8fb2a1356e6bf2e958fc7f5bc9", size = 80187, upload-time = "2025-10-13T02:05:05.254Z" }, + { url = "https://files.pythonhosted.org/packages/16/05/978a558f580294e521dc432656e8d1b955ddc73f22870d7e767ff9c1a8d4/crc32c-2.7.1.post0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7cc745faf51a57c056021c005766cd8bacd818213ef424064118747c99a16d70", size = 79263, upload-time = "2025-10-13T02:05:06.121Z" }, + { url = "https://files.pythonhosted.org/packages/cd/48/9efdc54175f56b5aea24fbd9076759e052ca828713590a6bf60f822d40f7/crc32c-2.7.1.post0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1220cf7e97f453e07f998574e663e822e5602ed591b9a2fd436bf65dcae26168", size = 78527, upload-time = "2025-10-13T02:05:06.978Z" }, + { url = "https://files.pythonhosted.org/packages/0f/88/5accf5fb60559df04d0863496a86b912af37e6b7d160d458da25e473a881/crc32c-2.7.1.post0-cp311-cp311-win32.whl", hash = "sha256:d5087f2bc6954b38dc1ceac9b2ea9c9c4956b4e8ce82d965f4c474aefac2d061", size = 63277, upload-time = "2025-10-13T02:05:07.852Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b2/c037161956d00324198a94962788b5e6a6e76b892d96205b15a37bea0c81/crc32c-2.7.1.post0-cp311-cp311-win_amd64.whl", hash = "sha256:feda0b536b1310b0535085835564918df6ba415e0b230734e1386deb7c614c02", size = 65029, upload-time = "2025-10-13T02:05:08.658Z" }, + { url = "https://files.pythonhosted.org/packages/7d/00/243cc1b15bcadf72bd71cf9a33d425715726b95b5f37a85b306d495362f4/crc32c-2.7.1.post0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:4eda225a4c49901b9baf1af2aec19dd614c527bac81e02c52d1b9f1d6f6d244c", size = 64820, upload-time = "2025-10-13T02:05:09.476Z" }, + { url = "https://files.pythonhosted.org/packages/6e/76/e63deacf3e5dcd38764a1a617fd25749ea83fe20ff42a7912a855a975a0f/crc32c-2.7.1.post0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e304b07182b915fa9ab5340b51a6845d45331974d73b80a1710405ec8f0b4d44", size = 61474, upload-time = "2025-10-13T02:05:10.44Z" }, + { url = "https://files.pythonhosted.org/packages/c2/96/a341802b0a84fc00f9eca4e7dfdc0f41a69fc226b62ea1661468d4812800/crc32c-2.7.1.post0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1bbd4d2272aa7bdc5527fc3130caf31819e5efad19b6abd7158859f1cc808923", size = 59963, upload-time = "2025-10-13T02:05:11.271Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/8a/5e1f6789239935a95a6fb579e5f20dc4032265c5de215cec841d369ad188/crc32c-2.7.1.post0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:eea5fe4f477249f19201b2c1ac9f0df70987593b0dd0e0d15521480500d18455", size = 78461, upload-time = "2025-10-13T02:05:12.077Z" }, + { url = "https://files.pythonhosted.org/packages/e8/7a/bf07239d7f55cf94ad6979de1f97105cdcfa1b73cf8818a5461f37043962/crc32c-2.7.1.post0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc97ce3c913eded8f4d19d5da7492ebb7ab7de1eb05749c8e5c48f4999e263e0", size = 79963, upload-time = "2025-10-13T02:05:13.343Z" }, + { url = "https://files.pythonhosted.org/packages/e3/17/09a11007d66767a1d339424560386c99323e904e5e7f0e75ff4a13156d3c/crc32c-2.7.1.post0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c13bdb21cc11fc2e9b7387fe726e65993f79407b3e4b8c107ee345e9c6cfe399", size = 79040, upload-time = "2025-10-13T02:05:14.216Z" }, + { url = "https://files.pythonhosted.org/packages/b2/ca/4f8d8832524a70f39a20302e171782368fd66474e792b2aaf6bc9bb1ba9d/crc32c-2.7.1.post0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5f9edc07f0617c212d700e31fc6437811b3036f84931e9b837a14169dd0e8d65", size = 78319, upload-time = "2025-10-13T02:05:15.303Z" }, + { url = "https://files.pythonhosted.org/packages/6d/41/63331e510e31928ae5af30fa3d40bca86b8b7c38164b5b59a57cdb8b5a2e/crc32c-2.7.1.post0-cp312-cp312-win32.whl", hash = "sha256:6d205730d184b5ba9a37ee855883b536e40dbf13817d15e4bab4997149c59d82", size = 63286, upload-time = "2025-10-13T02:05:16.181Z" }, + { url = "https://files.pythonhosted.org/packages/ed/3f/05cb1cd66b98f7165b8d181a164ef2c16b7ef0019a191e6ff8defa4df327/crc32c-2.7.1.post0-cp312-cp312-win_amd64.whl", hash = "sha256:f8c1584fe841883300cd3cb0e8341da5a4c954fc2dcf9e0eb15d3b697d90930e", size = 65034, upload-time = "2025-10-13T02:05:17.078Z" }, + { url = "https://files.pythonhosted.org/packages/54/7f/18a4262600e9f772d2b2d10adff4b002d64e5eaa6f0da5e6ded16086e8ad/crc32c-2.7.1.post0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:700d0637f620be903b596fd145d25664c0e821b9b24d462eaa3beeacb906478f", size = 60777, upload-time = "2025-10-13T02:06:10.957Z" }, + { url = "https://files.pythonhosted.org/packages/1b/de/d9a6fdee4b1058b1922b1395814e010e85cb2c1a6ddb1388cbf7523a9f8f/crc32c-2.7.1.post0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:213aa16faf30c267579f9b76cfc572162fccd537095a5533e329318c2e5da589", size = 59663, upload-time = "2025-10-13T02:06:11.844Z" }, + { url = "https://files.pythonhosted.org/packages/4e/86/9e71dd8847ee075504a7ab69a101ab7dff7fd46cc22dbbef242ceeb187bf/crc32c-2.7.1.post0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0e1b2b232edd75f3281ab059d2811e4ac674931a1889e0070a2fc73d93c0f204", size = 62539, upload-time = "2025-10-13T02:06:13.075Z" }, + { url = "https://files.pythonhosted.org/packages/67/c0/905905212c0aec771d81df4d88f87008dadeecd6ad628d1e17f9a5acd7dd/crc32c-2.7.1.post0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2e76c1e536f2408c5c5ce796e1a89ef252a438aa011c1f31048aa0783b75626", size = 63248, upload-time = "2025-10-13T02:06:14.764Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ba/6bdc8b946c6db999a0318e620a0f50e2099d9cba3d9c9de05932d12795a5/crc32c-2.7.1.post0-pp311-pypy311_pp73-win_amd64.whl", hash = 
"sha256:a1ea03ed177cb022d859ce86bac6044d5cd68dcf7e22f022e288a96f2bd6fa2f", size = 65049, upload-time = "2025-10-13T02:06:15.672Z" }, ] [[package]] @@ -1181,43 +1190,49 @@ sdist = { url = "https://files.pythonhosted.org/packages/6b/b0/e595ce2a2527e169c [[package]] name = "cryptography" -version = "45.0.7" +version = "46.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a7/35/c495bffc2056f2dadb32434f1feedd79abde2a7f8363e1974afa9c33c7e2/cryptography-45.0.7.tar.gz", hash = "sha256:4b1654dfc64ea479c242508eb8c724044f1e964a47d1d1cacc5132292d851971", size = 744980, upload-time = "2025-09-01T11:15:03.146Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4a/9b/e301418629f7bfdf72db9e80ad6ed9d1b83c487c471803eaa6464c511a01/cryptography-46.0.2.tar.gz", hash = "sha256:21b6fc8c71a3f9a604f028a329e5560009cc4a3a828bfea5fcba8eb7647d88fe", size = 749293, upload-time = "2025-10-01T00:29:11.856Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/91/925c0ac74362172ae4516000fe877912e33b5983df735ff290c653de4913/cryptography-45.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:3be4f21c6245930688bd9e162829480de027f8bf962ede33d4f8ba7d67a00cee", size = 7041105, upload-time = "2025-09-01T11:13:59.684Z" }, - { url = "https://files.pythonhosted.org/packages/fc/63/43641c5acce3a6105cf8bd5baeceeb1846bb63067d26dae3e5db59f1513a/cryptography-45.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:67285f8a611b0ebc0857ced2081e30302909f571a46bfa7a3cc0ad303fe015c6", size = 4205799, upload-time = "2025-09-01T11:14:02.517Z" }, - { url = "https://files.pythonhosted.org/packages/bc/29/c238dd9107f10bfde09a4d1c52fd38828b1aa353ced11f358b5dd2507d24/cryptography-45.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:577470e39e60a6cd7780793202e63536026d9b8641de011ed9d8174da9ca5339", size = 4430504, upload-time = "2025-09-01T11:14:04.522Z" }, - { url = "https://files.pythonhosted.org/packages/62/62/24203e7cbcc9bd7c94739428cd30680b18ae6b18377ae66075c8e4771b1b/cryptography-45.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:4bd3e5c4b9682bc112d634f2c6ccc6736ed3635fc3319ac2bb11d768cc5a00d8", size = 4209542, upload-time = "2025-09-01T11:14:06.309Z" }, - { url = "https://files.pythonhosted.org/packages/cd/e3/e7de4771a08620eef2389b86cd87a2c50326827dea5528feb70595439ce4/cryptography-45.0.7-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:465ccac9d70115cd4de7186e60cfe989de73f7bb23e8a7aa45af18f7412e75bf", size = 3889244, upload-time = "2025-09-01T11:14:08.152Z" }, - { url = "https://files.pythonhosted.org/packages/96/b8/bca71059e79a0bb2f8e4ec61d9c205fbe97876318566cde3b5092529faa9/cryptography-45.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:16ede8a4f7929b4b7ff3642eba2bf79aa1d71f24ab6ee443935c0d269b6bc513", size = 4461975, upload-time = "2025-09-01T11:14:09.755Z" }, - { url = "https://files.pythonhosted.org/packages/58/67/3f5b26937fe1218c40e95ef4ff8d23c8dc05aa950d54200cc7ea5fb58d28/cryptography-45.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8978132287a9d3ad6b54fcd1e08548033cc09dc6aacacb6c004c73c3eb5d3ac3", size = 4209082, upload-time = "2025-09-01T11:14:11.229Z" }, - { url = "https://files.pythonhosted.org/packages/0e/e4/b3e68a4ac363406a56cf7b741eeb80d05284d8c60ee1a55cdc7587e2a553/cryptography-45.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = 
"sha256:b6a0e535baec27b528cb07a119f321ac024592388c5681a5ced167ae98e9fff3", size = 4460397, upload-time = "2025-09-01T11:14:12.924Z" }, - { url = "https://files.pythonhosted.org/packages/22/49/2c93f3cd4e3efc8cb22b02678c1fad691cff9dd71bb889e030d100acbfe0/cryptography-45.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:a24ee598d10befaec178efdff6054bc4d7e883f615bfbcd08126a0f4931c83a6", size = 4337244, upload-time = "2025-09-01T11:14:14.431Z" }, - { url = "https://files.pythonhosted.org/packages/04/19/030f400de0bccccc09aa262706d90f2ec23d56bc4eb4f4e8268d0ddf3fb8/cryptography-45.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa26fa54c0a9384c27fcdc905a2fb7d60ac6e47d14bc2692145f2b3b1e2cfdbd", size = 4568862, upload-time = "2025-09-01T11:14:16.185Z" }, - { url = "https://files.pythonhosted.org/packages/29/56/3034a3a353efa65116fa20eb3c990a8c9f0d3db4085429040a7eef9ada5f/cryptography-45.0.7-cp311-abi3-win32.whl", hash = "sha256:bef32a5e327bd8e5af915d3416ffefdbe65ed975b646b3805be81b23580b57b8", size = 2936578, upload-time = "2025-09-01T11:14:17.638Z" }, - { url = "https://files.pythonhosted.org/packages/b3/61/0ab90f421c6194705a99d0fa9f6ee2045d916e4455fdbb095a9c2c9a520f/cryptography-45.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:3808e6b2e5f0b46d981c24d79648e5c25c35e59902ea4391a0dcb3e667bf7443", size = 3405400, upload-time = "2025-09-01T11:14:18.958Z" }, - { url = "https://files.pythonhosted.org/packages/63/e8/c436233ddf19c5f15b25ace33979a9dd2e7aa1a59209a0ee8554179f1cc0/cryptography-45.0.7-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bfb4c801f65dd61cedfc61a83732327fafbac55a47282e6f26f073ca7a41c3b2", size = 7021824, upload-time = "2025-09-01T11:14:20.954Z" }, - { url = "https://files.pythonhosted.org/packages/bc/4c/8f57f2500d0ccd2675c5d0cc462095adf3faa8c52294ba085c036befb901/cryptography-45.0.7-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:81823935e2f8d476707e85a78a405953a03ef7b7b4f55f93f7c2d9680e5e0691", size = 4202233, upload-time = "2025-09-01T11:14:22.454Z" }, - { url = "https://files.pythonhosted.org/packages/eb/ac/59b7790b4ccaed739fc44775ce4645c9b8ce54cbec53edf16c74fd80cb2b/cryptography-45.0.7-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3994c809c17fc570c2af12c9b840d7cea85a9fd3e5c0e0491f4fa3c029216d59", size = 4423075, upload-time = "2025-09-01T11:14:24.287Z" }, - { url = "https://files.pythonhosted.org/packages/b8/56/d4f07ea21434bf891faa088a6ac15d6d98093a66e75e30ad08e88aa2b9ba/cryptography-45.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dad43797959a74103cb59c5dac71409f9c27d34c8a05921341fb64ea8ccb1dd4", size = 4204517, upload-time = "2025-09-01T11:14:25.679Z" }, - { url = "https://files.pythonhosted.org/packages/e8/ac/924a723299848b4c741c1059752c7cfe09473b6fd77d2920398fc26bfb53/cryptography-45.0.7-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ce7a453385e4c4693985b4a4a3533e041558851eae061a58a5405363b098fcd3", size = 3882893, upload-time = "2025-09-01T11:14:27.1Z" }, - { url = "https://files.pythonhosted.org/packages/83/dc/4dab2ff0a871cc2d81d3ae6d780991c0192b259c35e4d83fe1de18b20c70/cryptography-45.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b04f85ac3a90c227b6e5890acb0edbaf3140938dbecf07bff618bf3638578cf1", size = 4450132, upload-time = "2025-09-01T11:14:28.58Z" }, - { url = "https://files.pythonhosted.org/packages/12/dd/b2882b65db8fc944585d7fb00d67cf84a9cef4e77d9ba8f69082e911d0de/cryptography-45.0.7-cp37-abi3-manylinux_2_34_aarch64.whl", hash = 
"sha256:48c41a44ef8b8c2e80ca4527ee81daa4c527df3ecbc9423c41a420a9559d0e27", size = 4204086, upload-time = "2025-09-01T11:14:30.572Z" }, - { url = "https://files.pythonhosted.org/packages/5d/fa/1d5745d878048699b8eb87c984d4ccc5da4f5008dfd3ad7a94040caca23a/cryptography-45.0.7-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f3df7b3d0f91b88b2106031fd995802a2e9ae13e02c36c1fc075b43f420f3a17", size = 4449383, upload-time = "2025-09-01T11:14:32.046Z" }, - { url = "https://files.pythonhosted.org/packages/36/8b/fc61f87931bc030598e1876c45b936867bb72777eac693e905ab89832670/cryptography-45.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd342f085542f6eb894ca00ef70236ea46070c8a13824c6bde0dfdcd36065b9b", size = 4332186, upload-time = "2025-09-01T11:14:33.95Z" }, - { url = "https://files.pythonhosted.org/packages/0b/11/09700ddad7443ccb11d674efdbe9a832b4455dc1f16566d9bd3834922ce5/cryptography-45.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1993a1bb7e4eccfb922b6cd414f072e08ff5816702a0bdb8941c247a6b1b287c", size = 4561639, upload-time = "2025-09-01T11:14:35.343Z" }, - { url = "https://files.pythonhosted.org/packages/71/ed/8f4c1337e9d3b94d8e50ae0b08ad0304a5709d483bfcadfcc77a23dbcb52/cryptography-45.0.7-cp37-abi3-win32.whl", hash = "sha256:18fcf70f243fe07252dcb1b268a687f2358025ce32f9f88028ca5c364b123ef5", size = 2926552, upload-time = "2025-09-01T11:14:36.929Z" }, - { url = "https://files.pythonhosted.org/packages/bc/ff/026513ecad58dacd45d1d24ebe52b852165a26e287177de1d545325c0c25/cryptography-45.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:7285a89df4900ed3bfaad5679b1e668cb4b38a8de1ccbfc84b05f34512da0a90", size = 3392742, upload-time = "2025-09-01T11:14:38.368Z" }, - { url = "https://files.pythonhosted.org/packages/99/4e/49199a4c82946938a3e05d2e8ad9482484ba48bbc1e809e3d506c686d051/cryptography-45.0.7-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a862753b36620af6fc54209264f92c716367f2f0ff4624952276a6bbd18cbde", size = 3584634, upload-time = "2025-09-01T11:14:50.593Z" }, - { url = "https://files.pythonhosted.org/packages/16/ce/5f6ff59ea9c7779dba51b84871c19962529bdcc12e1a6ea172664916c550/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:06ce84dc14df0bf6ea84666f958e6080cdb6fe1231be2a51f3fc1267d9f3fb34", size = 4149533, upload-time = "2025-09-01T11:14:52.091Z" }, - { url = "https://files.pythonhosted.org/packages/ce/13/b3cfbd257ac96da4b88b46372e662009b7a16833bfc5da33bb97dd5631ae/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d0c5c6bac22b177bf8da7435d9d27a6834ee130309749d162b26c3105c0795a9", size = 4385557, upload-time = "2025-09-01T11:14:53.551Z" }, - { url = "https://files.pythonhosted.org/packages/1c/c5/8c59d6b7c7b439ba4fc8d0cab868027fd095f215031bc123c3a070962912/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:2f641b64acc00811da98df63df7d59fd4706c0df449da71cb7ac39a0732b40ae", size = 4149023, upload-time = "2025-09-01T11:14:55.022Z" }, - { url = "https://files.pythonhosted.org/packages/55/32/05385c86d6ca9ab0b4d5bb442d2e3d85e727939a11f3e163fc776ce5eb40/cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:f5414a788ecc6ee6bc58560e85ca624258a55ca434884445440a810796ea0e0b", size = 4385722, upload-time = "2025-09-01T11:14:57.319Z" }, - { url = "https://files.pythonhosted.org/packages/23/87/7ce86f3fa14bc11a5a48c30d8103c26e09b6465f8d8e9d74cf7a0714f043/cryptography-45.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = 
"sha256:1f3d56f73595376f4244646dd5c5870c14c196949807be39e79e7bd9bac3da63", size = 3332908, upload-time = "2025-09-01T11:14:58.78Z" }, + { url = "https://files.pythonhosted.org/packages/e0/98/7a8df8c19a335c8028414738490fc3955c0cecbfdd37fcc1b9c3d04bd561/cryptography-46.0.2-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:f3e32ab7dd1b1ef67b9232c4cf5e2ee4cd517d4316ea910acaaa9c5712a1c663", size = 7261255, upload-time = "2025-10-01T00:27:22.947Z" }, + { url = "https://files.pythonhosted.org/packages/c6/38/b2adb2aa1baa6706adc3eb746691edd6f90a656a9a65c3509e274d15a2b8/cryptography-46.0.2-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1fd1a69086926b623ef8126b4c33d5399ce9e2f3fac07c9c734c2a4ec38b6d02", size = 4297596, upload-time = "2025-10-01T00:27:25.258Z" }, + { url = "https://files.pythonhosted.org/packages/e4/27/0f190ada240003119488ae66c897b5e97149292988f556aef4a6a2a57595/cryptography-46.0.2-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb7fb9cd44c2582aa5990cf61a4183e6f54eea3172e54963787ba47287edd135", size = 4450899, upload-time = "2025-10-01T00:27:27.458Z" }, + { url = "https://files.pythonhosted.org/packages/85/d5/e4744105ab02fdf6bb58ba9a816e23b7a633255987310b4187d6745533db/cryptography-46.0.2-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9066cfd7f146f291869a9898b01df1c9b0e314bfa182cef432043f13fc462c92", size = 4300382, upload-time = "2025-10-01T00:27:29.091Z" }, + { url = "https://files.pythonhosted.org/packages/33/fb/bf9571065c18c04818cb07de90c43fc042c7977c68e5de6876049559c72f/cryptography-46.0.2-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:97e83bf4f2f2c084d8dd792d13841d0a9b241643151686010866bbd076b19659", size = 4017347, upload-time = "2025-10-01T00:27:30.767Z" }, + { url = "https://files.pythonhosted.org/packages/35/72/fc51856b9b16155ca071080e1a3ad0c3a8e86616daf7eb018d9565b99baa/cryptography-46.0.2-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:4a766d2a5d8127364fd936572c6e6757682fc5dfcbdba1632d4554943199f2fa", size = 4983500, upload-time = "2025-10-01T00:27:32.741Z" }, + { url = "https://files.pythonhosted.org/packages/c1/53/0f51e926799025e31746d454ab2e36f8c3f0d41592bc65cb9840368d3275/cryptography-46.0.2-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:fab8f805e9675e61ed8538f192aad70500fa6afb33a8803932999b1049363a08", size = 4482591, upload-time = "2025-10-01T00:27:34.869Z" }, + { url = "https://files.pythonhosted.org/packages/86/96/4302af40b23ab8aa360862251fb8fc450b2a06ff24bc5e261c2007f27014/cryptography-46.0.2-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:1e3b6428a3d56043bff0bb85b41c535734204e599c1c0977e1d0f261b02f3ad5", size = 4300019, upload-time = "2025-10-01T00:27:37.029Z" }, + { url = "https://files.pythonhosted.org/packages/9b/59/0be12c7fcc4c5e34fe2b665a75bc20958473047a30d095a7657c218fa9e8/cryptography-46.0.2-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:1a88634851d9b8de8bb53726f4300ab191d3b2f42595e2581a54b26aba71b7cc", size = 4950006, upload-time = "2025-10-01T00:27:40.272Z" }, + { url = "https://files.pythonhosted.org/packages/55/1d/42fda47b0111834b49e31590ae14fd020594d5e4dadd639bce89ad790fba/cryptography-46.0.2-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:be939b99d4e091eec9a2bcf41aaf8f351f312cd19ff74b5c83480f08a8a43e0b", size = 4482088, upload-time = "2025-10-01T00:27:42.668Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/50/60f583f69aa1602c2bdc7022dae86a0d2b837276182f8c1ec825feb9b874/cryptography-46.0.2-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f13b040649bc18e7eb37936009b24fd31ca095a5c647be8bb6aaf1761142bd1", size = 4425599, upload-time = "2025-10-01T00:27:44.616Z" }, + { url = "https://files.pythonhosted.org/packages/d1/57/d8d4134cd27e6e94cf44adb3f3489f935bde85f3a5508e1b5b43095b917d/cryptography-46.0.2-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9bdc25e4e01b261a8fda4e98618f1c9515febcecebc9566ddf4a70c63967043b", size = 4697458, upload-time = "2025-10-01T00:27:46.209Z" }, + { url = "https://files.pythonhosted.org/packages/d1/2b/531e37408573e1da33adfb4c58875013ee8ac7d548d1548967d94a0ae5c4/cryptography-46.0.2-cp311-abi3-win32.whl", hash = "sha256:8b9bf67b11ef9e28f4d78ff88b04ed0929fcd0e4f70bb0f704cfc32a5c6311ee", size = 3056077, upload-time = "2025-10-01T00:27:48.424Z" }, + { url = "https://files.pythonhosted.org/packages/a8/cd/2f83cafd47ed2dc5a3a9c783ff5d764e9e70d3a160e0df9a9dcd639414ce/cryptography-46.0.2-cp311-abi3-win_amd64.whl", hash = "sha256:758cfc7f4c38c5c5274b55a57ef1910107436f4ae842478c4989abbd24bd5acb", size = 3512585, upload-time = "2025-10-01T00:27:50.521Z" }, + { url = "https://files.pythonhosted.org/packages/00/36/676f94e10bfaa5c5b86c469ff46d3e0663c5dc89542f7afbadac241a3ee4/cryptography-46.0.2-cp311-abi3-win_arm64.whl", hash = "sha256:218abd64a2e72f8472c2102febb596793347a3e65fafbb4ad50519969da44470", size = 2927474, upload-time = "2025-10-01T00:27:52.91Z" }, + { url = "https://files.pythonhosted.org/packages/d5/bb/fa95abcf147a1b0bb94d95f53fbb09da77b24c776c5d87d36f3d94521d2c/cryptography-46.0.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a08e7401a94c002e79dc3bc5231b6558cd4b2280ee525c4673f650a37e2c7685", size = 7248090, upload-time = "2025-10-01T00:28:22.846Z" }, + { url = "https://files.pythonhosted.org/packages/b7/66/f42071ce0e3ffbfa80a88feadb209c779fda92a23fbc1e14f74ebf72ef6b/cryptography-46.0.2-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d30bc11d35743bf4ddf76674a0a369ec8a21f87aaa09b0661b04c5f6c46e8d7b", size = 4293123, upload-time = "2025-10-01T00:28:25.072Z" }, + { url = "https://files.pythonhosted.org/packages/a8/5d/1fdbd2e5c1ba822828d250e5a966622ef00185e476d1cd2726b6dd135e53/cryptography-46.0.2-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bca3f0ce67e5a2a2cf524e86f44697c4323a86e0fd7ba857de1c30d52c11ede1", size = 4439524, upload-time = "2025-10-01T00:28:26.808Z" }, + { url = "https://files.pythonhosted.org/packages/c8/c1/5e4989a7d102d4306053770d60f978c7b6b1ea2ff8c06e0265e305b23516/cryptography-46.0.2-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ff798ad7a957a5021dcbab78dfff681f0cf15744d0e6af62bd6746984d9c9e9c", size = 4297264, upload-time = "2025-10-01T00:28:29.327Z" }, + { url = "https://files.pythonhosted.org/packages/28/78/b56f847d220cb1d6d6aef5a390e116ad603ce13a0945a3386a33abc80385/cryptography-46.0.2-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:cb5e8daac840e8879407acbe689a174f5ebaf344a062f8918e526824eb5d97af", size = 4011872, upload-time = "2025-10-01T00:28:31.479Z" }, + { url = "https://files.pythonhosted.org/packages/e1/80/2971f214b066b888944f7b57761bf709ee3f2cf805619a18b18cab9b263c/cryptography-46.0.2-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:3f37aa12b2d91e157827d90ce78f6180f0c02319468a0aea86ab5a9566da644b", size = 4978458, upload-time = "2025-10-01T00:28:33.267Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/84/0cb0a2beaa4f1cbe63ebec4e97cd7e0e9f835d0ba5ee143ed2523a1e0016/cryptography-46.0.2-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5e38f203160a48b93010b07493c15f2babb4e0f2319bbd001885adb3f3696d21", size = 4472195, upload-time = "2025-10-01T00:28:36.039Z" }, + { url = "https://files.pythonhosted.org/packages/30/8b/2b542ddbf78835c7cd67b6fa79e95560023481213a060b92352a61a10efe/cryptography-46.0.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d19f5f48883752b5ab34cff9e2f7e4a7f216296f33714e77d1beb03d108632b6", size = 4296791, upload-time = "2025-10-01T00:28:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/78/12/9065b40201b4f4876e93b9b94d91feb18de9150d60bd842a16a21565007f/cryptography-46.0.2-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:04911b149eae142ccd8c9a68892a70c21613864afb47aba92d8c7ed9cc001023", size = 4939629, upload-time = "2025-10-01T00:28:39.654Z" }, + { url = "https://files.pythonhosted.org/packages/f6/9e/6507dc048c1b1530d372c483dfd34e7709fc542765015425f0442b08547f/cryptography-46.0.2-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8b16c1ede6a937c291d41176934268e4ccac2c6521c69d3f5961c5a1e11e039e", size = 4471988, upload-time = "2025-10-01T00:28:41.822Z" }, + { url = "https://files.pythonhosted.org/packages/b1/86/d025584a5f7d5c5ec8d3633dbcdce83a0cd579f1141ceada7817a4c26934/cryptography-46.0.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:747b6f4a4a23d5a215aadd1d0b12233b4119c4313df83ab4137631d43672cc90", size = 4422989, upload-time = "2025-10-01T00:28:43.608Z" }, + { url = "https://files.pythonhosted.org/packages/4b/39/536370418b38a15a61bbe413006b79dfc3d2b4b0eafceb5581983f973c15/cryptography-46.0.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6b275e398ab3a7905e168c036aad54b5969d63d3d9099a0a66cc147a3cc983be", size = 4685578, upload-time = "2025-10-01T00:28:45.361Z" }, + { url = "https://files.pythonhosted.org/packages/15/52/ea7e2b1910f547baed566c866fbb86de2402e501a89ecb4871ea7f169a81/cryptography-46.0.2-cp38-abi3-win32.whl", hash = "sha256:0b507c8e033307e37af61cb9f7159b416173bdf5b41d11c4df2e499a1d8e007c", size = 3036711, upload-time = "2025-10-01T00:28:47.096Z" }, + { url = "https://files.pythonhosted.org/packages/71/9e/171f40f9c70a873e73c2efcdbe91e1d4b1777a03398fa1c4af3c56a2477a/cryptography-46.0.2-cp38-abi3-win_amd64.whl", hash = "sha256:f9b2dc7668418fb6f221e4bf701f716e05e8eadb4f1988a2487b11aedf8abe62", size = 3500007, upload-time = "2025-10-01T00:28:48.967Z" }, + { url = "https://files.pythonhosted.org/packages/3e/7c/15ad426257615f9be8caf7f97990cf3dcbb5b8dd7ed7e0db581a1c4759dd/cryptography-46.0.2-cp38-abi3-win_arm64.whl", hash = "sha256:91447f2b17e83c9e0c89f133119d83f94ce6e0fb55dd47da0a959316e6e9cfa1", size = 2918153, upload-time = "2025-10-01T00:28:51.003Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8c/1aabe338149a7d0f52c3e30f2880b20027ca2a485316756ed6f000462db3/cryptography-46.0.2-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1d3b3edd145953832e09607986f2bd86f85d1dc9c48ced41808b18009d9f30e5", size = 3714495, upload-time = "2025-10-01T00:28:57.222Z" }, + { url = "https://files.pythonhosted.org/packages/e3/0a/0d10eb970fe3e57da9e9ddcfd9464c76f42baf7b3d0db4a782d6746f788f/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:fe245cf4a73c20592f0f48da39748b3513db114465be78f0a36da847221bd1b4", size = 4243379, upload-time = "2025-10-01T00:28:58.989Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/60/e274b4d41a9eb82538b39950a74ef06e9e4d723cb998044635d9deb1b435/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2b9cad9cf71d0c45566624ff76654e9bae5f8a25970c250a26ccfc73f8553e2d", size = 4409533, upload-time = "2025-10-01T00:29:00.785Z" }, + { url = "https://files.pythonhosted.org/packages/19/9a/fb8548f762b4749aebd13b57b8f865de80258083fe814957f9b0619cfc56/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9bd26f2f75a925fdf5e0a446c0de2714f17819bf560b44b7480e4dd632ad6c46", size = 4243120, upload-time = "2025-10-01T00:29:02.515Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/883f24147fd4a0c5cab74ac7e36a1ff3094a54ba5c3a6253d2ff4b19255b/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:7282d8f092b5be7172d6472f29b0631f39f18512a3642aefe52c3c0e0ccfad5a", size = 4408940, upload-time = "2025-10-01T00:29:04.42Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b5/c5e179772ec38adb1c072b3aa13937d2860509ba32b2462bf1dda153833b/cryptography-46.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c4b93af7920cdf80f71650769464ccf1fb49a4b56ae0024173c24c48eb6b1612", size = 3438518, upload-time = "2025-10-01T00:29:06.139Z" }, ] [[package]] @@ -1357,6 +1372,7 @@ dependencies = [ { name = "transformers" }, { name = "unstructured", extra = ["docx", "epub", "md", "ppt", "pptx"] }, { name = "weave" }, + { name = "weaviate-client" }, { name = "webvtt-py" }, { name = "yarl" }, ] @@ -1379,6 +1395,7 @@ dev = [ { name = "pytest-cov" }, { name = "pytest-env" }, { name = "pytest-mock" }, + { name = "pytest-timeout" }, { name = "ruff" }, { name = "scipy-stubs" }, { name = "sseclient-py" }, @@ -1478,7 +1495,7 @@ requires-dist = [ { name = "celery", specifier = "~=5.5.2" }, { name = "chardet", specifier = "~=5.1.0" }, { name = "flask", specifier = "~=3.1.2" }, - { name = "flask-compress", specifier = "~=1.17" }, + { name = "flask-compress", specifier = ">=1.17,<1.18" }, { name = "flask-cors", specifier = "~=6.0.0" }, { name = "flask-login", specifier = "~=0.6.3" }, { name = "flask-migrate", specifier = "~=4.0.7" }, @@ -1528,7 +1545,7 @@ requires-dist = [ { name = "pycryptodome", specifier = "==3.19.1" }, { name = "pydantic", specifier = "~=2.11.4" }, { name = "pydantic-extra-types", specifier = "~=2.10.3" }, - { name = "pydantic-settings", specifier = "~=2.9.1" }, + { name = "pydantic-settings", specifier = "~=2.11.0" }, { name = "pyjwt", specifier = "~=2.10.1" }, { name = "pypdfium2", specifier = "==4.30.0" }, { name = "python-docx", specifier = "~=1.1.0" }, @@ -1546,6 +1563,7 @@ requires-dist = [ { name = "transformers", specifier = "~=4.56.1" }, { name = "unstructured", extras = ["docx", "epub", "md", "ppt", "pptx"], specifier = "~=0.16.1" }, { name = "weave", specifier = "~=0.51.0" }, + { name = "weaviate-client", specifier = "==4.17.0" }, { name = "webvtt-py", specifier = "~=0.5.1" }, { name = "yarl", specifier = "~=1.18.3" }, ] @@ -1568,6 +1586,7 @@ dev = [ { name = "pytest-cov", specifier = "~=4.1.0" }, { name = "pytest-env", specifier = "~=1.1.3" }, { name = "pytest-mock", specifier = "~=3.14.0" }, + { name = "pytest-timeout", specifier = ">=2.4.0" }, { name = "ruff", specifier = "~=0.14.0" }, { name = "scipy-stubs", specifier = ">=1.15.3.0" }, { name = "sseclient-py", specifier = ">=1.8.0" }, @@ -1652,7 +1671,7 @@ vdb = [ { name = "tidb-vector", specifier = "==0.0.9" }, { name = "upstash-vector", specifier = "==0.6.0" }, { name = "volcengine-compat", 
specifier = "~=1.0.0" }, - { name = "weaviate-client", specifier = "~=3.24.0" }, + { name = "weaviate-client", specifier = "==4.17.0" }, { name = "xinference-client", specifier = "~=1.2.2" }, ] @@ -1722,18 +1741,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b0/0d/9feae160378a3553fa9a339b0e9c1a048e147a4127210e286ef18b730f03/durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286", size = 3922, upload-time = "2025-05-17T13:52:36.463Z" }, ] -[[package]] -name = "ecdsa" -version = "0.19.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "six" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c0/1f/924e3caae75f471eae4b26bd13b698f6af2c44279f67af317439c2f4c46a/ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61", size = 201793, upload-time = "2025-03-13T11:52:43.25Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/a3/460c57f094a4a165c84a1341c373b0a4f5ec6ac244b998d5021aade89b77/ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3", size = 150607, upload-time = "2025-03-13T11:52:41.757Z" }, -] - [[package]] name = "elastic-transport" version = "8.17.1" @@ -1761,11 +1768,11 @@ wheels = [ [[package]] name = "emoji" -version = "2.14.1" +version = "2.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cb/7d/01cddcbb6f5cc0ba72e00ddf9b1fa206c802d557fd0a20b18e130edf1336/emoji-2.14.1.tar.gz", hash = "sha256:f8c50043d79a2c1410ebfae833ae1868d5941a67a6cd4d18377e2eb0bd79346b", size = 597182, upload-time = "2025-01-16T06:31:24.983Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/78/0d2db9382c92a163d7095fc08efff7800880f830a152cfced40161e7638d/emoji-2.15.0.tar.gz", hash = "sha256:eae4ab7d86456a70a00a985125a03263a5eac54cd55e51d7e184b1ed3b6757e4", size = 615483, upload-time = "2025-09-21T12:13:02.755Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/91/db/a0335710caaa6d0aebdaa65ad4df789c15d89b7babd9a30277838a7d9aac/emoji-2.14.1-py3-none-any.whl", hash = "sha256:35a8a486c1460addb1499e3bf7929d3889b2e2841a57401903699fef595e942b", size = 590617, upload-time = "2025-01-16T06:31:23.526Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/4b5aaaabddfacfe36ba7768817bd1f71a7a810a43705e531f3ae4c690767/emoji-2.15.0-py3-none-any.whl", hash = "sha256:205296793d66a89d88af4688fa57fd6496732eb48917a87175a023c8138995eb", size = 608433, upload-time = "2025-09-21T12:13:01.197Z" }, ] [[package]] @@ -1812,16 +1819,46 @@ wheels = [ [[package]] name = "fastapi" -version = "0.116.1" +version = "0.119.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "starlette" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/d7/6c8b3bfe33eeffa208183ec037fee0cce9f7f024089ab1c5d12ef04bd27c/fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143", size = 296485, upload-time = "2025-07-11T16:22:32.057Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/f9/5c5bcce82a7997cc0eb8c47b7800f862f6b56adc40486ed246e5010d443b/fastapi-0.119.0.tar.gz", hash = "sha256:451082403a2c1f0b99c6bd57c09110ed5463856804c8078d38e5a1f1035dbbb7", size = 336756, upload-time = "2025-10-11T17:13:40.53Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", size = 95631, upload-time = "2025-07-11T16:22:30.485Z" }, + { url = "https://files.pythonhosted.org/packages/ce/70/584c4d7cad80f5e833715c0a29962d7c93b4d18eed522a02981a6d1b6ee5/fastapi-0.119.0-py3-none-any.whl", hash = "sha256:90a2e49ed19515320abb864df570dd766be0662c5d577688f1600170f7f73cf2", size = 107095, upload-time = "2025-10-11T17:13:39.048Z" }, +] + +[[package]] +name = "fastuuid" +version = "0.13.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/80/3c16a1edad2e6cd82fbd15ac998cc1b881f478bf1f80ca717d941c441874/fastuuid-0.13.5.tar.gz", hash = "sha256:d4976821ab424d41542e1ea39bc828a9d454c3f8a04067c06fca123c5b95a1a1", size = 18255, upload-time = "2025-09-26T09:05:38.281Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/ab/9351bfc04ff2144115758233130b5469993d3d379323903a4634cb9c78c1/fastuuid-0.13.5-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:c122558ca4b5487e2bd0863467e4ccfe636afd1274803741487d48f2e32ea0e1", size = 493910, upload-time = "2025-09-26T09:12:36.995Z" }, + { url = "https://files.pythonhosted.org/packages/b7/ab/84fac529cc12a03d49595e70ac459380f7cb12c70f0fe401781b276f9e94/fastuuid-0.13.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d7abd42a03a17a681abddd19aa4d44ca2747138cf8a48373b395cf1341a10de2", size = 252621, upload-time = "2025-09-26T09:12:22.222Z" }, + { url = "https://files.pythonhosted.org/packages/7f/9d/f4c734d7b74a04ca695781c58a1376f07b206fe2849e58e7778d476a0e94/fastuuid-0.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2705cf7c2d6f7c03053404b75a4c44f872a73f6f9d5ea34f1dc6bba400c4a97c", size = 244269, upload-time = "2025-09-26T09:08:31.921Z" }, + { url = "https://files.pythonhosted.org/packages/5b/da/b42b7eb84523d69cfe9dac82950e105061c8d59f4d4d2cc3e170dbd20937/fastuuid-0.13.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d220a056fcbad25932c1f25304261198612f271f4d150b2a84e81adb877daf7", size = 271528, upload-time = "2025-09-26T09:12:42.718Z" }, + { url = "https://files.pythonhosted.org/packages/1b/45/6eee36929119e9544b0906fd6591e685d682e4b51cfad4c25d96ccf04009/fastuuid-0.13.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f29f93b5a0c5f5579f97f77d5319e9bfefd61d8678ec59d850201544faf33bf", size = 272168, upload-time = "2025-09-26T09:07:04.238Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ac/75b70f13515e12194a25b0459dd8a8a33de4ab0a92142f0776d21e41ca84/fastuuid-0.13.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:399d86623fb806151b1feb9fdd818ebfc1d50387199a35f7264f98dfc1540af5", size = 290948, upload-time = "2025-09-26T09:07:53.433Z" }, + { url = "https://files.pythonhosted.org/packages/76/30/1801326a5b433aafc04eae906e6b005e8a3d1120fd996409fe88124edb06/fastuuid-0.13.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:689e8795a1edd573b2c9a455024e4edf605a9690339bba29709857f7180894ea", size = 452932, upload-time = "2025-09-26T09:09:28.017Z" }, + { url = "https://files.pythonhosted.org/packages/61/2a/080b6b2ac4ef2ead54a7463ae4162d66a52867bbd4447ad5354427b82ae2/fastuuid-0.13.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:25e82c4a1734da168b36f7308e397afbe9c9b353799a9c69563a605f11dd4641", size = 468384, 
upload-time = "2025-09-26T09:08:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/b6/d3/4a3ffcaf8d874f7f208dad7e98ded7c5359b6599073960e3aa0530ca6139/fastuuid-0.13.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f62299e3cca69aad6a6fb37e26e45055587954d498ad98903fea24382377ea0e", size = 444815, upload-time = "2025-09-26T09:06:38.691Z" }, + { url = "https://files.pythonhosted.org/packages/9d/a0/08dd8663f7bff3e9c0b2416708b01d1fb65f52bcd4bce18760f77c4735fd/fastuuid-0.13.5-cp311-cp311-win32.whl", hash = "sha256:68227f2230381b89fb1ad362ca6e433de85c6c11c36312b41757cad47b8a8e32", size = 144897, upload-time = "2025-09-26T09:14:53.695Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e2/2c2a37dcc56e2323c6214c38c8faac22f9d03d98c481f8a40843e0b9526a/fastuuid-0.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:4a32306982bd031cb20d5d1a726b7b958a55babebd2300ce6c8e352d3496e931", size = 150523, upload-time = "2025-09-26T09:12:24.031Z" }, + { url = "https://files.pythonhosted.org/packages/21/36/434f137c5970cac19e57834e1f7680e85301619d49891618c00666700c61/fastuuid-0.13.5-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:35fe8045e866bc6846f8de6fa05acb1de0c32478048484a995e96d31e21dff2a", size = 494638, upload-time = "2025-09-26T09:14:58.695Z" }, + { url = "https://files.pythonhosted.org/packages/ca/3c/083de2ac007b2b305523b9c006dba5051e5afd87a626ef1a39f76e2c6b82/fastuuid-0.13.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:02a460333f52d731a006d18a52ef6fcb2d295a1f5b1a5938d30744191b2f77b7", size = 253138, upload-time = "2025-09-26T09:13:33.283Z" }, + { url = "https://files.pythonhosted.org/packages/73/5e/630cffa1c8775db526e39e9e4c5c7db0c27be0786bb21ba82c912ae19f63/fastuuid-0.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:74b0e4f8c307b9f477a5d7284db4431ce53a3c1e3f4173db7a97db18564a6202", size = 244521, upload-time = "2025-09-26T09:14:40.682Z" }, + { url = "https://files.pythonhosted.org/packages/4d/51/55d78705f4fbdadf88fb40f382f508d6c7a4941ceddd7825fafebb4cc778/fastuuid-0.13.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6955a99ef455c2986f3851f4e0ccc35dec56ac1a7720f2b92e88a75d6684512e", size = 271557, upload-time = "2025-09-26T09:15:09.75Z" }, + { url = "https://files.pythonhosted.org/packages/6a/2b/1b89e90a8635e5587ccdbbeb169c590672ce7637880f2c047482a0359950/fastuuid-0.13.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f10c77b826738c1a27dcdaa92ea4dc1ec9d869748a99e1fde54f1379553d4854", size = 272334, upload-time = "2025-09-26T09:07:48.865Z" }, + { url = "https://files.pythonhosted.org/packages/0c/06/4c8207894eeb30414999e5c3f66ac039bc4003437eb4060d8a1bceb4cc6f/fastuuid-0.13.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bb25dccbeb249d16d5e664f65f17ebec05136821d5ef462c4110e3f76b86fb86", size = 290594, upload-time = "2025-09-26T09:12:54.124Z" }, + { url = "https://files.pythonhosted.org/packages/50/69/96d221931a31d77a47cc2487bdfacfb3091edfc2e7a04b1795df1aec05df/fastuuid-0.13.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a5becc646a3eeafb76ce0a6783ba190cd182e3790a8b2c78ca9db2b5e87af952", size = 452835, upload-time = "2025-09-26T09:14:00.994Z" }, + { url = "https://files.pythonhosted.org/packages/25/ef/bf045f0a47dcec96247497ef3f7a31d86ebc074330e2dccc34b8dbc0468a/fastuuid-0.13.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:69b34363752d06e9bb0dbdf02ae391ec56ac948c6f2eb00be90dad68e80774b9", size = 468225, upload-time = 
"2025-09-26T09:13:38.585Z" }, + { url = "https://files.pythonhosted.org/packages/30/46/4817ab5a3778927155a4bde92540d4c4fa996161ec8b8e080c8928b0984e/fastuuid-0.13.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57d0768afcad0eab8770c9b8cf904716bd3c547e8b9a4e755ee8a673b060a3a3", size = 444907, upload-time = "2025-09-26T09:14:30.163Z" }, + { url = "https://files.pythonhosted.org/packages/80/27/ab284117ce4dc9b356a7196bdbf220510285f201d27f1f078592cdc8187b/fastuuid-0.13.5-cp312-cp312-win32.whl", hash = "sha256:8ac6c6f5129d52eaa6ef9ea4b6e2f7c69468a053f3ab8e439661186b9c06bb85", size = 145415, upload-time = "2025-09-26T09:08:59.494Z" }, + { url = "https://files.pythonhosted.org/packages/f4/0c/f970a4222773b248931819f8940800b760283216ca3dda173ed027e94bdd/fastuuid-0.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:ad630e97715beefef07ec37c9c162336e500400774e2c1cbe1a0df6f80d15b9a", size = 150840, upload-time = "2025-09-26T09:13:46.115Z" }, ] [[package]] @@ -1838,11 +1875,11 @@ wheels = [ [[package]] name = "filelock" -version = "3.19.1" +version = "3.20.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687, upload-time = "2025-08-14T16:56:03.016Z" } +sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" }, + { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" }, ] [[package]] @@ -1873,17 +1910,18 @@ wheels = [ [[package]] name = "flask-compress" -version = "1.18" +version = "1.17" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "brotli", marker = "platform_python_implementation != 'PyPy'" }, { name = "brotlicffi", marker = "platform_python_implementation == 'PyPy'" }, { name = "flask" }, - { name = "pyzstd" }, + { name = "zstandard" }, + { name = "zstandard", marker = "platform_python_implementation == 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/33/77/7d3c1b071e29c09bd796a84f95442f3c75f24a1f2a9f2c86c857579ab4ec/flask_compress-1.18.tar.gz", hash = "sha256:fdbae1bd8e334dfdc8b19549829163987c796fafea7fa1c63f9a4add23c8413a", size = 16571, upload-time = "2025-07-11T14:08:13.496Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/1f/260db5a4517d59bfde7b4a0d71052df68fb84983bda9231100e3b80f5989/flask_compress-1.17.tar.gz", hash = "sha256:1ebb112b129ea7c9e7d6ee6d5cc0d64f226cbc50c4daddf1a58b9bd02253fbd8", size = 15733, upload-time = "2024-10-14T08:13:33.196Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/28/d8/953232867e42b5b91899e9c6c4a2b89218a5fbbdbbb4493f48729770de81/flask_compress-1.18-py3-none-any.whl", hash = 
"sha256:9c3b7defbd0f29a06e51617b910eab07bd4db314507e4edc4c6b02a2e139fda9", size = 9340, upload-time = "2025-07-11T14:08:12.275Z" }, + { url = "https://files.pythonhosted.org/packages/f7/54/ff08f947d07c0a8a5d8f1c8e57b142c97748ca912b259db6467ab35983cd/Flask_Compress-1.17-py3-none-any.whl", hash = "sha256:415131f197c41109f08e8fdfc3a6628d83d81680fb5ecd0b3a97410e02397b20", size = 8723, upload-time = "2024-10-14T08:13:31.726Z" }, ] [[package]] @@ -1941,19 +1979,19 @@ wheels = [ [[package]] name = "flask-restx" -version = "1.3.0" +version = "1.3.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aniso8601" }, { name = "flask" }, { name = "importlib-resources" }, { name = "jsonschema" }, - { name = "pytz" }, + { name = "referencing" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/45/4c/2e7d84e2b406b47cf3bf730f521efe474977b404ee170d8ea68dc37e6733/flask-restx-1.3.0.tar.gz", hash = "sha256:4f3d3fa7b6191fcc715b18c201a12cd875176f92ba4acc61626ccfd571ee1728", size = 2814072, upload-time = "2023-12-10T14:48:55.575Z" } +sdist = { url = "https://files.pythonhosted.org/packages/43/89/9b9ca58cbb8e9ec46f4a510ba93878e0c88d518bf03c350e3b1b7ad85cbe/flask-restx-1.3.2.tar.gz", hash = "sha256:0ae13d77e7d7e4dce513970cfa9db45364aef210e99022de26d2b73eb4dbced5", size = 2814719, upload-time = "2025-09-23T20:34:25.21Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/bf/1907369f2a7ee614dde5152ff8f811159d357e77962aa3f8c2e937f63731/flask_restx-1.3.0-py2.py3-none-any.whl", hash = "sha256:636c56c3fb3f2c1df979e748019f084a938c4da2035a3e535a4673e4fc177691", size = 2798683, upload-time = "2023-12-10T14:48:53.293Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3f/b82cd8e733a355db1abb8297afbf59ec972c00ef90bf8d4eed287958b204/flask_restx-1.3.2-py2.py3-none-any.whl", hash = "sha256:6e035496e8223668044fc45bf769e526352fd648d9e159bd631d94fd645a687b", size = 2799859, upload-time = "2025-09-23T20:34:23.055Z" }, ] [[package]] @@ -1971,54 +2009,52 @@ wheels = [ [[package]] name = "flatbuffers" -version = "25.2.10" +version = "25.9.23" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/30/eb5dce7994fc71a2f685d98ec33cc660c0a5887db5610137e60d8cbc4489/flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e", size = 22170, upload-time = "2025-02-11T04:26:46.257Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/1f/3ee70b0a55137442038f2a33469cc5fddd7e0ad2abf83d7497c18a2b6923/flatbuffers-25.9.23.tar.gz", hash = "sha256:676f9fa62750bb50cf531b42a0a2a118ad8f7f797a511eda12881c016f093b12", size = 22067, upload-time = "2025-09-24T05:25:30.106Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/25/155f9f080d5e4bc0082edfda032ea2bc2b8fab3f4d25d46c1e9dd22a1a89/flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051", size = 30953, upload-time = "2025-02-11T04:26:44.484Z" }, + { url = "https://files.pythonhosted.org/packages/ee/1b/00a78aa2e8fbd63f9af08c9c19e6deb3d5d66b4dda677a0f61654680ee89/flatbuffers-25.9.23-py2.py3-none-any.whl", hash = "sha256:255538574d6cb6d0a79a17ec8bc0d30985913b87513a01cce8bcdb6b4c44d0e2", size = 30869, upload-time = "2025-09-24T05:25:28.912Z" }, ] [[package]] name = "frozenlist" -version = "1.7.0" +version = "1.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078, upload-time = "2025-06-09T23:02:35.538Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/34/7e/803dde33760128acd393a27eb002f2020ddb8d99d30a44bfbaab31c5f08a/frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a", size = 82251, upload-time = "2025-06-09T23:00:16.279Z" }, - { url = "https://files.pythonhosted.org/packages/75/a9/9c2c5760b6ba45eae11334db454c189d43d34a4c0b489feb2175e5e64277/frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750", size = 48183, upload-time = "2025-06-09T23:00:17.698Z" }, - { url = "https://files.pythonhosted.org/packages/47/be/4038e2d869f8a2da165f35a6befb9158c259819be22eeaf9c9a8f6a87771/frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd", size = 47107, upload-time = "2025-06-09T23:00:18.952Z" }, - { url = "https://files.pythonhosted.org/packages/79/26/85314b8a83187c76a37183ceed886381a5f992975786f883472fcb6dc5f2/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2", size = 237333, upload-time = "2025-06-09T23:00:20.275Z" }, - { url = "https://files.pythonhosted.org/packages/1f/fd/e5b64f7d2c92a41639ffb2ad44a6a82f347787abc0c7df5f49057cf11770/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f", size = 231724, upload-time = "2025-06-09T23:00:21.705Z" }, - { url = "https://files.pythonhosted.org/packages/20/fb/03395c0a43a5976af4bf7534759d214405fbbb4c114683f434dfdd3128ef/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30", size = 245842, upload-time = "2025-06-09T23:00:23.148Z" }, - { url = "https://files.pythonhosted.org/packages/d0/15/c01c8e1dffdac5d9803507d824f27aed2ba76b6ed0026fab4d9866e82f1f/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98", size = 239767, upload-time = "2025-06-09T23:00:25.103Z" }, - { url = "https://files.pythonhosted.org/packages/14/99/3f4c6fe882c1f5514b6848aa0a69b20cb5e5d8e8f51a339d48c0e9305ed0/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86", size = 224130, upload-time = "2025-06-09T23:00:27.061Z" }, - { url = "https://files.pythonhosted.org/packages/4d/83/220a374bd7b2aeba9d0725130665afe11de347d95c3620b9b82cc2fcab97/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae", size = 235301, upload-time = "2025-06-09T23:00:29.02Z" }, - { url = "https://files.pythonhosted.org/packages/03/3c/3e3390d75334a063181625343e8daab61b77e1b8214802cc4e8a1bb678fc/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8", size = 234606, upload-time = "2025-06-09T23:00:30.514Z" }, - { url = "https://files.pythonhosted.org/packages/23/1e/58232c19608b7a549d72d9903005e2d82488f12554a32de2d5fb59b9b1ba/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31", size = 248372, upload-time = "2025-06-09T23:00:31.966Z" }, - { url = "https://files.pythonhosted.org/packages/c0/a4/e4a567e01702a88a74ce8a324691e62a629bf47d4f8607f24bf1c7216e7f/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7", size = 229860, upload-time = "2025-06-09T23:00:33.375Z" }, - { url = "https://files.pythonhosted.org/packages/73/a6/63b3374f7d22268b41a9db73d68a8233afa30ed164c46107b33c4d18ecdd/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5", size = 245893, upload-time = "2025-06-09T23:00:35.002Z" }, - { url = "https://files.pythonhosted.org/packages/6d/eb/d18b3f6e64799a79673c4ba0b45e4cfbe49c240edfd03a68be20002eaeaa/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898", size = 246323, upload-time = "2025-06-09T23:00:36.468Z" }, - { url = "https://files.pythonhosted.org/packages/5a/f5/720f3812e3d06cd89a1d5db9ff6450088b8f5c449dae8ffb2971a44da506/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56", size = 233149, upload-time = "2025-06-09T23:00:37.963Z" }, - { url = "https://files.pythonhosted.org/packages/69/68/03efbf545e217d5db8446acfd4c447c15b7c8cf4dbd4a58403111df9322d/frozenlist-1.7.0-cp311-cp311-win32.whl", hash = "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7", size = 39565, upload-time = "2025-06-09T23:00:39.753Z" }, - { url = "https://files.pythonhosted.org/packages/58/17/fe61124c5c333ae87f09bb67186d65038834a47d974fc10a5fadb4cc5ae1/frozenlist-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d", size = 44019, upload-time = "2025-06-09T23:00:40.988Z" }, - { url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", size = 81424, upload-time = "2025-06-09T23:00:42.24Z" }, - { url = "https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", size = 47952, upload-time = "2025-06-09T23:00:43.481Z" }, - { url = "https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", size = 46688, upload-time = 
"2025-06-09T23:00:44.793Z" }, - { url = "https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", size = 243084, upload-time = "2025-06-09T23:00:46.125Z" }, - { url = "https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", size = 233524, upload-time = "2025-06-09T23:00:47.73Z" }, - { url = "https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", size = 248493, upload-time = "2025-06-09T23:00:49.742Z" }, - { url = "https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", size = 244116, upload-time = "2025-06-09T23:00:51.352Z" }, - { url = "https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", size = 224557, upload-time = "2025-06-09T23:00:52.855Z" }, - { url = "https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", size = 241820, upload-time = "2025-06-09T23:00:54.43Z" }, - { url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", size = 236542, upload-time = "2025-06-09T23:00:56.409Z" }, - { url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", size = 249350, upload-time = "2025-06-09T23:00:58.468Z" }, - { url = "https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", size = 225093, upload-time = "2025-06-09T23:01:00.015Z" }, - { url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", size = 245482, upload-time = "2025-06-09T23:01:01.474Z" }, - { url = 
"https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", size = 249590, upload-time = "2025-06-09T23:01:02.961Z" }, - { url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", size = 237785, upload-time = "2025-06-09T23:01:05.095Z" }, - { url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", size = 39487, upload-time = "2025-06-09T23:01:06.54Z" }, - { url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", size = 43874, upload-time = "2025-06-09T23:01:07.752Z" }, - { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, + { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" }, + { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = "2025-10-06T05:35:48.38Z" }, + { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067, upload-time = "2025-10-06T05:35:49.97Z" }, + { url = "https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160, upload-time = "2025-10-06T05:35:51.729Z" }, + { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544, upload-time = "2025-10-06T05:35:53.246Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797, upload-time = "2025-10-06T05:35:54.497Z" }, + { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923, upload-time = "2025-10-06T05:35:55.861Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886, upload-time = "2025-10-06T05:35:57.399Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731, upload-time = "2025-10-06T05:35:58.563Z" }, + { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = "2025-10-06T05:35:59.719Z" }, + { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" }, + { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" }, + { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" }, + { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, ] [[package]] @@ -2396,51 +2432,51 @@ wheels = [ [[package]] name = "grimp" -version = "3.11" +version = "3.12" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cc/5e/1be34b2aed713fca8b9274805fc295d54f9806fccbfb15451fdb60066b23/grimp-3.11.tar.gz", hash = "sha256:920d069a6c591b830d661e0f7e78743d276e05df1072dc139fc2ee314a5e723d", size = 844989, upload-time = "2025-09-01T07:25:34.148Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/a4/463903a1cfbc19d3e7125d6614bb900df2b34dd675c7d93544d154819d2b/grimp-3.12.tar.gz", hash = "sha256:1a733b1d719c42bd2fada58240975fa7d09936b57120c34b64cfb31e42701010", size = 845594, upload-time = "2025-10-09T09:51:02.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d3/f1/39fa82cf6738cea7ae454a739a0b4a233ccc2905e2506821cdcad85fef1c/grimp-3.11-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8271906dadd01f9a866c411aa8c4f15cf0469d8476734d3672f55d1fdad05ddf", size = 2015949, upload-time = "2025-09-01T07:24:38.836Z" }, - { url = "https://files.pythonhosted.org/packages/a8/a2/19209b8680899034c74340c115770b3f0fe6186b2a8779ce3e578aa3ab30/grimp-3.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cb20844c1ec8729627dcbf8ca18fe6e2fb0c0cd34683c6134cd89542538d12a1", size = 1929047, upload-time = "2025-09-01T07:24:31.813Z" }, - { url = "https://files.pythonhosted.org/packages/ee/b1/cef086ed0fc3c1b2bba413f55cae25ebdd3ff11bc683639ba8fc29b09d7b/grimp-3.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e39c47886320b2980d14f31351377d824683748d5982c34283461853b5528102", size = 2093705, upload-time = "2025-09-01T07:23:18.927Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/4a/6945c6a5267d01d2e321ba622d1fc138552bd2a69d220c6baafb60a128da/grimp-3.11-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1add91bf2e024321c770f1271799576d22a3f7527ed662e304f40e73c6a14138", size = 2045422, upload-time = "2025-09-01T07:23:31.571Z" }, - { url = "https://files.pythonhosted.org/packages/49/1a/4bfb34cd6cbf4d712305c2f452e650772cbc43773f1484513375e9b83a31/grimp-3.11-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0bb0bc0995de10135d3b5dc5dbe1450d88a0fa7331ec7885db31569ad61e4d9", size = 2194719, upload-time = "2025-09-01T07:24:13.206Z" }, - { url = "https://files.pythonhosted.org/packages/d6/93/e6d9f9a1fbc78df685b9e970c28d3339ae441f7da970567d65b63c7a199e/grimp-3.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9152657e63ad0dee6029fe612d5550fb1c029c987b496a53a4d49246e772bd7b", size = 2391047, upload-time = "2025-09-01T07:23:48.095Z" }, - { url = "https://files.pythonhosted.org/packages/0f/44/f28d0a88161a55751da335b22d252ef6e2fa3fa9e5111f5a5b26caa66e8f/grimp-3.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352ba7f1aba578315dddb00eff873e3fbc0c7386b3d64bbc1fe8e28d2e12eda2", size = 2241597, upload-time = "2025-09-01T07:24:00.354Z" }, - { url = "https://files.pythonhosted.org/packages/15/89/2957413b54c047e87f8ea6611929ef0bbaedbab00399166119b5a164a430/grimp-3.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1291a323bbf30b0387ee547655a693b034376d9354797a076c53839966149e3", size = 2153283, upload-time = "2025-09-01T07:24:22.706Z" }, - { url = "https://files.pythonhosted.org/packages/3d/83/69162edb2c49fff21a42fca68f51fbb93006a1b6a10c0f329a61a7a943e8/grimp-3.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d4b47faa3a35ccee75039343267d990f03c7f39af8abe01a99f41c83339c5df4", size = 2269299, upload-time = "2025-09-01T07:24:45.272Z" }, - { url = "https://files.pythonhosted.org/packages/5f/22/1bbf95e4bab491a847f0409d19d9c343a8c361ab1f2921b13318278d937a/grimp-3.11-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:cae0cc48584389df4f2ff037373cec5dbd4f3c7025583dc69724d5c453fc239b", size = 2305354, upload-time = "2025-09-01T07:24:57.413Z" }, - { url = "https://files.pythonhosted.org/packages/1f/fd/2d40ed913744202e5d7625936f8bd9e1d44d1a062abbfc25858e7c9acd6a/grimp-3.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3ba13bd9e58349c48a6d420a62f244b3eee2c47aedf99db64c44ba67d07e64d6", size = 2299647, upload-time = "2025-09-01T07:25:10.188Z" }, - { url = "https://files.pythonhosted.org/packages/15/be/6e721a258045285193a16f4be9e898f7df5cc28f0b903eb010d8a7035841/grimp-3.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ef2ee94b2a0ec7e8ca90d63a724d77527632ab3825381610bd36891fbcc49071", size = 2323713, upload-time = "2025-09-01T07:25:22.678Z" }, - { url = "https://files.pythonhosted.org/packages/5e/ad/0ae7a1753f4d60d5a9bebefd112bb83ef115541ec7b509565a9fbb712d60/grimp-3.11-cp311-cp311-win32.whl", hash = "sha256:b4810484e05300bc3dfffaeaaa89c07dcfd6e1712ddcbe2e14911c0da5737d40", size = 1707055, upload-time = "2025-09-01T07:25:43.719Z" }, - { url = "https://files.pythonhosted.org/packages/df/b7/af81165c2144043293b0729d6be92885c52a38aadff16e6ac9418baab30f/grimp-3.11-cp311-cp311-win_amd64.whl", hash = "sha256:31b9b8fd334dc959d3c3b0d7761f805decb628c4eac98ff7707c8b381576e48f", size = 1809864, upload-time = "2025-09-01T07:25:36.724Z" }, - { url = 
"https://files.pythonhosted.org/packages/06/ad/271c0f2b49be72119ad3724e4da3ba607c533c8aa2709078a51f21428fab/grimp-3.11-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2731b03deeea57ec3722325c3ebfa25b6ec4bc049d6b5a853ac45bb173843537", size = 2011143, upload-time = "2025-09-01T07:24:40.113Z" }, - { url = "https://files.pythonhosted.org/packages/40/85/858811346c77bbbe6e62ffaa5367f46990a30a47e77ce9f6c0f3d65a42bd/grimp-3.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39953c320e235e2fb7f0ad10b066ddd526ab26bc54b09dd45620999898ab2b33", size = 1927855, upload-time = "2025-09-01T07:24:33.468Z" }, - { url = "https://files.pythonhosted.org/packages/27/f8/5ce51d2fb641e25e187c10282a30f6c7f680dcc5938e0eb5670b7a08c735/grimp-3.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b363da88aa8aca5edc008c4473def9015f31d293493ca6c7e211a852b5ada6c", size = 2093246, upload-time = "2025-09-01T07:23:20.091Z" }, - { url = "https://files.pythonhosted.org/packages/09/17/217490c0d59bfcf254cb15c82d8292d6e67717cfa1b636a29f6368f59147/grimp-3.11-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dded52a319d31de2178a6e2f26da188b0974748e27af430756b3991478443b12", size = 2044921, upload-time = "2025-09-01T07:23:33.118Z" }, - { url = "https://files.pythonhosted.org/packages/04/85/54e5c723b2bd19c343c358866cc6359a38ccf980cf128ea2d7dfb5f59384/grimp-3.11-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9763b80ca072ec64384fae1ba54f18a00e88a36f527ba8dcf2e8456019e77de", size = 2195131, upload-time = "2025-09-01T07:24:14.496Z" }, - { url = "https://files.pythonhosted.org/packages/fd/15/8188cd73fff83055c1dca6e20c8315e947e2564ceaaf8b957b3ca7e1fa93/grimp-3.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e351c159834c84f723cfa1252f1b23d600072c362f4bfdc87df7eed9851004a", size = 2391156, upload-time = "2025-09-01T07:23:49.283Z" }, - { url = "https://files.pythonhosted.org/packages/c2/51/f2372c04b9b6e4628752ed9fc801bb05f968c8c4c4b28d78eb387ab96545/grimp-3.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19f2ab56e647cf65a2d6e8b2e02d5055b1a4cff72aee961cbd78afa0e9a1f698", size = 2245104, upload-time = "2025-09-01T07:24:01.54Z" }, - { url = "https://files.pythonhosted.org/packages/83/6d/bf4948b838bfc7d8c3f1da50f1bb2a8c44984af75845d41420aaa1b3f234/grimp-3.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30cc197decec63168a15c6c8a65ee8f2f095b4a7bf14244a4ed24e48b272843a", size = 2153265, upload-time = "2025-09-01T07:24:23.971Z" }, - { url = "https://files.pythonhosted.org/packages/52/18/ce2ff3f67adc286de245372b4ac163b10544635e1a86a2bc402502f1b721/grimp-3.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be27e9ecc4f8a9f96e5a09e8588b5785de289a70950b7c0c4b2bcafc96156a18", size = 2268265, upload-time = "2025-09-01T07:24:46.505Z" }, - { url = "https://files.pythonhosted.org/packages/23/b0/dc28cb7e01f578424c9efbb9a47273b14e5d3a2283197d019cbb5e6c3d4f/grimp-3.11-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab72874999a5a309a39ec91168f7e76c0acb7a81af2cc463431029202a661a5d", size = 2304895, upload-time = "2025-09-01T07:24:58.743Z" }, - { url = "https://files.pythonhosted.org/packages/9e/00/48916bf8284fc48f559ea4a9ccd47bd598493eac74dbb74c676780b664e7/grimp-3.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:55b08122a2896207ff09ffe349ad9f440a4382c092a7405191ac0512977a328f", size = 2299337, upload-time = "2025-09-01T07:25:11.886Z" }, - { url = 
"https://files.pythonhosted.org/packages/35/f9/6bcab18cdf1186185a6ae9abb4a5dcc43e19d46bc431becca65ac0ba1a71/grimp-3.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:54e6e5417bcd7ad44439ad1b8ef9e85f65332dcc42c9fbdbaf566da127a32d3d", size = 2322913, upload-time = "2025-09-01T07:25:24.529Z" }, - { url = "https://files.pythonhosted.org/packages/92/19/023e45fe46603172df7c55ced127bc74fcd14b8f87505ea31ea6ae9f86bc/grimp-3.11-cp312-cp312-win32.whl", hash = "sha256:41d67c29a8737b4dd7ffe11deedc6f1cfea3ce1b845a72a20c4938e8dd85b2fa", size = 1707368, upload-time = "2025-09-01T07:25:45.096Z" }, - { url = "https://files.pythonhosted.org/packages/71/ef/3cbe04829d7416f4b3c06b096ad1972622443bd11833da4d98178da22637/grimp-3.11-cp312-cp312-win_amd64.whl", hash = "sha256:c3c6fc76e1e5db2733800490ee4d46a710a5b4ac23eaa8a2313489a6e7bc60e2", size = 1811752, upload-time = "2025-09-01T07:25:38.071Z" }, - { url = "https://files.pythonhosted.org/packages/bd/6b/dca73b704e87609b4fb5170d97ae1e17fe25ffb4e8a6dee4ac21c31da9f4/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c634e77d4ee9959b618ca0526cb95d8eeaa7d716574d270fd4d880243e4e76", size = 2095005, upload-time = "2025-09-01T07:23:27.57Z" }, - { url = "https://files.pythonhosted.org/packages/35/f1/a7be1b866811eafa0798316baf988347cac10acaea1f48dbc4bc536bc82a/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:41b55e2246aed2bd2f8a6c334b5c91c737d35fec9d1c1cd86884bff1b482ab9b", size = 2046301, upload-time = "2025-09-01T07:23:41.046Z" }, - { url = "https://files.pythonhosted.org/packages/d7/c5/15071e06972f2a04ccf7c0b9f6d0cd5851a7badc59ba3df5c4036af32275/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6400eff472b205787f5fc73d2b913534c5f1ddfacd5fbcacf9b0f46e3843898", size = 2194815, upload-time = "2025-09-01T07:24:20.256Z" }, - { url = "https://files.pythonhosted.org/packages/9f/27/73a08f322adeef2a3c2d22adb7089a0e6a134dae340293be265e70471166/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ddd0db48f1168bc430adae3b5457bf32bb9c7d479791d5f9f640fe752256d65", size = 2388925, upload-time = "2025-09-01T07:23:56.658Z" }, - { url = "https://files.pythonhosted.org/packages/9d/1b/4b372addef06433b37b035006cf102bc2767c3d573916a5ce6c9b50c96f5/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e744a031841413c06bd6e118e853b1e0f2d19a5081eee7c09bb7c4c8868ca81b", size = 2242506, upload-time = "2025-09-01T07:24:09.133Z" }, - { url = "https://files.pythonhosted.org/packages/e9/2a/d618a74aa66a585ed09eebed981d71f6310ccd0c85fecdefca6a660338e3/grimp-3.11-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf5d4cbd033803ba433f445385f070759730f64f0798c75a11a3d60e7642bb9c", size = 2154028, upload-time = "2025-09-01T07:24:29.086Z" }, - { url = "https://files.pythonhosted.org/packages/2b/74/50255cc0af7b8a742d00b72ee6d825da8ce52b036260ee84d1e9e27a7fc7/grimp-3.11-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:70cf9196180226384352360ba02e1f7634e00e8e999a65087f4e7383ece78afb", size = 2270008, upload-time = "2025-09-01T07:24:53.195Z" }, - { url = "https://files.pythonhosted.org/packages/42/a0/1f441584ce68b9b818cb18f8bad2aa7bef695853f2711fb648526e0237b9/grimp-3.11-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:e5a9df811aeb2f3d764070835f9ac65f240af154ba9ba23bda7a4c4d4ad46744", size = 2306660, upload-time = 
"2025-09-01T07:25:06.031Z" }, - { url = "https://files.pythonhosted.org/packages/35/e9/c1b61b030b286c7c117024676d88db52cdf8b504e444430d813170a6b9f6/grimp-3.11-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:23ceffc0a19e7b85107b137435fadd3d15a3883cbe0b65d7f93f3b33a6805af7", size = 2300281, upload-time = "2025-09-01T07:25:18.5Z" }, - { url = "https://files.pythonhosted.org/packages/44/d0/124a230725e1bff859c0ad193d6e2a64d2d1273d6ae66e04138dbd0f1ca6/grimp-3.11-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e57baac1360b90b944e2fd0321b490650113e5b927d013b26e220c2889f6f275", size = 2324348, upload-time = "2025-09-01T07:25:31.409Z" }, + { url = "https://files.pythonhosted.org/packages/0f/b5/1c89600bf181d41502aed51b73b3a5889158dee35c534f51df3666779587/grimp-3.12-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e6c02e51eebfcf71146d42f47c9ce353ac1902ae446e18d0e663ab9fdaa0496c", size = 2062043, upload-time = "2025-10-09T09:49:57.035Z" }, + { url = "https://files.pythonhosted.org/packages/1f/86/bab32c5e26949a82299853ccb28ee30a7899d0355b0d209b535eb03bc04e/grimp-3.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:79bc2b0ff6072c43c0ddc4479b25b7a8198795486478cfe3be0503b2c7d32c7f", size = 1981378, upload-time = "2025-10-09T09:49:49.237Z" }, + { url = "https://files.pythonhosted.org/packages/b5/03/b9f7e465488e8593de9a1e88355c3cfba04c02c3a34a6b02cbe946e0d587/grimp-3.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3986f11a9dd4167a2943cf6e80b458c0a825b48609713736cc8f2de135000810", size = 2130579, upload-time = "2025-10-09T09:48:36.035Z" }, + { url = "https://files.pythonhosted.org/packages/1b/d0/81c776327354f32f86f321dd8468b32ba6b52dc3511d912d24c4fac96da4/grimp-3.12-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7a2abe55844f9dad25499ff9456d680496f390d160b6b3a4e5aeabc0183813b4", size = 2091201, upload-time = "2025-10-09T09:48:52.57Z" }, + { url = "https://files.pythonhosted.org/packages/9d/7e/116ac4c1e4407a123fba4bb076b2e880643d70b3f4f1621c3323b5d66e12/grimp-3.12-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e59112d0f557335b619bcf10263d11873579230bd3df4a4b19224ec18e7212d6", size = 2240782, upload-time = "2025-10-09T09:49:30.915Z" }, + { url = "https://files.pythonhosted.org/packages/06/7f/89bbec1241a8504499975f0f08befea0cf3d27c52f9808602fff8075c639/grimp-3.12-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b858e2e5a489c36710322970aa82bfbd3f1c4107c8564960629a59d2f17a53d0", size = 2423143, upload-time = "2025-10-09T09:49:05.18Z" }, + { url = "https://files.pythonhosted.org/packages/86/d7/2f416439b624b2a91bf2e0e456f58d74d51aa7ad239099cf4a8911d952c0/grimp-3.12-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d46cc1222dd301e0be371b97f0cdecae178089704e8a285e3edd4750ec46270a", size = 2303850, upload-time = "2025-10-09T09:49:19.073Z" }, + { url = "https://files.pythonhosted.org/packages/60/bd/8c2f48c26151eb9a65bc41f01004b43cb1b31791ffb61758d40d2f6b485a/grimp-3.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef06822f75856af28e7fcc580034043c543b1c99b07d2bd467bd173a7f10691", size = 2168571, upload-time = "2025-10-09T09:49:39.844Z" }, + { url = "https://files.pythonhosted.org/packages/5a/45/01a839434ff88be24317aa52cc1ba158833bd1d071efe0da1b14838af024/grimp-3.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4c19f1cba8a95c898473dd18f9c81358019d67f87f140b0b8401550e6d21c5a3", size = 2310869, upload-time = 
"2025-10-09T09:50:05.153Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7b/0dc45fdc15562c2faf8a95a8685d3805d27decdef6fcfb66d9b577ed2f12/grimp-3.12-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:600e8dbc1cd9c6decbc22089730221c65591b7ba5f89751d07fc7ad014d99aa1", size = 2353397, upload-time = "2025-10-09T09:50:17.755Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ec/07734ecc4f1489ffc071417f7bc881c939bcfdfba10eb585bce510ede1b2/grimp-3.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:259ba53b82cfb9c2c2d097b2237970c4e9903fa2d0b664b7e12329d9a64924f9", size = 2350166, upload-time = "2025-10-09T09:50:32.237Z" }, + { url = "https://files.pythonhosted.org/packages/a4/f5/45d80e2fa205066a484f0c1a667a249408a49bb3b665d62677f879920aa0/grimp-3.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a593549b1f66b1c12574e71f9e8c0073b372888c6b6706e2617bba2713ae28c2", size = 2360590, upload-time = "2025-10-09T09:50:49.961Z" }, + { url = "https://files.pythonhosted.org/packages/e6/f2/7ab1bc4d613189183c17741ff0d03490d9749eb5130b8b56e82ed77098b0/grimp-3.12-cp311-cp311-win32.whl", hash = "sha256:356ee969443f06c6c3a270f5a7221f946f0cb135a8b8ece2009990b293504bb3", size = 1748183, upload-time = "2025-10-09T09:51:13.503Z" }, + { url = "https://files.pythonhosted.org/packages/91/62/195f37a68d07fab40c8934ae8e39f9ff1f9a5bf3e375059b9cf14ccba302/grimp-3.12-cp311-cp311-win_amd64.whl", hash = "sha256:75e1f0d74f3a242a1c34e464d775c36b1c8b9d8c92b35f46f221e73e9b2f0065", size = 1851099, upload-time = "2025-10-09T09:51:04.747Z" }, + { url = "https://files.pythonhosted.org/packages/12/ac/0f55980a59c07439a965d3975f1cf3a6574f7d773910b9d6924790e0dddf/grimp-3.12-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:af399fc0ffddfbd7ea6c2e8546be1ab5284ee800f15a445705bdda5d63501b34", size = 2058862, upload-time = "2025-10-09T09:49:58.478Z" }, + { url = "https://files.pythonhosted.org/packages/cc/b1/5fdcb1db7cb3253c78d87a0b8c3f7f9c5214b273861300b51c897c55e6b8/grimp-3.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f08358acbaf9a4b324537bf344fd2d76b5f9b6f1bfaf9a431e9453fc0eaee5f", size = 1977586, upload-time = "2025-10-09T09:49:50.49Z" }, + { url = "https://files.pythonhosted.org/packages/c9/b9/e5f6d265b71430f9641daa9476cde8c23549e396c558b39a0bdc7fee824f/grimp-3.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eeb1616cafe9074fcb390fcfc01e6e5a0e0ddd5acb9dd37579985b2879c239a", size = 2130610, upload-time = "2025-10-09T09:48:38.472Z" }, + { url = "https://files.pythonhosted.org/packages/da/e1/2d0601c9aac2ab7340504e85ca4cd55f2991501a03e421bec78f53a07478/grimp-3.12-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99e648e299f7cd3daaee2cb745192e7ea159c7d38df76b4dcca12a2ef68a3ede", size = 2092775, upload-time = "2025-10-09T09:48:53.841Z" }, + { url = "https://files.pythonhosted.org/packages/db/a1/e63315477127ed8f31a1a93911d084bf704d6e126ca27650e3c3389701a6/grimp-3.12-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b24c5ce351030d1f83e69acd76a06863dd87041ceb25572339f7334e210cbc4", size = 2239336, upload-time = "2025-10-09T09:49:32.185Z" }, + { url = "https://files.pythonhosted.org/packages/f2/09/cd76d35121f053a95a58fc5830756c62e5c9de74aa4e16b4dc27ce6ada2c/grimp-3.12-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd40a5ec09d1dfafaae88b53231ab79378183e2e9a03e7b26b7a30133d027d8a", size = 2421851, upload-time = "2025-10-09T09:49:06.893Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/46/e8390a7c5ed85b4dbeff4e873f1ece8d9acf72d72f084b397ccc2facfa3b/grimp-3.12-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0aebdfad66d6f4e8b0f7364ce0429d208be3510918097f969428165074d3103e", size = 2304849, upload-time = "2025-10-09T09:49:20.695Z" }, + { url = "https://files.pythonhosted.org/packages/bd/81/f73edbc48a283f634233b6153ac43e4e7b9f58108ffc19da803b0015cb60/grimp-3.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76fd06be98d6bea9ea8a804da22c80accf1d277fe04abd5f3dff05d087f056f7", size = 2168655, upload-time = "2025-10-09T09:49:41.118Z" }, + { url = "https://files.pythonhosted.org/packages/84/1a/8fa5752f725b8872010627bd10e1aedccdb406c3b4118ec3fe127155284e/grimp-3.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a73a42a43e268ac5b196386beae1ec646f4572409e731bccf2a99ab4ed5c46bf", size = 2311124, upload-time = "2025-10-09T09:50:06.477Z" }, + { url = "https://files.pythonhosted.org/packages/83/a0/02d6b2a86289a4ac73f44f59aaee43c1dc936c984204c73d2affe4570eb6/grimp-3.12-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:af990af7d5e64f484d12cdefacfaaed4ea9418ac4d0a5a928953fd91aaf8df80", size = 2354216, upload-time = "2025-10-09T09:50:19.114Z" }, + { url = "https://files.pythonhosted.org/packages/7b/48/0368289f5bbdf943a48305824b30411b35ef2c7cd8edf2bad48d67b3897e/grimp-3.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:82ee28c1e9835572af2c733f7e5913a44193c53ae8ca488039164593b4a750fa", size = 2348372, upload-time = "2025-10-09T09:50:37.479Z" }, + { url = "https://files.pythonhosted.org/packages/26/73/b4f90b4926791d720f6069fc8c8b3e204721d1db839a1c00fbcee1e2a36d/grimp-3.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:afdceaea00e305909cb30d68e91b94fcf71d1a7234052549ea31148785a03a52", size = 2361167, upload-time = "2025-10-09T09:50:51.733Z" }, + { url = "https://files.pythonhosted.org/packages/0b/ae/94d34c732d531c7165c8942d7995495aac64e9bb5c28cc6751349eacdcde/grimp-3.12-cp312-cp312-win32.whl", hash = "sha256:40f8e048254d2437dffcd383d2301a82c35d9a3082e878b707d87a6e8c539614", size = 1747179, upload-time = "2025-10-09T09:51:15.224Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cd/48bc396ee2f36e72d5c50ba8b4d7f817fc2cdac7b9ab77d2b097f50a4447/grimp-3.12-cp312-cp312-win_amd64.whl", hash = "sha256:199172d17f22199bf400a0bd5c4985784622201e887a023fe799ca3f3437dedf", size = 1850691, upload-time = "2025-10-09T09:51:05.984Z" }, + { url = "https://files.pythonhosted.org/packages/d9/31/c72e53a46692dc8358cff1af1a9494430a0fecd4c3f2d0d8e9c2eb5e828d/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:567d037a3db083e54bee621daba59a2e01fd1391364ae0a0c737995f6eed910b", size = 2131392, upload-time = "2025-10-09T09:48:46.857Z" }, + { url = "https://files.pythonhosted.org/packages/39/10/15e43be32734baaebeee090dca16f06ea5ba933b209b8e1c0d5986dabb32/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9b4cc756c91c3d8582ee70b5e013c0e34fdb31c7f808cefe9d15509c45fec31e", size = 2092481, upload-time = "2025-10-09T09:49:00.754Z" }, + { url = "https://files.pythonhosted.org/packages/a1/4a/c9349dee284c2d9384714741896f0f84a1d66011a69cdc364e4d94e188b1/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bd47f9a8619cb8966f18cb6faf5f6cb8d35ade99312477dd8e9de3a9ae4cb7", size = 2242260, upload-time = "2025-10-09T09:49:37.183Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/63/3935823f89c12320840bbf018858eeaca7d5285f9769a48921587a88adeb/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f30e01855c67a39857c87e6c0eafe5e8891010a35e06cf2145f2cfce8ea9780", size = 2422371, upload-time = "2025-10-09T09:49:14.616Z" }, + { url = "https://files.pythonhosted.org/packages/71/8e/5a75c2335a2dc61738b19318dcdd16392015a984211e3d0b9f6679dc6c89/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d07e825f6b052186dabd8dbbcc7e008a3b56e551725e2ba47169fe1e4bde76ac", size = 2304257, upload-time = "2025-10-09T09:49:26.908Z" }, + { url = "https://files.pythonhosted.org/packages/40/99/462d86bc9401a39859f272b867331a678f4b5324a539dc771bdae6d36309/grimp-3.12-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f1a1289d4282be2891ada75ec5d3099e856518c4236b1196e367b630485f8ce", size = 2169360, upload-time = "2025-10-09T09:49:46.575Z" }, + { url = "https://files.pythonhosted.org/packages/d0/07/6d2929f05dae189265633588819d990df35644ad74b6ec74207091dff18d/grimp-3.12-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:85136b555aeb7d3965fdb40af4e4af2011f911b0fde8c20979bf4db7b06455f5", size = 2312280, upload-time = "2025-10-09T09:50:13.491Z" }, + { url = "https://files.pythonhosted.org/packages/5c/47/7e49417e2c496da0b6141e711dca40726d2b30a0adc6db9d04b74c7bafa7/grimp-3.12-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:963efd6ec86e7b47fde835b2526b6be7a3f489857a1cd47a747c94b3e670550a", size = 2354449, upload-time = "2025-10-09T09:50:27.596Z" }, + { url = "https://files.pythonhosted.org/packages/2c/08/2e1db56797e4e26334b3ee4ef1a5fbf56155d74a0318215ed4dcad02ef43/grimp-3.12-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:c9e2ee478b66f0e20c92af6123142ffd6b604c36e9b3a8d391ea9172cc18b6b3", size = 2350545, upload-time = "2025-10-09T09:50:45.623Z" }, + { url = "https://files.pythonhosted.org/packages/37/78/53594064f11b0ae9e72b3e9df5c055f00c5bff44962f7b777846504fc50d/grimp-3.12-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e8826362d4e403aa2e03d480e3e4d64284a6b6ccafc2c5777bb2bed2535bdc4e", size = 2361926, upload-time = "2025-10-09T09:50:58.605Z" }, ] [[package]] @@ -2459,30 +2495,33 @@ wheels = [ [[package]] name = "grpcio" -version = "1.74.0" +version = "1.75.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/38/b4/35feb8f7cab7239c5b94bd2db71abb3d6adb5f335ad8f131abb6060840b6/grpcio-1.74.0.tar.gz", hash = "sha256:80d1f4fbb35b0742d3e3d3bb654b7381cd5f015f8497279a1e9c21ba623e01b1", size = 12756048, upload-time = "2025-07-24T18:54:23.039Z" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9d/f7/8963848164c7604efb3a3e6ee457fdb3a469653e19002bd24742473254f8/grpcio-1.75.1.tar.gz", hash = "sha256:3e81d89ece99b9ace23a6916880baca613c03a799925afb2857887efa8b1b3d2", size = 12731327, upload-time = "2025-09-26T09:03:36.887Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/77/b2f06db9f240a5abeddd23a0e49eae2b6ac54d85f0e5267784ce02269c3b/grpcio-1.74.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:69e1a8180868a2576f02356565f16635b99088da7df3d45aaa7e24e73a054e31", size = 5487368, upload-time = "2025-07-24T18:53:03.548Z" }, - { url = 
"https://files.pythonhosted.org/packages/48/99/0ac8678a819c28d9a370a663007581744a9f2a844e32f0fa95e1ddda5b9e/grpcio-1.74.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8efe72fde5500f47aca1ef59495cb59c885afe04ac89dd11d810f2de87d935d4", size = 10999804, upload-time = "2025-07-24T18:53:05.095Z" }, - { url = "https://files.pythonhosted.org/packages/45/c6/a2d586300d9e14ad72e8dc211c7aecb45fe9846a51e558c5bca0c9102c7f/grpcio-1.74.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a8f0302f9ac4e9923f98d8e243939a6fb627cd048f5cd38595c97e38020dffce", size = 5987667, upload-time = "2025-07-24T18:53:07.157Z" }, - { url = "https://files.pythonhosted.org/packages/c9/57/5f338bf56a7f22584e68d669632e521f0de460bb3749d54533fc3d0fca4f/grpcio-1.74.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f609a39f62a6f6f05c7512746798282546358a37ea93c1fcbadf8b2fed162e3", size = 6655612, upload-time = "2025-07-24T18:53:09.244Z" }, - { url = "https://files.pythonhosted.org/packages/82/ea/a4820c4c44c8b35b1903a6c72a5bdccec92d0840cf5c858c498c66786ba5/grpcio-1.74.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c98e0b7434a7fa4e3e63f250456eaef52499fba5ae661c58cc5b5477d11e7182", size = 6219544, upload-time = "2025-07-24T18:53:11.221Z" }, - { url = "https://files.pythonhosted.org/packages/a4/17/0537630a921365928f5abb6d14c79ba4dcb3e662e0dbeede8af4138d9dcf/grpcio-1.74.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:662456c4513e298db6d7bd9c3b8df6f75f8752f0ba01fb653e252ed4a59b5a5d", size = 6334863, upload-time = "2025-07-24T18:53:12.925Z" }, - { url = "https://files.pythonhosted.org/packages/e2/a6/85ca6cb9af3f13e1320d0a806658dca432ff88149d5972df1f7b51e87127/grpcio-1.74.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3d14e3c4d65e19d8430a4e28ceb71ace4728776fd6c3ce34016947474479683f", size = 7019320, upload-time = "2025-07-24T18:53:15.002Z" }, - { url = "https://files.pythonhosted.org/packages/4f/a7/fe2beab970a1e25d2eff108b3cf4f7d9a53c185106377a3d1989216eba45/grpcio-1.74.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bf949792cee20d2078323a9b02bacbbae002b9e3b9e2433f2741c15bdeba1c4", size = 6514228, upload-time = "2025-07-24T18:53:16.999Z" }, - { url = "https://files.pythonhosted.org/packages/6a/c2/2f9c945c8a248cebc3ccda1b7a1bf1775b9d7d59e444dbb18c0014e23da6/grpcio-1.74.0-cp311-cp311-win32.whl", hash = "sha256:55b453812fa7c7ce2f5c88be3018fb4a490519b6ce80788d5913f3f9d7da8c7b", size = 3817216, upload-time = "2025-07-24T18:53:20.564Z" }, - { url = "https://files.pythonhosted.org/packages/ff/d1/a9cf9c94b55becda2199299a12b9feef0c79946b0d9d34c989de6d12d05d/grpcio-1.74.0-cp311-cp311-win_amd64.whl", hash = "sha256:86ad489db097141a907c559988c29718719aa3e13370d40e20506f11b4de0d11", size = 4495380, upload-time = "2025-07-24T18:53:22.058Z" }, - { url = "https://files.pythonhosted.org/packages/4c/5d/e504d5d5c4469823504f65687d6c8fb97b7f7bf0b34873b7598f1df24630/grpcio-1.74.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8533e6e9c5bd630ca98062e3a1326249e6ada07d05acf191a77bc33f8948f3d8", size = 5445551, upload-time = "2025-07-24T18:53:23.641Z" }, - { url = "https://files.pythonhosted.org/packages/43/01/730e37056f96f2f6ce9f17999af1556df62ee8dab7fa48bceeaab5fd3008/grpcio-1.74.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:2918948864fec2a11721d91568effffbe0a02b23ecd57f281391d986847982f6", size = 10979810, upload-time = "2025-07-24T18:53:25.349Z" }, - { url = 
"https://files.pythonhosted.org/packages/79/3d/09fd100473ea5c47083889ca47ffd356576173ec134312f6aa0e13111dee/grpcio-1.74.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:60d2d48b0580e70d2e1954d0d19fa3c2e60dd7cbed826aca104fff518310d1c5", size = 5941946, upload-time = "2025-07-24T18:53:27.387Z" }, - { url = "https://files.pythonhosted.org/packages/8a/99/12d2cca0a63c874c6d3d195629dcd85cdf5d6f98a30d8db44271f8a97b93/grpcio-1.74.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3601274bc0523f6dc07666c0e01682c94472402ac2fd1226fd96e079863bfa49", size = 6621763, upload-time = "2025-07-24T18:53:29.193Z" }, - { url = "https://files.pythonhosted.org/packages/9d/2c/930b0e7a2f1029bbc193443c7bc4dc2a46fedb0203c8793dcd97081f1520/grpcio-1.74.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:176d60a5168d7948539def20b2a3adcce67d72454d9ae05969a2e73f3a0feee7", size = 6180664, upload-time = "2025-07-24T18:53:30.823Z" }, - { url = "https://files.pythonhosted.org/packages/db/d5/ff8a2442180ad0867717e670f5ec42bfd8d38b92158ad6bcd864e6d4b1ed/grpcio-1.74.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e759f9e8bc908aaae0412642afe5416c9f983a80499448fcc7fab8692ae044c3", size = 6301083, upload-time = "2025-07-24T18:53:32.454Z" }, - { url = "https://files.pythonhosted.org/packages/b0/ba/b361d390451a37ca118e4ec7dccec690422e05bc85fba2ec72b06cefec9f/grpcio-1.74.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9e7c4389771855a92934b2846bd807fc25a3dfa820fd912fe6bd8136026b2707", size = 6994132, upload-time = "2025-07-24T18:53:34.506Z" }, - { url = "https://files.pythonhosted.org/packages/3b/0c/3a5fa47d2437a44ced74141795ac0251bbddeae74bf81df3447edd767d27/grpcio-1.74.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cce634b10aeab37010449124814b05a62fb5f18928ca878f1bf4750d1f0c815b", size = 6489616, upload-time = "2025-07-24T18:53:36.217Z" }, - { url = "https://files.pythonhosted.org/packages/ae/95/ab64703b436d99dc5217228babc76047d60e9ad14df129e307b5fec81fd0/grpcio-1.74.0-cp312-cp312-win32.whl", hash = "sha256:885912559974df35d92219e2dc98f51a16a48395f37b92865ad45186f294096c", size = 3807083, upload-time = "2025-07-24T18:53:37.911Z" }, - { url = "https://files.pythonhosted.org/packages/84/59/900aa2445891fc47a33f7d2f76e00ca5d6ae6584b20d19af9c06fa09bf9a/grpcio-1.74.0-cp312-cp312-win_amd64.whl", hash = "sha256:42f8fee287427b94be63d916c90399ed310ed10aadbf9e2e5538b3e497d269bc", size = 4490123, upload-time = "2025-07-24T18:53:39.528Z" }, + { url = "https://files.pythonhosted.org/packages/0c/3c/35ca9747473a306bfad0cee04504953f7098527cd112a4ab55c55af9e7bd/grpcio-1.75.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:573855ca2e58e35032aff30bfbd1ee103fbcf4472e4b28d4010757700918e326", size = 5709761, upload-time = "2025-09-26T09:01:28.528Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2c/ecbcb4241e4edbe85ac2663f885726fea0e947767401288b50d8fdcb9200/grpcio-1.75.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:6a4996a2c8accc37976dc142d5991adf60733e223e5c9a2219e157dc6a8fd3a2", size = 11496691, upload-time = "2025-09-26T09:01:31.214Z" }, + { url = "https://files.pythonhosted.org/packages/81/40/bc07aee2911f0d426fa53fe636216100c31a8ea65a400894f280274cb023/grpcio-1.75.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b1ea1bbe77ecbc1be00af2769f4ae4a88ce93be57a4f3eebd91087898ed749f9", size = 6296084, upload-time = "2025-09-26T09:01:34.596Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/d1/10c067f6c67396cbf46448b80f27583b5e8c4b46cdfbe18a2a02c2c2f290/grpcio-1.75.1-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:e5b425aee54cc5e3e3c58f00731e8a33f5567965d478d516d35ef99fd648ab68", size = 6950403, upload-time = "2025-09-26T09:01:36.736Z" }, + { url = "https://files.pythonhosted.org/packages/3f/42/5f628abe360b84dfe8dd8f32be6b0606dc31dc04d3358eef27db791ea4d5/grpcio-1.75.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0049a7bf547dafaeeb1db17079ce79596c298bfe308fc084d023c8907a845b9a", size = 6470166, upload-time = "2025-09-26T09:01:39.474Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/a24035080251324019882ee2265cfde642d6476c0cf8eb207fc693fcebdc/grpcio-1.75.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b8ea230c7f77c0a1a3208a04a1eda164633fb0767b4cefd65a01079b65e5b1f", size = 7107828, upload-time = "2025-09-26T09:01:41.782Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f8/d18b984c1c9ba0318e3628dbbeb6af77a5007f02abc378c845070f2d3edd/grpcio-1.75.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:36990d629c3c9fb41e546414e5af52d0a7af37ce7113d9682c46d7e2919e4cca", size = 8045421, upload-time = "2025-09-26T09:01:45.835Z" }, + { url = "https://files.pythonhosted.org/packages/7e/b6/4bf9aacff45deca5eac5562547ed212556b831064da77971a4e632917da3/grpcio-1.75.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b10ad908118d38c2453ade7ff790e5bce36580c3742919007a2a78e3a1e521ca", size = 7503290, upload-time = "2025-09-26T09:01:49.28Z" }, + { url = "https://files.pythonhosted.org/packages/3b/15/d8d69d10223cb54c887a2180bd29fe5fa2aec1d4995c8821f7aa6eaf72e4/grpcio-1.75.1-cp311-cp311-win32.whl", hash = "sha256:d6be2b5ee7bea656c954dcf6aa8093c6f0e6a3ef9945c99d99fcbfc88c5c0bfe", size = 3950631, upload-time = "2025-09-26T09:01:51.23Z" }, + { url = "https://files.pythonhosted.org/packages/8a/40/7b8642d45fff6f83300c24eaac0380a840e5e7fe0e8d80afd31b99d7134e/grpcio-1.75.1-cp311-cp311-win_amd64.whl", hash = "sha256:61c692fb05956b17dd6d1ab480f7f10ad0536dba3bc8fd4e3c7263dc244ed772", size = 4646131, upload-time = "2025-09-26T09:01:53.266Z" }, + { url = "https://files.pythonhosted.org/packages/3a/81/42be79e73a50aaa20af66731c2defeb0e8c9008d9935a64dd8ea8e8c44eb/grpcio-1.75.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:7b888b33cd14085d86176b1628ad2fcbff94cfbbe7809465097aa0132e58b018", size = 5668314, upload-time = "2025-09-26T09:01:55.424Z" }, + { url = "https://files.pythonhosted.org/packages/c5/a7/3686ed15822fedc58c22f82b3a7403d9faf38d7c33de46d4de6f06e49426/grpcio-1.75.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:8775036efe4ad2085975531d221535329f5dac99b6c2a854a995456098f99546", size = 11476125, upload-time = "2025-09-26T09:01:57.927Z" }, + { url = "https://files.pythonhosted.org/packages/14/85/21c71d674f03345ab183c634ecd889d3330177e27baea8d5d247a89b6442/grpcio-1.75.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb658f703468d7fbb5dcc4037c65391b7dc34f808ac46ed9136c24fc5eeb041d", size = 6246335, upload-time = "2025-09-26T09:02:00.76Z" }, + { url = "https://files.pythonhosted.org/packages/fd/db/3beb661bc56a385ae4fa6b0e70f6b91ac99d47afb726fe76aaff87ebb116/grpcio-1.75.1-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4b7177a1cdb3c51b02b0c0a256b0a72fdab719600a693e0e9037949efffb200b", size = 6916309, upload-time = "2025-09-26T09:02:02.894Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/9c/eda9fe57f2b84343d44c1b66cf3831c973ba29b078b16a27d4587a1fdd47/grpcio-1.75.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7d4fa6ccc3ec2e68a04f7b883d354d7fea22a34c44ce535a2f0c0049cf626ddf", size = 6435419, upload-time = "2025-09-26T09:02:05.055Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b8/090c98983e0a9d602e3f919a6e2d4e470a8b489452905f9a0fa472cac059/grpcio-1.75.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d86880ecaeb5b2f0a8afa63824de93adb8ebe4e49d0e51442532f4e08add7d6", size = 7064893, upload-time = "2025-09-26T09:02:07.275Z" }, + { url = "https://files.pythonhosted.org/packages/ec/c0/6d53d4dbbd00f8bd81571f5478d8a95528b716e0eddb4217cc7cb45aae5f/grpcio-1.75.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a8041d2f9e8a742aeae96f4b047ee44e73619f4f9d24565e84d5446c623673b6", size = 8011922, upload-time = "2025-09-26T09:02:09.527Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7c/48455b2d0c5949678d6982c3e31ea4d89df4e16131b03f7d5c590811cbe9/grpcio-1.75.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3652516048bf4c314ce12be37423c79829f46efffb390ad64149a10c6071e8de", size = 7466181, upload-time = "2025-09-26T09:02:12.279Z" }, + { url = "https://files.pythonhosted.org/packages/fd/12/04a0e79081e3170b6124f8cba9b6275871276be06c156ef981033f691880/grpcio-1.75.1-cp312-cp312-win32.whl", hash = "sha256:44b62345d8403975513af88da2f3d5cc76f73ca538ba46596f92a127c2aea945", size = 3938543, upload-time = "2025-09-26T09:02:14.77Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d7/11350d9d7fb5adc73d2b0ebf6ac1cc70135577701e607407fe6739a90021/grpcio-1.75.1-cp312-cp312-win_amd64.whl", hash = "sha256:b1e191c5c465fa777d4cafbaacf0c01e0d5278022082c0abbd2ee1d6454ed94d", size = 4641938, upload-time = "2025-09-26T09:02:16.927Z" }, ] [[package]] @@ -2564,17 +2603,17 @@ wheels = [ [[package]] name = "hf-xet" -version = "1.1.9" +version = "1.1.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/23/0f/5b60fc28ee7f8cc17a5114a584fd6b86e11c3e0a6e142a7f97a161e9640a/hf_xet-1.1.9.tar.gz", hash = "sha256:c99073ce404462e909f1d5839b2d14a3827b8fe75ed8aed551ba6609c026c803", size = 484242, upload-time = "2025-08-27T23:05:19.441Z" } +sdist = { url = "https://files.pythonhosted.org/packages/74/31/feeddfce1748c4a233ec1aa5b7396161c07ae1aa9b7bdbc9a72c3c7dd768/hf_xet-1.1.10.tar.gz", hash = "sha256:408aef343800a2102374a883f283ff29068055c111f003ff840733d3b715bb97", size = 487910, upload-time = "2025-09-12T20:10:27.12Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/de/12/56e1abb9a44cdef59a411fe8a8673313195711b5ecce27880eb9c8fa90bd/hf_xet-1.1.9-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:a3b6215f88638dd7a6ff82cb4e738dcbf3d863bf667997c093a3c990337d1160", size = 2762553, upload-time = "2025-08-27T23:05:15.153Z" }, - { url = "https://files.pythonhosted.org/packages/3a/e6/2d0d16890c5f21b862f5df3146519c182e7f0ae49b4b4bf2bd8a40d0b05e/hf_xet-1.1.9-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:9b486de7a64a66f9a172f4b3e0dfe79c9f0a93257c501296a2521a13495a698a", size = 2623216, upload-time = "2025-08-27T23:05:13.778Z" }, - { url = "https://files.pythonhosted.org/packages/81/42/7e6955cf0621e87491a1fb8cad755d5c2517803cea174229b0ec00ff0166/hf_xet-1.1.9-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4c5a840c2c4e6ec875ed13703a60e3523bc7f48031dfd750923b2a4d1a5fc3c", size = 3186789, upload-time = 
"2025-08-27T23:05:12.368Z" }, - { url = "https://files.pythonhosted.org/packages/df/8b/759233bce05457f5f7ec062d63bbfd2d0c740b816279eaaa54be92aa452a/hf_xet-1.1.9-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:96a6139c9e44dad1c52c52520db0fffe948f6bce487cfb9d69c125f254bb3790", size = 3088747, upload-time = "2025-08-27T23:05:10.439Z" }, - { url = "https://files.pythonhosted.org/packages/6c/3c/28cc4db153a7601a996985bcb564f7b8f5b9e1a706c7537aad4b4809f358/hf_xet-1.1.9-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ad1022e9a998e784c97b2173965d07fe33ee26e4594770b7785a8cc8f922cd95", size = 3251429, upload-time = "2025-08-27T23:05:16.471Z" }, - { url = "https://files.pythonhosted.org/packages/84/17/7caf27a1d101bfcb05be85850d4aa0a265b2e1acc2d4d52a48026ef1d299/hf_xet-1.1.9-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:86754c2d6d5afb11b0a435e6e18911a4199262fe77553f8c50d75e21242193ea", size = 3354643, upload-time = "2025-08-27T23:05:17.828Z" }, - { url = "https://files.pythonhosted.org/packages/cd/50/0c39c9eed3411deadcc98749a6699d871b822473f55fe472fad7c01ec588/hf_xet-1.1.9-cp37-abi3-win_amd64.whl", hash = "sha256:5aad3933de6b725d61d51034e04174ed1dce7a57c63d530df0014dea15a40127", size = 2804797, upload-time = "2025-08-27T23:05:20.77Z" }, + { url = "https://files.pythonhosted.org/packages/f7/a2/343e6d05de96908366bdc0081f2d8607d61200be2ac802769c4284cc65bd/hf_xet-1.1.10-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:686083aca1a6669bc85c21c0563551cbcdaa5cf7876a91f3d074a030b577231d", size = 2761466, upload-time = "2025-09-12T20:10:22.836Z" }, + { url = "https://files.pythonhosted.org/packages/31/f9/6215f948ac8f17566ee27af6430ea72045e0418ce757260248b483f4183b/hf_xet-1.1.10-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:71081925383b66b24eedff3013f8e6bbd41215c3338be4b94ba75fd75b21513b", size = 2623807, upload-time = "2025-09-12T20:10:21.118Z" }, + { url = "https://files.pythonhosted.org/packages/15/07/86397573efefff941e100367bbda0b21496ffcdb34db7ab51912994c32a2/hf_xet-1.1.10-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6bceb6361c80c1cc42b5a7b4e3efd90e64630bcf11224dcac50ef30a47e435", size = 3186960, upload-time = "2025-09-12T20:10:19.336Z" }, + { url = "https://files.pythonhosted.org/packages/01/a7/0b2e242b918cc30e1f91980f3c4b026ff2eedaf1e2ad96933bca164b2869/hf_xet-1.1.10-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eae7c1fc8a664e54753ffc235e11427ca61f4b0477d757cc4eb9ae374b69f09c", size = 3087167, upload-time = "2025-09-12T20:10:17.255Z" }, + { url = "https://files.pythonhosted.org/packages/4a/25/3e32ab61cc7145b11eee9d745988e2f0f4fafda81b25980eebf97d8cff15/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0a0005fd08f002180f7a12d4e13b22be277725bc23ed0529f8add5c7a6309c06", size = 3248612, upload-time = "2025-09-12T20:10:24.093Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3d/ab7109e607ed321afaa690f557a9ada6d6d164ec852fd6bf9979665dc3d6/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f900481cf6e362a6c549c61ff77468bd59d6dd082f3170a36acfef2eb6a6793f", size = 3353360, upload-time = "2025-09-12T20:10:25.563Z" }, + { url = "https://files.pythonhosted.org/packages/ee/0e/471f0a21db36e71a2f1752767ad77e92d8cde24e974e03d662931b1305ec/hf_xet-1.1.10-cp37-abi3-win_amd64.whl", hash = "sha256:5f54b19cc347c13235ae7ee98b330c26dd65ef1df47e5316ffb1e87713ca7045", size = 2804691, upload-time = "2025-09-12T20:10:28.433Z" }, ] [[package]] @@ -2664,24 +2703,24 @@ wheels = [ [[package]] name = "httptools" -version = "0.6.4" 
+version = "0.7.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639, upload-time = "2024-10-16T19:45:08.902Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961, upload-time = "2025-10-10T03:55:08.559Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/26/bb526d4d14c2774fe07113ca1db7255737ffbb119315839af2065abfdac3/httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069", size = 199029, upload-time = "2024-10-16T19:44:18.427Z" }, - { url = "https://files.pythonhosted.org/packages/a6/17/3e0d3e9b901c732987a45f4f94d4e2c62b89a041d93db89eafb262afd8d5/httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a", size = 103492, upload-time = "2024-10-16T19:44:19.515Z" }, - { url = "https://files.pythonhosted.org/packages/b7/24/0fe235d7b69c42423c7698d086d4db96475f9b50b6ad26a718ef27a0bce6/httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975", size = 462891, upload-time = "2024-10-16T19:44:21.067Z" }, - { url = "https://files.pythonhosted.org/packages/b1/2f/205d1f2a190b72da6ffb5f41a3736c26d6fa7871101212b15e9b5cd8f61d/httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636", size = 459788, upload-time = "2024-10-16T19:44:22.958Z" }, - { url = "https://files.pythonhosted.org/packages/6e/4c/d09ce0eff09057a206a74575ae8f1e1e2f0364d20e2442224f9e6612c8b9/httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721", size = 433214, upload-time = "2024-10-16T19:44:24.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/d2/84c9e23edbccc4a4c6f96a1b8d99dfd2350289e94f00e9ccc7aadde26fb5/httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988", size = 434120, upload-time = "2024-10-16T19:44:26.295Z" }, - { url = "https://files.pythonhosted.org/packages/d0/46/4d8e7ba9581416de1c425b8264e2cadd201eb709ec1584c381f3e98f51c1/httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17", size = 88565, upload-time = "2024-10-16T19:44:29.188Z" }, - { url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683, upload-time = "2024-10-16T19:44:30.175Z" }, - { url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337, upload-time = 
"2024-10-16T19:44:31.786Z" }, - { url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796, upload-time = "2024-10-16T19:44:32.825Z" }, - { url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 510837, upload-time = "2024-10-16T19:44:33.974Z" }, - { url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289, upload-time = "2024-10-16T19:44:35.111Z" }, - { url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779, upload-time = "2024-10-16T19:44:36.253Z" }, - { url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634, upload-time = "2024-10-16T19:44:37.357Z" }, + { url = "https://files.pythonhosted.org/packages/9c/08/17e07e8d89ab8f343c134616d72eebfe03798835058e2ab579dcc8353c06/httptools-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657", size = 206521, upload-time = "2025-10-10T03:54:31.002Z" }, + { url = "https://files.pythonhosted.org/packages/aa/06/c9c1b41ff52f16aee526fd10fbda99fa4787938aa776858ddc4a1ea825ec/httptools-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70", size = 110375, upload-time = "2025-10-10T03:54:31.941Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cc/10935db22fda0ee34c76f047590ca0a8bd9de531406a3ccb10a90e12ea21/httptools-0.7.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df", size = 456621, upload-time = "2025-10-10T03:54:33.176Z" }, + { url = "https://files.pythonhosted.org/packages/0e/84/875382b10d271b0c11aa5d414b44f92f8dd53e9b658aec338a79164fa548/httptools-0.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e", size = 454954, upload-time = "2025-10-10T03:54:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/30/e1/44f89b280f7e46c0b1b2ccee5737d46b3bb13136383958f20b580a821ca0/httptools-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274", size = 440175, upload-time = "2025-10-10T03:54:35.942Z" }, + { url = "https://files.pythonhosted.org/packages/6f/7e/b9287763159e700e335028bc1824359dc736fa9b829dacedace91a39b37e/httptools-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec", size = 440310, upload-time = "2025-10-10T03:54:37.1Z" }, + { url = "https://files.pythonhosted.org/packages/b3/07/5b614f592868e07f5c94b1f301b5e14a21df4e8076215a3bccb830a687d8/httptools-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb", size = 86875, upload-time = "2025-10-10T03:54:38.421Z" }, + { url = "https://files.pythonhosted.org/packages/53/7f/403e5d787dc4942316e515e949b0c8a013d84078a915910e9f391ba9b3ed/httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5", size = 206280, upload-time = "2025-10-10T03:54:39.274Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0d/7f3fd28e2ce311ccc998c388dd1c53b18120fda3b70ebb022b135dc9839b/httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5", size = 110004, upload-time = "2025-10-10T03:54:40.403Z" }, + { url = "https://files.pythonhosted.org/packages/84/a6/b3965e1e146ef5762870bbe76117876ceba51a201e18cc31f5703e454596/httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03", size = 517655, upload-time = "2025-10-10T03:54:41.347Z" }, + { url = "https://files.pythonhosted.org/packages/11/7d/71fee6f1844e6fa378f2eddde6c3e41ce3a1fb4b2d81118dd544e3441ec0/httptools-0.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2", size = 511440, upload-time = "2025-10-10T03:54:42.452Z" }, + { url = "https://files.pythonhosted.org/packages/22/a5/079d216712a4f3ffa24af4a0381b108aa9c45b7a5cc6eb141f81726b1823/httptools-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362", size = 495186, upload-time = "2025-10-10T03:54:43.937Z" }, + { url = "https://files.pythonhosted.org/packages/e9/9e/025ad7b65278745dee3bd0ebf9314934c4592560878308a6121f7f812084/httptools-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c", size = 499192, upload-time = "2025-10-10T03:54:45.003Z" }, + { url = "https://files.pythonhosted.org/packages/6d/de/40a8f202b987d43afc4d54689600ff03ce65680ede2f31df348d7f368b8f/httptools-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321", size = 86694, upload-time = "2025-10-10T03:54:45.923Z" }, ] [[package]] @@ -2710,16 +2749,16 @@ socks = [ [[package]] name = "httpx-sse" -version = "0.4.1" +version = "0.4.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6e/fa/66bd985dd0b7c109a3bcb89272ee0bfb7e2b4d06309ad7b38ff866734b2a/httpx_sse-0.4.1.tar.gz", hash = "sha256:8f44d34414bc7b21bf3602713005c5df4917884f76072479b21f68befa4ea26e", size = 12998, upload-time = "2025-06-24T13:21:05.71Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/4c/751061ffa58615a32c31b2d82e8482be8dd4a89154f003147acee90f2be9/httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d", size = 15943, upload-time = "2025-10-10T21:48:22.271Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/25/0a/6269e3473b09aed2dab8aa1a600c70f31f00ae1349bee30658f7e358a159/httpx_sse-0.4.1-py3-none-any.whl", hash = "sha256:cba42174344c3a5b06f255ce65b350880f962d99ead85e776f23c6618a377a37", size = 8054, upload-time = "2025-06-24T13:21:04.772Z" }, + { url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960, upload-time = "2025-10-10T21:48:21.158Z" }, ] [[package]] name = "huggingface-hub" -version = "0.34.4" +version = "0.35.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -2731,9 +2770,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/45/c9/bdbe19339f76d12985bc03572f330a01a93c04dffecaaea3061bdd7fb892/huggingface_hub-0.34.4.tar.gz", hash = "sha256:a4228daa6fb001be3f4f4bdaf9a0db00e1739235702848df00885c9b5742c85c", size = 459768, upload-time = "2025-08-08T09:14:52.365Z" } +sdist = { url = "https://files.pythonhosted.org/packages/10/7e/a0a97de7c73671863ca6b3f61fa12518caf35db37825e43d63a70956738c/huggingface_hub-0.35.3.tar.gz", hash = "sha256:350932eaa5cc6a4747efae85126ee220e4ef1b54e29d31c3b45c5612ddf0b32a", size = 461798, upload-time = "2025-09-29T14:29:58.625Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/7b/bb06b061991107cd8783f300adff3e7b7f284e330fd82f507f2a1417b11d/huggingface_hub-0.34.4-py3-none-any.whl", hash = "sha256:9b365d781739c93ff90c359844221beef048403f1bc1f1c123c191257c3c890a", size = 561452, upload-time = "2025-08-08T09:14:50.159Z" }, + { url = "https://files.pythonhosted.org/packages/31/a0/651f93d154cb72323358bf2bbae3e642bdb5d2f1bfc874d096f7cb159fa0/huggingface_hub-0.35.3-py3-none-any.whl", hash = "sha256:0e3a01829c19d86d03793e4577816fe3bdfc1602ac62c7fb220d593d351224ba", size = 564262, upload-time = "2025-09-29T14:29:55.813Z" }, ] [[package]] @@ -2759,38 +2798,38 @@ wheels = [ [[package]] name = "hypothesis" -version = "6.138.15" +version = "6.140.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, { name = "sortedcontainers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3b/68/adc338edec178cf6c08b4843ea2b2d639d47bed4b06ea9331433b71acc0a/hypothesis-6.138.15.tar.gz", hash = "sha256:6b0e1aa182eacde87110995a3543530d69ef411f642162a656efcd46c2823ad1", size = 466116, upload-time = "2025-09-08T05:34:15.956Z" } +sdist = { url = "https://files.pythonhosted.org/packages/18/7f/946343e32881b56adc0eba64e428ad2f85251f9ef16e3e4ec1b6ab80199b/hypothesis-6.140.3.tar.gz", hash = "sha256:4f4a09bf77af21e0cc3dffed1ea639812dc75d38f81308ec9fb0e33f8557b0cb", size = 466925, upload-time = "2025-10-04T22:29:44.499Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/49/911eb0cd17884a7a6f510e78acf0a70592e414d194695a0c7c1db91645b2/hypothesis-6.138.15-py3-none-any.whl", hash = "sha256:b7cf743d461c319eb251a13c8e1dcf00f4ef7085e4ab5bf5abf102b2a5ffd694", size = 533621, upload-time = "2025-09-08T05:34:12.272Z" }, + { url = "https://files.pythonhosted.org/packages/65/2a/0553ac2a8af432df92f2ffc05ca97e7ed64e00c97a371b019ae2690de325/hypothesis-6.140.3-py3-none-any.whl", hash = "sha256:a2cfff51641a58a56081f5c90ae1da6ccf3d043404f411805f7f0e0d75742d0e", size = 534534, upload-time = "2025-10-04T22:29:40.635Z" }, ] [[package]] name = "idna" -version = "3.10" +version = "3.11" source = { 
registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] [[package]] name = "import-linter" -version = "2.4" +version = "2.5.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "grimp" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/db/33/e3c29beb4d8a33cfacdbe2858a3a4533694a0c1d0c060daaa761eff6d929/import_linter-2.4.tar.gz", hash = "sha256:4888fde83dd18bdbecd57ea1a98a1f3d52c6b6507d700f89f8678b44306c0ab4", size = 29942, upload-time = "2025-08-15T06:57:23.423Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/fd/49913b98fdeb5a8a120ca756abfc9aa7fdef7c20da1d728173e98ce11160/import_linter-2.5.2.tar.gz", hash = "sha256:d8f2dc6432975cc35edc4cc0bfcf1b811f05500b377ce0c3f62729d68f46c698", size = 159664, upload-time = "2025-10-09T10:53:24.635Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/11/2c108fc1138e506762db332c4a7ebc589cb379bc443939a81ec738b4cf73/import_linter-2.4-py3-none-any.whl", hash = "sha256:2ad6d5a164cdcd5ebdda4172cf0169f73dde1a8925ef7216672c321cd38f8499", size = 42355, upload-time = "2025-08-15T06:57:22.221Z" }, + { url = "https://files.pythonhosted.org/packages/c1/f4/f20eeb9e6ab178ce011457cd936877202556f14b7af3ef2b3c3e26f3758a/import_linter-2.5.2-py3-none-any.whl", hash = "sha256:a70b64c2451dc6b96ff9ef5af4e3f6a2c8b63532a66a3c96a7c31ca086b10003", size = 44140, upload-time = "2025-10-09T10:53:23.367Z" }, ] [[package]] @@ -2870,34 +2909,35 @@ wheels = [ [[package]] name = "jiter" -version = "0.10.0" +version = "0.11.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/9d/ae7ddb4b8ab3fb1b51faf4deb36cb48a4fbbd7cb36bad6a5fca4741306f7/jiter-0.10.0.tar.gz", hash = "sha256:07a7142c38aacc85194391108dc91b5b57093c978a9932bd86a36862759d9500", size = 162759, upload-time = "2025-05-18T19:04:59.73Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/c0/a3bb4cc13aced219dd18191ea66e874266bd8aa7b96744e495e1c733aa2d/jiter-0.11.0.tar.gz", hash = "sha256:1d9637eaf8c1d6a63d6562f2a6e5ab3af946c66037eb1b894e8fad75422266e4", size = 167094, upload-time = "2025-09-15T09:20:38.212Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/dd/6cefc6bd68b1c3c979cecfa7029ab582b57690a31cd2f346c4d0ce7951b6/jiter-0.10.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:3bebe0c558e19902c96e99217e0b8e8b17d570906e72ed8a87170bc290b1e978", size = 317473, upload-time = "2025-05-18T19:03:25.942Z" }, - { url = "https://files.pythonhosted.org/packages/be/cf/fc33f5159ce132be1d8dd57251a1ec7a631c7df4bd11e1cd198308c6ae32/jiter-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:558cc7e44fd8e507a236bee6a02fa17199ba752874400a0ca6cd6e2196cdb7dc", size = 321971, upload-time = "2025-05-18T19:03:27.255Z" }, - { url = "https://files.pythonhosted.org/packages/68/a4/da3f150cf1d51f6c472616fb7650429c7ce053e0c962b41b68557fdf6379/jiter-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d613e4b379a07d7c8453c5712ce7014e86c6ac93d990a0b8e7377e18505e98d", size = 345574, upload-time = "2025-05-18T19:03:28.63Z" }, - { url = "https://files.pythonhosted.org/packages/84/34/6e8d412e60ff06b186040e77da5f83bc158e9735759fcae65b37d681f28b/jiter-0.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f62cf8ba0618eda841b9bf61797f21c5ebd15a7a1e19daab76e4e4b498d515b2", size = 371028, upload-time = "2025-05-18T19:03:30.292Z" }, - { url = "https://files.pythonhosted.org/packages/fb/d9/9ee86173aae4576c35a2f50ae930d2ccb4c4c236f6cb9353267aa1d626b7/jiter-0.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:919d139cdfa8ae8945112398511cb7fca58a77382617d279556b344867a37e61", size = 491083, upload-time = "2025-05-18T19:03:31.654Z" }, - { url = "https://files.pythonhosted.org/packages/d9/2c/f955de55e74771493ac9e188b0f731524c6a995dffdcb8c255b89c6fb74b/jiter-0.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13ddbc6ae311175a3b03bd8994881bc4635c923754932918e18da841632349db", size = 388821, upload-time = "2025-05-18T19:03:33.184Z" }, - { url = "https://files.pythonhosted.org/packages/81/5a/0e73541b6edd3f4aada586c24e50626c7815c561a7ba337d6a7eb0a915b4/jiter-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c440ea003ad10927a30521a9062ce10b5479592e8a70da27f21eeb457b4a9c5", size = 352174, upload-time = "2025-05-18T19:03:34.965Z" }, - { url = "https://files.pythonhosted.org/packages/1c/c0/61eeec33b8c75b31cae42be14d44f9e6fe3ac15a4e58010256ac3abf3638/jiter-0.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc347c87944983481e138dea467c0551080c86b9d21de6ea9306efb12ca8f606", size = 391869, upload-time = "2025-05-18T19:03:36.436Z" }, - { url = "https://files.pythonhosted.org/packages/41/22/5beb5ee4ad4ef7d86f5ea5b4509f680a20706c4a7659e74344777efb7739/jiter-0.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:13252b58c1f4d8c5b63ab103c03d909e8e1e7842d302473f482915d95fefd605", size = 523741, upload-time = "2025-05-18T19:03:38.168Z" }, - { url = "https://files.pythonhosted.org/packages/ea/10/768e8818538e5817c637b0df52e54366ec4cebc3346108a4457ea7a98f32/jiter-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7d1bbf3c465de4a24ab12fb7766a0003f6f9bce48b8b6a886158c4d569452dc5", size = 514527, upload-time = "2025-05-18T19:03:39.577Z" }, - { url = "https://files.pythonhosted.org/packages/73/6d/29b7c2dc76ce93cbedabfd842fc9096d01a0550c52692dfc33d3cc889815/jiter-0.10.0-cp311-cp311-win32.whl", hash = "sha256:db16e4848b7e826edca4ccdd5b145939758dadf0dc06e7007ad0e9cfb5928ae7", size = 210765, upload-time = "2025-05-18T19:03:41.271Z" }, - { url = "https://files.pythonhosted.org/packages/c2/c9/d394706deb4c660137caf13e33d05a031d734eb99c051142e039d8ceb794/jiter-0.10.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:9c9c1d5f10e18909e993f9641f12fe1c77b3e9b533ee94ffa970acc14ded3812", size = 209234, upload-time = "2025-05-18T19:03:42.918Z" }, - { url = "https://files.pythonhosted.org/packages/6d/b5/348b3313c58f5fbfb2194eb4d07e46a35748ba6e5b3b3046143f3040bafa/jiter-0.10.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1e274728e4a5345a6dde2d343c8da018b9d4bd4350f5a472fa91f66fda44911b", size = 312262, upload-time = "2025-05-18T19:03:44.637Z" }, - { url = "https://files.pythonhosted.org/packages/9c/4a/6a2397096162b21645162825f058d1709a02965606e537e3304b02742e9b/jiter-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7202ae396446c988cb2a5feb33a543ab2165b786ac97f53b59aafb803fef0744", size = 320124, upload-time = "2025-05-18T19:03:46.341Z" }, - { url = "https://files.pythonhosted.org/packages/2a/85/1ce02cade7516b726dd88f59a4ee46914bf79d1676d1228ef2002ed2f1c9/jiter-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23ba7722d6748b6920ed02a8f1726fb4b33e0fd2f3f621816a8b486c66410ab2", size = 345330, upload-time = "2025-05-18T19:03:47.596Z" }, - { url = "https://files.pythonhosted.org/packages/75/d0/bb6b4f209a77190ce10ea8d7e50bf3725fc16d3372d0a9f11985a2b23eff/jiter-0.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:371eab43c0a288537d30e1f0b193bc4eca90439fc08a022dd83e5e07500ed026", size = 369670, upload-time = "2025-05-18T19:03:49.334Z" }, - { url = "https://files.pythonhosted.org/packages/a0/f5/a61787da9b8847a601e6827fbc42ecb12be2c925ced3252c8ffcb56afcaf/jiter-0.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c675736059020365cebc845a820214765162728b51ab1e03a1b7b3abb70f74c", size = 489057, upload-time = "2025-05-18T19:03:50.66Z" }, - { url = "https://files.pythonhosted.org/packages/12/e4/6f906272810a7b21406c760a53aadbe52e99ee070fc5c0cb191e316de30b/jiter-0.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c5867d40ab716e4684858e4887489685968a47e3ba222e44cde6e4a2154f959", size = 389372, upload-time = "2025-05-18T19:03:51.98Z" }, - { url = "https://files.pythonhosted.org/packages/e2/ba/77013b0b8ba904bf3762f11e0129b8928bff7f978a81838dfcc958ad5728/jiter-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:395bb9a26111b60141757d874d27fdea01b17e8fac958b91c20128ba8f4acc8a", size = 352038, upload-time = "2025-05-18T19:03:53.703Z" }, - { url = "https://files.pythonhosted.org/packages/67/27/c62568e3ccb03368dbcc44a1ef3a423cb86778a4389e995125d3d1aaa0a4/jiter-0.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6842184aed5cdb07e0c7e20e5bdcfafe33515ee1741a6835353bb45fe5d1bd95", size = 391538, upload-time = "2025-05-18T19:03:55.046Z" }, - { url = "https://files.pythonhosted.org/packages/c0/72/0d6b7e31fc17a8fdce76164884edef0698ba556b8eb0af9546ae1a06b91d/jiter-0.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:62755d1bcea9876770d4df713d82606c8c1a3dca88ff39046b85a048566d56ea", size = 523557, upload-time = "2025-05-18T19:03:56.386Z" }, - { url = "https://files.pythonhosted.org/packages/2f/09/bc1661fbbcbeb6244bd2904ff3a06f340aa77a2b94e5a7373fd165960ea3/jiter-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:533efbce2cacec78d5ba73a41756beff8431dfa1694b6346ce7af3a12c42202b", size = 514202, upload-time = "2025-05-18T19:03:57.675Z" }, - { url = "https://files.pythonhosted.org/packages/1b/84/5a5d5400e9d4d54b8004c9673bbe4403928a00d28529ff35b19e9d176b19/jiter-0.10.0-cp312-cp312-win32.whl", hash = 
"sha256:8be921f0cadd245e981b964dfbcd6fd4bc4e254cdc069490416dd7a2632ecc01", size = 211781, upload-time = "2025-05-18T19:03:59.025Z" }, - { url = "https://files.pythonhosted.org/packages/9b/52/7ec47455e26f2d6e5f2ea4951a0652c06e5b995c291f723973ae9e724a65/jiter-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7c7d785ae9dda68c2678532a5a1581347e9c15362ae9f6e68f3fdbfb64f2e49", size = 206176, upload-time = "2025-05-18T19:04:00.305Z" }, + { url = "https://files.pythonhosted.org/packages/38/55/a69fefeef09c2eaabae44b935a1aa81517e49639c0a0c25d861cb18cd7ac/jiter-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cb5d9db02979c3f49071fce51a48f4b4e4cf574175fb2b11c7a535fa4867b222", size = 309503, upload-time = "2025-09-15T09:19:08.191Z" }, + { url = "https://files.pythonhosted.org/packages/bd/d5/a6aba9e6551f32f9c127184f398208e4eddb96c59ac065c8a92056089d28/jiter-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1dc6a123f3471c4730db7ca8ba75f1bb3dcb6faeb8d46dd781083e7dee88b32d", size = 317688, upload-time = "2025-09-15T09:19:09.918Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f3/5e86f57c1883971cdc8535d0429c2787bf734840a231da30a3be12850562/jiter-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09858f8d230f031c7b8e557429102bf050eea29c77ad9c34c8fe253c5329acb7", size = 337418, upload-time = "2025-09-15T09:19:11.078Z" }, + { url = "https://files.pythonhosted.org/packages/5e/4f/a71d8a24c2a70664970574a8e0b766663f5ef788f7fe1cc20ee0c016d488/jiter-0.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbe2196c4a0ce760925a74ab4456bf644748ab0979762139626ad138f6dac72d", size = 361423, upload-time = "2025-09-15T09:19:13.286Z" }, + { url = "https://files.pythonhosted.org/packages/8f/e5/b09076f4e7fd9471b91e16f9f3dc7330b161b738f3b39b2c37054a36e26a/jiter-0.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5beb56d22b63647bafd0b74979216fdee80c580c0c63410be8c11053860ffd09", size = 486367, upload-time = "2025-09-15T09:19:14.546Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f1/98cb3a36f5e62f80cd860f0179f948d9eab5a316d55d3e1bab98d9767af5/jiter-0.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97025d09ef549795d8dc720a824312cee3253c890ac73c621721ddfc75066789", size = 376335, upload-time = "2025-09-15T09:19:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d8/ec74886497ea393c29dbd7651ddecc1899e86404a6b1f84a3ddab0ab59fd/jiter-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50880a6da65d8c23a2cf53c412847d9757e74cc9a3b95c5704a1d1a24667347", size = 348981, upload-time = "2025-09-15T09:19:17.568Z" }, + { url = "https://files.pythonhosted.org/packages/24/93/d22ad7fa3b86ade66c86153ceea73094fc2af8b20c59cb7fceab9fea4704/jiter-0.11.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:452d80a1c86c095a242007bd9fc5d21b8a8442307193378f891cb8727e469648", size = 385797, upload-time = "2025-09-15T09:19:19.121Z" }, + { url = "https://files.pythonhosted.org/packages/c8/bd/e25ff4a4df226e9b885f7cb01ee4b9dc74e3000e612d6f723860d71a1f34/jiter-0.11.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e84e58198d4894668eec2da660ffff60e0f3e60afa790ecc50cb12b0e02ca1d4", size = 516597, upload-time = "2025-09-15T09:19:20.301Z" }, + { url = "https://files.pythonhosted.org/packages/be/fb/beda613db7d93ffa2fdd2683f90f2f5dce8daf4bc2d0d2829e7de35308c6/jiter-0.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:df64edcfc5dd5279a791eea52aa113d432c933119a025b0b5739f90d2e4e75f1", size = 508853, upload-time = "2025-09-15T09:19:22.075Z" }, + { url = "https://files.pythonhosted.org/packages/20/64/c5b0d93490634e41e38e2a15de5d54fdbd2c9f64a19abb0f95305b63373c/jiter-0.11.0-cp311-cp311-win32.whl", hash = "sha256:144fc21337d21b1d048f7f44bf70881e1586401d405ed3a98c95a114a9994982", size = 205140, upload-time = "2025-09-15T09:19:23.351Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e6/c347c0e6f5796e97d4356b7e5ff0ce336498b7f4ef848fae621a56f1ccf3/jiter-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:b0f32e644d241293b892b1a6dd8f0b9cc029bfd94c97376b2681c36548aabab7", size = 204311, upload-time = "2025-09-15T09:19:24.591Z" }, + { url = "https://files.pythonhosted.org/packages/ba/b5/3009b112b8f673e568ef79af9863d8309a15f0a8cdcc06ed6092051f377e/jiter-0.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2fb7b377688cc3850bbe5c192a6bd493562a0bc50cbc8b047316428fbae00ada", size = 305510, upload-time = "2025-09-15T09:19:25.893Z" }, + { url = "https://files.pythonhosted.org/packages/fe/82/15514244e03b9e71e086bbe2a6de3e4616b48f07d5f834200c873956fb8c/jiter-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1b7cbe3f25bd0d8abb468ba4302a5d45617ee61b2a7a638f63fee1dc086be99", size = 316521, upload-time = "2025-09-15T09:19:27.525Z" }, + { url = "https://files.pythonhosted.org/packages/92/94/7a2e905f40ad2d6d660e00b68d818f9e29fb87ffe82774f06191e93cbe4a/jiter-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0a7f0ec81d5b7588c5cade1eb1925b91436ae6726dc2df2348524aeabad5de6", size = 338214, upload-time = "2025-09-15T09:19:28.727Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9c/5791ed5bdc76f12110158d3316a7a3ec0b1413d018b41c5ed399549d3ad5/jiter-0.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07630bb46ea2a6b9c6ed986c6e17e35b26148cce2c535454b26ee3f0e8dcaba1", size = 361280, upload-time = "2025-09-15T09:19:30.013Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7f/b7d82d77ff0d2cb06424141000176b53a9e6b16a1125525bb51ea4990c2e/jiter-0.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7764f27d28cd4a9cbc61704dfcd80c903ce3aad106a37902d3270cd6673d17f4", size = 487895, upload-time = "2025-09-15T09:19:31.424Z" }, + { url = "https://files.pythonhosted.org/packages/42/44/10a1475d46f1fc1fd5cc2e82c58e7bca0ce5852208e0fa5df2f949353321/jiter-0.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4a6c4a737d486f77f842aeb22807edecb4a9417e6700c7b981e16d34ba7c72", size = 378421, upload-time = "2025-09-15T09:19:32.746Z" }, + { url = "https://files.pythonhosted.org/packages/9a/5f/0dc34563d8164d31d07bc09d141d3da08157a68dcd1f9b886fa4e917805b/jiter-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf408d2a0abd919b60de8c2e7bc5eeab72d4dafd18784152acc7c9adc3291591", size = 347932, upload-time = "2025-09-15T09:19:34.612Z" }, + { url = "https://files.pythonhosted.org/packages/f7/de/b68f32a4fcb7b4a682b37c73a0e5dae32180140cd1caf11aef6ad40ddbf2/jiter-0.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cdef53eda7d18e799625023e1e250dbc18fbc275153039b873ec74d7e8883e09", size = 386959, upload-time = "2025-09-15T09:19:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/c08c92e713b6e28972a846a81ce374883dac2f78ec6f39a0dad9f2339c3a/jiter-0.11.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:53933a38ef7b551dd9c7f1064f9d7bb235bb3168d0fa5f14f0798d1b7ea0d9c5", size = 517187, upload-time = "2025-09-15T09:19:37.426Z" }, + { url = "https://files.pythonhosted.org/packages/89/b5/4a283bec43b15aad54fcae18d951f06a2ec3f78db5708d3b59a48e9c3fbd/jiter-0.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11840d2324c9ab5162fc1abba23bc922124fedcff0d7b7f85fffa291e2f69206", size = 509461, upload-time = "2025-09-15T09:19:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/34/a5/f8bad793010534ea73c985caaeef8cc22dfb1fedb15220ecdf15c623c07a/jiter-0.11.0-cp312-cp312-win32.whl", hash = "sha256:4f01a744d24a5f2bb4a11657a1b27b61dc038ae2e674621a74020406e08f749b", size = 206664, upload-time = "2025-09-15T09:19:40.096Z" }, + { url = "https://files.pythonhosted.org/packages/ed/42/5823ec2b1469395a160b4bf5f14326b4a098f3b6898fbd327366789fa5d3/jiter-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:29fff31190ab3a26de026da2f187814f4b9c6695361e20a9ac2123e4d4378a4c", size = 203520, upload-time = "2025-09-15T09:19:41.798Z" }, + { url = "https://files.pythonhosted.org/packages/70/f3/ce100253c80063a7b8b406e1d1562657fd4b9b4e1b562db40e68645342fb/jiter-0.11.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:902b43386c04739229076bd1c4c69de5d115553d982ab442a8ae82947c72ede7", size = 336380, upload-time = "2025-09-15T09:20:36.867Z" }, ] [[package]] @@ -2920,11 +2960,11 @@ wheels = [ [[package]] name = "json-repair" -version = "0.50.1" +version = "0.52.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/91/71/6d57ed93e43e98cdd124e82ab6231c6817f06a10743e7ae4bc6f66d03a02/json_repair-0.50.1.tar.gz", hash = "sha256:4ee69bc4be7330fbb90a3f19e890852c5fe1ceacec5ed1d2c25cdeeebdfaec76", size = 34864, upload-time = "2025-09-06T05:43:34.331Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/63/2c3c3c8cc1c28a0a20a9ab0eff5439c989ce3cc5956d8a4c7cf1eae0a06e/json_repair-0.52.0.tar.gz", hash = "sha256:0eee59cb3145b462b0734d4cf3246b797686caa669d52eee8dd30e09ea6d7876", size = 35384, upload-time = "2025-10-05T17:18:12.387Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ad/be/b1e05740d9c6f333dab67910f3894e2e2416c1ef00f9f7e20a327ab1f396/json_repair-0.50.1-py3-none-any.whl", hash = "sha256:9b78358bb7572a6e0b8effe7a8bd8cb959a3e311144842b1d2363fe39e2f13c5", size = 26020, upload-time = "2025-09-06T05:43:32.718Z" }, + { url = "https://files.pythonhosted.org/packages/c6/7f/3a4e456da9a0f9ac54d9842ed51e96960826a98456f0826a9b3e808713c4/json_repair-0.52.0-py3-none-any.whl", hash = "sha256:c783069906a456f62e2a553fbef32a420a4745ff943e2014411728edcc7bf60a", size = 26350, upload-time = "2025-10-05T17:18:10.859Z" }, ] [[package]] @@ -2980,13 +3020,12 @@ wheels = [ [[package]] name = "kubernetes" -version = "33.1.0" +version = "34.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "durationpy" }, { name = "google-auth" }, - { name = "oauthlib" }, { name = "python-dateutil" }, { name = "pyyaml" }, { name = "requests" }, @@ -2995,9 +3034,9 @@ dependencies = [ { name = "urllib3" }, { name = "websocket-client" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ae/52/19ebe8004c243fdfa78268a96727c71e08f00ff6fe69a301d0b7fcbce3c2/kubernetes-33.1.0.tar.gz", hash = "sha256:f64d829843a54c251061a8e7a14523b521f2dc5c896cf6d65ccf348648a88993", size = 1036779, upload-time = "2025-06-09T21:57:58.521Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ef/55/3f880ef65f559cbed44a9aa20d3bdbc219a2c3a3bac4a30a513029b03ee9/kubernetes-34.1.0.tar.gz", hash = "sha256:8fe8edb0b5d290a2f3ac06596b23f87c658977d46b5f8df9d0f4ea83d0003912", size = 1083771, upload-time = "2025-09-29T20:23:49.283Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/89/43/d9bebfc3db7dea6ec80df5cb2aad8d274dd18ec2edd6c4f21f32c237cbbb/kubernetes-33.1.0-py2.py3-none-any.whl", hash = "sha256:544de42b24b64287f7e0aa9513c93cb503f7f40eea39b20f66810011a86eabc5", size = 1941335, upload-time = "2025-06-09T21:57:56.327Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ec/65f7d563aa4a62dd58777e8f6aa882f15db53b14eb29aba0c28a20f7eb26/kubernetes-34.1.0-py2.py3-none-any.whl", hash = "sha256:bffba2272534e224e6a7a74d582deb0b545b7c9879d2cd9e4aae9481d1f2cc2a", size = 2008380, upload-time = "2025-09-29T20:23:47.684Z" }, ] [[package]] @@ -3045,88 +3084,92 @@ wheels = [ [[package]] name = "litellm" -version = "1.63.7" +version = "1.77.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, { name = "click" }, + { name = "fastuuid" }, { name = "httpx" }, { name = "importlib-metadata" }, { name = "jinja2" }, { name = "jsonschema" }, { name = "openai" }, + { name = "pondpond" }, { name = "pydantic" }, { name = "python-dotenv" }, { name = "tiktoken" }, { name = "tokenizers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5c/7a/6c1994a239abd1b335001a46ae47fa055a24c493b6de19a9fa1872187fe9/litellm-1.63.7.tar.gz", hash = "sha256:2fbd7236d5e5379eee18556857ed62a5ed49f4f09e03ff33cf15932306b984f1", size = 6598034, upload-time = "2025-03-12T19:26:40.915Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ab/b7/0d3c6dbcff3064238d123f90ae96764a85352f3f5caab6695a55007fd019/litellm-1.77.4.tar.gz", hash = "sha256:ce652e10ecf5b36767bfdf58e53b2802e22c3de383b03554e6ee1a4a66fa743d", size = 10330773, upload-time = "2025-09-24T17:52:44.876Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/44/255c7ecb8b6f3f730a37422736509c21cb1bf4da66cc060d872005bda9f5/litellm-1.63.7-py3-none-any.whl", hash = "sha256:fbdee39a894506c68f158c6b4e0079f9e9c023441fff7215e7b8e42162dba0a7", size = 6909807, upload-time = "2025-03-12T19:26:37.788Z" }, + { url = "https://files.pythonhosted.org/packages/3c/32/90f8587818d146d604ed6eec95f96378363fda06b14817399cc68853383e/litellm-1.77.4-py3-none-any.whl", hash = "sha256:66c2bb776f1e19ceddfa977a2bbf7f05e6f26c4b1fec8b2093bd171d842701b8", size = 9138493, upload-time = "2025-09-24T17:52:40.764Z" }, ] [[package]] name = "llvmlite" -version = "0.44.0" +version = "0.45.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/89/6a/95a3d3610d5c75293d5dbbb2a76480d5d4eeba641557b69fe90af6c5b84e/llvmlite-0.44.0.tar.gz", hash = "sha256:07667d66a5d150abed9157ab6c0b9393c9356f229784a4385c02f99e94fc94d4", size = 171880, upload-time = "2025-01-20T11:14:41.342Z" } +sdist = { url = "https://files.pythonhosted.org/packages/99/8d/5baf1cef7f9c084fb35a8afbde88074f0d6a727bc63ef764fe0e7543ba40/llvmlite-0.45.1.tar.gz", hash = "sha256:09430bb9d0bb58fc45a45a57c7eae912850bedc095cd0810a57de109c69e1c32", size = 185600, upload-time = "2025-10-01T17:59:52.046Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/e2/86b245397052386595ad726f9742e5223d7aea999b18c518a50e96c3aca4/llvmlite-0.44.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:eed7d5f29136bda63b6d7804c279e2b72e08c952b7c5df61f45db408e0ee52f3", size = 28132305, 
upload-time = "2025-01-20T11:12:53.936Z" }, - { url = "https://files.pythonhosted.org/packages/ff/ec/506902dc6870249fbe2466d9cf66d531265d0f3a1157213c8f986250c033/llvmlite-0.44.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ace564d9fa44bb91eb6e6d8e7754977783c68e90a471ea7ce913bff30bd62427", size = 26201090, upload-time = "2025-01-20T11:12:59.847Z" }, - { url = "https://files.pythonhosted.org/packages/99/fe/d030f1849ebb1f394bb3f7adad5e729b634fb100515594aca25c354ffc62/llvmlite-0.44.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5d22c3bfc842668168a786af4205ec8e3ad29fb1bc03fd11fd48460d0df64c1", size = 42361858, upload-time = "2025-01-20T11:13:07.623Z" }, - { url = "https://files.pythonhosted.org/packages/d7/7a/ce6174664b9077fc673d172e4c888cb0b128e707e306bc33fff8c2035f0d/llvmlite-0.44.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f01a394e9c9b7b1d4e63c327b096d10f6f0ed149ef53d38a09b3749dcf8c9610", size = 41184200, upload-time = "2025-01-20T11:13:20.058Z" }, - { url = "https://files.pythonhosted.org/packages/5f/c6/258801143975a6d09a373f2641237992496e15567b907a4d401839d671b8/llvmlite-0.44.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8489634d43c20cd0ad71330dde1d5bc7b9966937a263ff1ec1cebb90dc50955", size = 30331193, upload-time = "2025-01-20T11:13:26.976Z" }, - { url = "https://files.pythonhosted.org/packages/15/86/e3c3195b92e6e492458f16d233e58a1a812aa2bfbef9bdd0fbafcec85c60/llvmlite-0.44.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:1d671a56acf725bf1b531d5ef76b86660a5ab8ef19bb6a46064a705c6ca80aad", size = 28132297, upload-time = "2025-01-20T11:13:32.57Z" }, - { url = "https://files.pythonhosted.org/packages/d6/53/373b6b8be67b9221d12b24125fd0ec56b1078b660eeae266ec388a6ac9a0/llvmlite-0.44.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f79a728e0435493611c9f405168682bb75ffd1fbe6fc360733b850c80a026db", size = 26201105, upload-time = "2025-01-20T11:13:38.744Z" }, - { url = "https://files.pythonhosted.org/packages/cb/da/8341fd3056419441286c8e26bf436923021005ece0bff5f41906476ae514/llvmlite-0.44.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0143a5ef336da14deaa8ec26c5449ad5b6a2b564df82fcef4be040b9cacfea9", size = 42361901, upload-time = "2025-01-20T11:13:46.711Z" }, - { url = "https://files.pythonhosted.org/packages/53/ad/d79349dc07b8a395a99153d7ce8b01d6fcdc9f8231355a5df55ded649b61/llvmlite-0.44.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d752f89e31b66db6f8da06df8b39f9b91e78c5feea1bf9e8c1fba1d1c24c065d", size = 41184247, upload-time = "2025-01-20T11:13:56.159Z" }, - { url = "https://files.pythonhosted.org/packages/e2/3b/a9a17366af80127bd09decbe2a54d8974b6d8b274b39bf47fbaedeec6307/llvmlite-0.44.0-cp312-cp312-win_amd64.whl", hash = "sha256:eae7e2d4ca8f88f89d315b48c6b741dcb925d6a1042da694aa16ab3dd4cbd3a1", size = 30332380, upload-time = "2025-01-20T11:14:02.442Z" }, + { url = "https://files.pythonhosted.org/packages/04/ad/9bdc87b2eb34642c1cfe6bcb4f5db64c21f91f26b010f263e7467e7536a3/llvmlite-0.45.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:60f92868d5d3af30b4239b50e1717cb4e4e54f6ac1c361a27903b318d0f07f42", size = 43043526, upload-time = "2025-10-01T18:03:15.051Z" }, + { url = "https://files.pythonhosted.org/packages/a5/ea/c25c6382f452a943b4082da5e8c1665ce29a62884e2ec80608533e8e82d5/llvmlite-0.45.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98baab513e19beb210f1ef39066288784839a44cd504e24fff5d17f1b3cf0860", size = 37253118, upload-time = 
"2025-10-01T18:04:06.783Z" }, + { url = "https://files.pythonhosted.org/packages/fe/af/85fc237de98b181dbbe8647324331238d6c52a3554327ccdc83ced28efba/llvmlite-0.45.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3adc2355694d6a6fbcc024d59bb756677e7de506037c878022d7b877e7613a36", size = 56288209, upload-time = "2025-10-01T18:01:00.168Z" }, + { url = "https://files.pythonhosted.org/packages/0a/df/3daf95302ff49beff4230065e3178cd40e71294968e8d55baf4a9e560814/llvmlite-0.45.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2f3377a6db40f563058c9515dedcc8a3e562d8693a106a28f2ddccf2c8fcf6ca", size = 55140958, upload-time = "2025-10-01T18:02:11.199Z" }, + { url = "https://files.pythonhosted.org/packages/a4/56/4c0d503fe03bac820ecdeb14590cf9a248e120f483bcd5c009f2534f23f0/llvmlite-0.45.1-cp311-cp311-win_amd64.whl", hash = "sha256:f9c272682d91e0d57f2a76c6d9ebdfccc603a01828cdbe3d15273bdca0c3363a", size = 38132232, upload-time = "2025-10-01T18:04:52.181Z" }, + { url = "https://files.pythonhosted.org/packages/e2/7c/82cbd5c656e8991bcc110c69d05913be2229302a92acb96109e166ae31fb/llvmlite-0.45.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:28e763aba92fe9c72296911e040231d486447c01d4f90027c8e893d89d49b20e", size = 43043524, upload-time = "2025-10-01T18:03:30.666Z" }, + { url = "https://files.pythonhosted.org/packages/9d/bc/5314005bb2c7ee9f33102c6456c18cc81745d7055155d1218f1624463774/llvmlite-0.45.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1a53f4b74ee9fd30cb3d27d904dadece67a7575198bd80e687ee76474620735f", size = 37253123, upload-time = "2025-10-01T18:04:18.177Z" }, + { url = "https://files.pythonhosted.org/packages/96/76/0f7154952f037cb320b83e1c952ec4a19d5d689cf7d27cb8a26887d7bbc1/llvmlite-0.45.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b3796b1b1e1c14dcae34285d2f4ea488402fbd2c400ccf7137603ca3800864f", size = 56288211, upload-time = "2025-10-01T18:01:24.079Z" }, + { url = "https://files.pythonhosted.org/packages/00/b1/0b581942be2683ceb6862d558979e87387e14ad65a1e4db0e7dd671fa315/llvmlite-0.45.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:779e2f2ceefef0f4368548685f0b4adde34e5f4b457e90391f570a10b348d433", size = 55140958, upload-time = "2025-10-01T18:02:30.482Z" }, + { url = "https://files.pythonhosted.org/packages/33/94/9ba4ebcf4d541a325fd8098ddc073b663af75cc8b065b6059848f7d4dce7/llvmlite-0.45.1-cp312-cp312-win_amd64.whl", hash = "sha256:9e6c9949baf25d9aa9cd7cf0f6d011b9ca660dd17f5ba2b23bdbdb77cc86b116", size = 38132231, upload-time = "2025-10-01T18:05:03.664Z" }, ] [[package]] name = "lxml" -version = "6.0.1" +version = "6.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8f/bd/f9d01fd4132d81c6f43ab01983caea69ec9614b913c290a26738431a015d/lxml-6.0.1.tar.gz", hash = "sha256:2b3a882ebf27dd026df3801a87cf49ff791336e0f94b0fad195db77e01240690", size = 4070214, upload-time = "2025-08-22T10:37:53.525Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426, upload-time = "2025-09-22T04:04:59.287Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/c8/262c1d19339ef644cdc9eb5aad2e85bd2d1fa2d7c71cdef3ede1a3eed84d/lxml-6.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:c6acde83f7a3d6399e6d83c1892a06ac9b14ea48332a5fbd55d60b9897b9570a", size = 8422719, upload-time = "2025-08-22T10:32:24.848Z" }, - { url = "https://files.pythonhosted.org/packages/e5/d4/1b0afbeb801468a310642c3a6f6704e53c38a4a6eb1ca6faea013333e02f/lxml-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0d21c9cacb6a889cbb8eeb46c77ef2c1dd529cde10443fdeb1de847b3193c541", size = 4575763, upload-time = "2025-08-22T10:32:27.057Z" }, - { url = "https://files.pythonhosted.org/packages/5b/c1/8db9b5402bf52ceb758618313f7423cd54aea85679fcf607013707d854a8/lxml-6.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:847458b7cd0d04004895f1fb2cca8e7c0f8ec923c49c06b7a72ec2d48ea6aca2", size = 4943244, upload-time = "2025-08-22T10:32:28.847Z" }, - { url = "https://files.pythonhosted.org/packages/e7/78/838e115358dd2369c1c5186080dd874a50a691fb5cd80db6afe5e816e2c6/lxml-6.0.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1dc13405bf315d008fe02b1472d2a9d65ee1c73c0a06de5f5a45e6e404d9a1c0", size = 5081725, upload-time = "2025-08-22T10:32:30.666Z" }, - { url = "https://files.pythonhosted.org/packages/c7/b6/bdcb3a3ddd2438c5b1a1915161f34e8c85c96dc574b0ef3be3924f36315c/lxml-6.0.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f540c229a8c0a770dcaf6d5af56a5295e0fc314fc7ef4399d543328054bcea", size = 5021238, upload-time = "2025-08-22T10:32:32.49Z" }, - { url = "https://files.pythonhosted.org/packages/73/e5/1bfb96185dc1a64c7c6fbb7369192bda4461952daa2025207715f9968205/lxml-6.0.1-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:d2f73aef768c70e8deb8c4742fca4fd729b132fda68458518851c7735b55297e", size = 5343744, upload-time = "2025-08-22T10:32:34.385Z" }, - { url = "https://files.pythonhosted.org/packages/a2/ae/df3ea9ebc3c493b9c6bdc6bd8c554ac4e147f8d7839993388aab57ec606d/lxml-6.0.1-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e7f4066b85a4fa25ad31b75444bd578c3ebe6b8ed47237896341308e2ce923c3", size = 5223477, upload-time = "2025-08-22T10:32:36.256Z" }, - { url = "https://files.pythonhosted.org/packages/37/b3/65e1e33600542c08bc03a4c5c9c306c34696b0966a424a3be6ffec8038ed/lxml-6.0.1-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:0cce65db0cd8c750a378639900d56f89f7d6af11cd5eda72fde054d27c54b8ce", size = 4676626, upload-time = "2025-08-22T10:32:38.793Z" }, - { url = "https://files.pythonhosted.org/packages/7a/46/ee3ed8f3a60e9457d7aea46542d419917d81dbfd5700fe64b2a36fb5ef61/lxml-6.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c372d42f3eee5844b69dcab7b8d18b2f449efd54b46ac76970d6e06b8e8d9a66", size = 5066042, upload-time = "2025-08-22T10:32:41.134Z" }, - { url = "https://files.pythonhosted.org/packages/9c/b9/8394538e7cdbeb3bfa36bc74924be1a4383e0bb5af75f32713c2c4aa0479/lxml-6.0.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2e2b0e042e1408bbb1c5f3cfcb0f571ff4ac98d8e73f4bf37c5dd179276beedd", size = 4724714, upload-time = "2025-08-22T10:32:43.94Z" }, - { url = "https://files.pythonhosted.org/packages/b3/21/3ef7da1ea2a73976c1a5a311d7cde5d379234eec0968ee609517714940b4/lxml-6.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cc73bb8640eadd66d25c5a03175de6801f63c535f0f3cf50cac2f06a8211f420", size = 5247376, upload-time = "2025-08-22T10:32:46.263Z" }, - { url = "https://files.pythonhosted.org/packages/26/7d/0980016f124f00c572cba6f4243e13a8e80650843c66271ee692cddf25f3/lxml-6.0.1-cp311-cp311-win32.whl", hash = 
"sha256:7c23fd8c839708d368e406282d7953cee5134f4592ef4900026d84566d2b4c88", size = 3609499, upload-time = "2025-08-22T10:32:48.156Z" }, - { url = "https://files.pythonhosted.org/packages/b1/08/28440437521f265eff4413eb2a65efac269c4c7db5fd8449b586e75d8de2/lxml-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:2516acc6947ecd3c41a4a4564242a87c6786376989307284ddb115f6a99d927f", size = 4036003, upload-time = "2025-08-22T10:32:50.662Z" }, - { url = "https://files.pythonhosted.org/packages/7b/dc/617e67296d98099213a505d781f04804e7b12923ecd15a781a4ab9181992/lxml-6.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:cb46f8cfa1b0334b074f40c0ff94ce4d9a6755d492e6c116adb5f4a57fb6ad96", size = 3679662, upload-time = "2025-08-22T10:32:52.739Z" }, - { url = "https://files.pythonhosted.org/packages/b0/a9/82b244c8198fcdf709532e39a1751943a36b3e800b420adc739d751e0299/lxml-6.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c03ac546adaabbe0b8e4a15d9ad815a281afc8d36249c246aecf1aaad7d6f200", size = 8422788, upload-time = "2025-08-22T10:32:56.612Z" }, - { url = "https://files.pythonhosted.org/packages/c9/8d/1ed2bc20281b0e7ed3e6c12b0a16e64ae2065d99be075be119ba88486e6d/lxml-6.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33b862c7e3bbeb4ba2c96f3a039f925c640eeba9087a4dc7a572ec0f19d89392", size = 4593547, upload-time = "2025-08-22T10:32:59.016Z" }, - { url = "https://files.pythonhosted.org/packages/76/53/d7fd3af95b72a3493bf7fbe842a01e339d8f41567805cecfecd5c71aa5ee/lxml-6.0.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7a3ec1373f7d3f519de595032d4dcafae396c29407cfd5073f42d267ba32440d", size = 4948101, upload-time = "2025-08-22T10:33:00.765Z" }, - { url = "https://files.pythonhosted.org/packages/9d/51/4e57cba4d55273c400fb63aefa2f0d08d15eac021432571a7eeefee67bed/lxml-6.0.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:03b12214fb1608f4cffa181ec3d046c72f7e77c345d06222144744c122ded870", size = 5108090, upload-time = "2025-08-22T10:33:03.108Z" }, - { url = "https://files.pythonhosted.org/packages/f6/6e/5f290bc26fcc642bc32942e903e833472271614e24d64ad28aaec09d5dae/lxml-6.0.1-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:207ae0d5f0f03b30f95e649a6fa22aa73f5825667fee9c7ec6854d30e19f2ed8", size = 5021791, upload-time = "2025-08-22T10:33:06.972Z" }, - { url = "https://files.pythonhosted.org/packages/13/d4/2e7551a86992ece4f9a0f6eebd4fb7e312d30f1e372760e2109e721d4ce6/lxml-6.0.1-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:32297b09ed4b17f7b3f448de87a92fb31bb8747496623483788e9f27c98c0f00", size = 5358861, upload-time = "2025-08-22T10:33:08.967Z" }, - { url = "https://files.pythonhosted.org/packages/8a/5f/cb49d727fc388bf5fd37247209bab0da11697ddc5e976ccac4826599939e/lxml-6.0.1-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7e18224ea241b657a157c85e9cac82c2b113ec90876e01e1f127312006233756", size = 5652569, upload-time = "2025-08-22T10:33:10.815Z" }, - { url = "https://files.pythonhosted.org/packages/ca/b8/66c1ef8c87ad0f958b0a23998851e610607c74849e75e83955d5641272e6/lxml-6.0.1-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a07a994d3c46cd4020c1ea566345cf6815af205b1e948213a4f0f1d392182072", size = 5252262, upload-time = "2025-08-22T10:33:12.673Z" }, - { url = "https://files.pythonhosted.org/packages/1a/ef/131d3d6b9590e64fdbb932fbc576b81fcc686289da19c7cb796257310e82/lxml-6.0.1-cp312-cp312-manylinux_2_31_armv7l.whl", hash = 
"sha256:2287fadaa12418a813b05095485c286c47ea58155930cfbd98c590d25770e225", size = 4710309, upload-time = "2025-08-22T10:33:14.952Z" }, - { url = "https://files.pythonhosted.org/packages/bc/3f/07f48ae422dce44902309aa7ed386c35310929dc592439c403ec16ef9137/lxml-6.0.1-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b4e597efca032ed99f418bd21314745522ab9fa95af33370dcee5533f7f70136", size = 5265786, upload-time = "2025-08-22T10:33:16.721Z" }, - { url = "https://files.pythonhosted.org/packages/11/c7/125315d7b14ab20d9155e8316f7d287a4956098f787c22d47560b74886c4/lxml-6.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9696d491f156226decdd95d9651c6786d43701e49f32bf23715c975539aa2b3b", size = 5062272, upload-time = "2025-08-22T10:33:18.478Z" }, - { url = "https://files.pythonhosted.org/packages/8b/c3/51143c3a5fc5168a7c3ee626418468ff20d30f5a59597e7b156c1e61fba8/lxml-6.0.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e4e3cd3585f3c6f87cdea44cda68e692cc42a012f0131d25957ba4ce755241a7", size = 4786955, upload-time = "2025-08-22T10:33:20.34Z" }, - { url = "https://files.pythonhosted.org/packages/11/86/73102370a420ec4529647b31c4a8ce8c740c77af3a5fae7a7643212d6f6e/lxml-6.0.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:45cbc92f9d22c28cd3b97f8d07fcefa42e569fbd587dfdac76852b16a4924277", size = 5673557, upload-time = "2025-08-22T10:33:22.282Z" }, - { url = "https://files.pythonhosted.org/packages/d7/2d/aad90afaec51029aef26ef773b8fd74a9e8706e5e2f46a57acd11a421c02/lxml-6.0.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:f8c9bcfd2e12299a442fba94459adf0b0d001dbc68f1594439bfa10ad1ecb74b", size = 5254211, upload-time = "2025-08-22T10:33:24.15Z" }, - { url = "https://files.pythonhosted.org/packages/63/01/c9e42c8c2d8b41f4bdefa42ab05448852e439045f112903dd901b8fbea4d/lxml-6.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1e9dc2b9f1586e7cd77753eae81f8d76220eed9b768f337dc83a3f675f2f0cf9", size = 5275817, upload-time = "2025-08-22T10:33:26.007Z" }, - { url = "https://files.pythonhosted.org/packages/bc/1f/962ea2696759abe331c3b0e838bb17e92224f39c638c2068bf0d8345e913/lxml-6.0.1-cp312-cp312-win32.whl", hash = "sha256:987ad5c3941c64031f59c226167f55a04d1272e76b241bfafc968bdb778e07fb", size = 3610889, upload-time = "2025-08-22T10:33:28.169Z" }, - { url = "https://files.pythonhosted.org/packages/41/e2/22c86a990b51b44442b75c43ecb2f77b8daba8c4ba63696921966eac7022/lxml-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:abb05a45394fd76bf4a60c1b7bec0e6d4e8dfc569fc0e0b1f634cd983a006ddc", size = 4010925, upload-time = "2025-08-22T10:33:29.874Z" }, - { url = "https://files.pythonhosted.org/packages/b2/21/dc0c73325e5eb94ef9c9d60dbb5dcdcb2e7114901ea9509735614a74e75a/lxml-6.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:c4be29bce35020d8579d60aa0a4e95effd66fcfce31c46ffddf7e5422f73a299", size = 3671922, upload-time = "2025-08-22T10:33:31.535Z" }, - { url = "https://files.pythonhosted.org/packages/41/37/41961f53f83ded57b37e65e4f47d1c6c6ef5fd02cb1d6ffe028ba0efa7d4/lxml-6.0.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b556aaa6ef393e989dac694b9c95761e32e058d5c4c11ddeef33f790518f7a5e", size = 3903412, upload-time = "2025-08-22T10:37:40.758Z" }, - { url = "https://files.pythonhosted.org/packages/3d/47/8631ea73f3dc776fb6517ccde4d5bd5072f35f9eacbba8c657caa4037a69/lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:64fac7a05ebb3737b79fd89fe5a5b6c5546aac35cfcfd9208eb6e5d13215771c", size = 4224810, upload-time = 
"2025-08-22T10:37:42.839Z" }, - { url = "https://files.pythonhosted.org/packages/3d/b8/39ae30ca3b1516729faeef941ed84bf8f12321625f2644492ed8320cb254/lxml-6.0.1-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:038d3c08babcfce9dc89aaf498e6da205efad5b7106c3b11830a488d4eadf56b", size = 4329221, upload-time = "2025-08-22T10:37:45.223Z" }, - { url = "https://files.pythonhosted.org/packages/9c/ea/048dea6cdfc7a72d40ae8ed7e7d23cf4a6b6a6547b51b492a3be50af0e80/lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:445f2cee71c404ab4259bc21e20339a859f75383ba2d7fb97dfe7c163994287b", size = 4270228, upload-time = "2025-08-22T10:37:47.276Z" }, - { url = "https://files.pythonhosted.org/packages/6b/d4/c2b46e432377c45d611ae2f669aa47971df1586c1a5240675801d0f02bac/lxml-6.0.1-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e352d8578e83822d70bea88f3d08b9912528e4c338f04ab707207ab12f4b7aac", size = 4416077, upload-time = "2025-08-22T10:37:49.822Z" }, - { url = "https://files.pythonhosted.org/packages/b6/db/8f620f1ac62cf32554821b00b768dd5957ac8e3fd051593532be5b40b438/lxml-6.0.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:51bd5d1a9796ca253db6045ab45ca882c09c071deafffc22e06975b7ace36300", size = 3518127, upload-time = "2025-08-22T10:37:51.66Z" }, + { url = "https://files.pythonhosted.org/packages/77/d5/becbe1e2569b474a23f0c672ead8a29ac50b2dc1d5b9de184831bda8d14c/lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607", size = 8634365, upload-time = "2025-09-22T04:00:45.672Z" }, + { url = "https://files.pythonhosted.org/packages/28/66/1ced58f12e804644426b85d0bb8a4478ca77bc1761455da310505f1a3526/lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938", size = 4650793, upload-time = "2025-09-22T04:00:47.783Z" }, + { url = "https://files.pythonhosted.org/packages/11/84/549098ffea39dfd167e3f174b4ce983d0eed61f9d8d25b7bf2a57c3247fc/lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d", size = 4944362, upload-time = "2025-09-22T04:00:49.845Z" }, + { url = "https://files.pythonhosted.org/packages/ac/bd/f207f16abf9749d2037453d56b643a7471d8fde855a231a12d1e095c4f01/lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438", size = 5083152, upload-time = "2025-09-22T04:00:51.709Z" }, + { url = "https://files.pythonhosted.org/packages/15/ae/bd813e87d8941d52ad5b65071b1affb48da01c4ed3c9c99e40abb266fbff/lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964", size = 5023539, upload-time = "2025-09-22T04:00:53.593Z" }, + { url = "https://files.pythonhosted.org/packages/02/cd/9bfef16bd1d874fbe0cb51afb00329540f30a3283beb9f0780adbb7eec03/lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d", size = 5344853, upload-time = "2025-09-22T04:00:55.524Z" }, + { url = "https://files.pythonhosted.org/packages/b8/89/ea8f91594bc5dbb879734d35a6f2b0ad50605d7fb419de2b63d4211765cc/lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7", size = 5225133, upload-time = "2025-09-22T04:00:57.269Z" }, + { url = "https://files.pythonhosted.org/packages/b9/37/9c735274f5dbec726b2db99b98a43950395ba3d4a1043083dba2ad814170/lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178", size = 4677944, upload-time = "2025-09-22T04:00:59.052Z" }, + { url = "https://files.pythonhosted.org/packages/20/28/7dfe1ba3475d8bfca3878365075abe002e05d40dfaaeb7ec01b4c587d533/lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553", size = 5284535, upload-time = "2025-09-22T04:01:01.335Z" }, + { url = "https://files.pythonhosted.org/packages/e7/cf/5f14bc0de763498fc29510e3532bf2b4b3a1c1d5d0dff2e900c16ba021ef/lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb", size = 5067343, upload-time = "2025-09-22T04:01:03.13Z" }, + { url = "https://files.pythonhosted.org/packages/1c/b0/bb8275ab5472f32b28cfbbcc6db7c9d092482d3439ca279d8d6fa02f7025/lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a", size = 4725419, upload-time = "2025-09-22T04:01:05.013Z" }, + { url = "https://files.pythonhosted.org/packages/25/4c/7c222753bc72edca3b99dbadba1b064209bc8ed4ad448af990e60dcce462/lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c", size = 5275008, upload-time = "2025-09-22T04:01:07.327Z" }, + { url = "https://files.pythonhosted.org/packages/6c/8c/478a0dc6b6ed661451379447cdbec77c05741a75736d97e5b2b729687828/lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7", size = 5248906, upload-time = "2025-09-22T04:01:09.452Z" }, + { url = "https://files.pythonhosted.org/packages/2d/d9/5be3a6ab2784cdf9accb0703b65e1b64fcdd9311c9f007630c7db0cfcce1/lxml-6.0.2-cp311-cp311-win32.whl", hash = "sha256:6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46", size = 3610357, upload-time = "2025-09-22T04:01:11.102Z" }, + { url = "https://files.pythonhosted.org/packages/e2/7d/ca6fb13349b473d5732fb0ee3eec8f6c80fc0688e76b7d79c1008481bf1f/lxml-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078", size = 4036583, upload-time = "2025-09-22T04:01:12.766Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a2/51363b5ecd3eab46563645f3a2c3836a2fc67d01a1b87c5017040f39f567/lxml-6.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285", size = 3680591, upload-time = "2025-09-22T04:01:14.874Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456", size = 8661887, upload-time = "2025-09-22T04:01:17.265Z" }, + { url = "https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924", size = 4667818, upload-time = "2025-09-22T04:01:19.688Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f", size = 4950807, upload-time = "2025-09-22T04:01:21.487Z" }, + { url = "https://files.pythonhosted.org/packages/da/87/f6cb9442e4bada8aab5ae7e1046264f62fdbeaa6e3f6211b93f4c0dd97f1/lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534", size = 5109179, upload-time = "2025-09-22T04:01:23.32Z" }, + { url = "https://files.pythonhosted.org/packages/c8/20/a7760713e65888db79bbae4f6146a6ae5c04e4a204a3c48896c408cd6ed2/lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564", size = 5023044, upload-time = "2025-09-22T04:01:25.118Z" }, + { url = "https://files.pythonhosted.org/packages/a2/b0/7e64e0460fcb36471899f75831509098f3fd7cd02a3833ac517433cb4f8f/lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f", size = 5359685, upload-time = "2025-09-22T04:01:27.398Z" }, + { url = "https://files.pythonhosted.org/packages/b9/e1/e5df362e9ca4e2f48ed6411bd4b3a0ae737cc842e96877f5bf9428055ab4/lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0", size = 5654127, upload-time = "2025-09-22T04:01:29.629Z" }, + { url = "https://files.pythonhosted.org/packages/c6/d1/232b3309a02d60f11e71857778bfcd4acbdb86c07db8260caf7d008b08f8/lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192", size = 5253958, upload-time = "2025-09-22T04:01:31.535Z" }, + { url = "https://files.pythonhosted.org/packages/35/35/d955a070994725c4f7d80583a96cab9c107c57a125b20bb5f708fe941011/lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0", size = 4711541, upload-time = "2025-09-22T04:01:33.801Z" }, + { url = "https://files.pythonhosted.org/packages/1e/be/667d17363b38a78c4bd63cfd4b4632029fd68d2c2dc81f25ce9eb5224dd5/lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092", size = 5267426, upload-time = "2025-09-22T04:01:35.639Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/62c70aa4a1c26569bc958c9ca86af2bb4e1f614e8c04fb2989833874f7ae/lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f", size = 5064917, upload-time = "2025-09-22T04:01:37.448Z" }, + { url = "https://files.pythonhosted.org/packages/bd/55/6ceddaca353ebd0f1908ef712c597f8570cc9c58130dbb89903198e441fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8", size = 4788795, upload-time = "2025-09-22T04:01:39.165Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e8/fd63e15da5e3fd4c2146f8bbb3c14e94ab850589beab88e547b2dbce22e1/lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f", size = 5676759, upload-time = 
"2025-09-22T04:01:41.506Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/b3ec58dc5c374697f5ba37412cd2728f427d056315d124dd4b61da381877/lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6", size = 5255666, upload-time = "2025-09-22T04:01:43.363Z" }, + { url = "https://files.pythonhosted.org/packages/19/93/03ba725df4c3d72afd9596eef4a37a837ce8e4806010569bedfcd2cb68fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322", size = 5277989, upload-time = "2025-09-22T04:01:45.215Z" }, + { url = "https://files.pythonhosted.org/packages/c6/80/c06de80bfce881d0ad738576f243911fccf992687ae09fd80b734712b39c/lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849", size = 3611456, upload-time = "2025-09-22T04:01:48.243Z" }, + { url = "https://files.pythonhosted.org/packages/f7/d7/0cdfb6c3e30893463fb3d1e52bc5f5f99684a03c29a0b6b605cfae879cd5/lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f", size = 4011793, upload-time = "2025-09-22T04:01:50.042Z" }, + { url = "https://files.pythonhosted.org/packages/ea/7b/93c73c67db235931527301ed3785f849c78991e2e34f3fd9a6663ffda4c5/lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6", size = 3672836, upload-time = "2025-09-22T04:01:52.145Z" }, + { url = "https://files.pythonhosted.org/packages/0b/11/29d08bc103a62c0eba8016e7ed5aeebbf1e4312e83b0b1648dd203b0e87d/lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700", size = 3949829, upload-time = "2025-09-22T04:04:45.608Z" }, + { url = "https://files.pythonhosted.org/packages/12/b3/52ab9a3b31e5ab8238da241baa19eec44d2ab426532441ee607165aebb52/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee", size = 4226277, upload-time = "2025-09-22T04:04:47.754Z" }, + { url = "https://files.pythonhosted.org/packages/a0/33/1eaf780c1baad88224611df13b1c2a9dfa460b526cacfe769103ff50d845/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f", size = 4330433, upload-time = "2025-09-22T04:04:49.907Z" }, + { url = "https://files.pythonhosted.org/packages/7a/c1/27428a2ff348e994ab4f8777d3a0ad510b6b92d37718e5887d2da99952a2/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9", size = 4272119, upload-time = "2025-09-22T04:04:51.801Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d0/3020fa12bcec4ab62f97aab026d57c2f0cfd480a558758d9ca233bb6a79d/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a", size = 4417314, upload-time = "2025-09-22T04:04:55.024Z" }, + { url = "https://files.pythonhosted.org/packages/6c/77/d7f491cbc05303ac6801651aabeb262d43f319288c1ea96c66b1d2692ff3/lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e", size = 3518768, upload-time = "2025-09-22T04:04:57.097Z" }, ] [[package]] @@ -3162,6 
+3205,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6c/e1/0686c91738f3e6c2e1a243e0fdd4371667c4d2e5009b0a3605806c2aa020/lz4-4.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:2f4f2965c98ab254feddf6b5072854a6935adab7bc81412ec4fe238f07b85f62", size = 89736, upload-time = "2025-04-01T22:55:40.5Z" }, ] +[[package]] +name = "madoka" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/da/eb/95288b1c4aa541eb296a6271e3f8c7ece03b78923ac47dbe95d2287d9f5e/madoka-0.7.1.tar.gz", hash = "sha256:e258baa84fc0a3764365993b8bf5e1b065383a6ca8c9f862fb3e3e709843fae7", size = 81413, upload-time = "2019-02-10T18:38:01.382Z" } + [[package]] name = "mako" version = "1.3.10" @@ -3197,30 +3246,32 @@ wheels = [ [[package]] name = "markupsafe" -version = "3.0.2" +version = "3.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, - { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, - { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, - { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, - { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, - { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, - { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, - { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, - { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, - { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, - { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, - { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, - { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, - { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, - { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, - { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, - { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, - { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, + { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, ] [[package]] @@ -3323,16 +3374,16 @@ wheels = [ [[package]] name = "msal" -version = "1.33.0" +version = "1.34.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, { name = "pyjwt", extra = ["crypto"] }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d5/da/81acbe0c1fd7e9e4ec35f55dadeba9833a847b9a6ba2e2d1e4432da901dd/msal-1.33.0.tar.gz", hash = "sha256:836ad80faa3e25a7d71015c990ce61f704a87328b1e73bcbb0623a18cbf17510", size = 153801, upload-time = "2025-07-22T19:36:33.693Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/0e/c857c46d653e104019a84f22d4494f2119b4fe9f896c92b4b864b3b045cc/msal-1.34.0.tar.gz", hash = "sha256:76ba83b716ea5a6d75b0279c0ac353a0e05b820ca1f6682c0eb7f45190c43c2f", size = 153961, upload-time = "2025-09-22T23:05:48.989Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/5b/fbc73e91f7727ae1e79b21ed833308e99dc11cc1cd3d4717f579775de5e9/msal-1.33.0-py3-none-any.whl", hash = "sha256:c0cd41cecf8eaed733ee7e3be9e040291eba53b0f262d3ae9c58f38b04244273", size = 116853, upload-time = "2025-07-22T19:36:32.403Z" }, + { url = "https://files.pythonhosted.org/packages/c2/dc/18d48843499e278538890dc709e9ee3dea8375f8be8e82682851df1b48b5/msal-1.34.0-py3-none-any.whl", hash = "sha256:f669b1644e4950115da7a176441b0e13ec2975c29528d8b9e81316023676d6e1", size = 116987, upload-time = "2025-09-22T23:05:47.294Z" }, ] [[package]] @@ -3349,47 +3400,47 @@ wheels = [ [[package]] name = "multidict" -version = "6.6.4" +version = "6.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/69/7f/0652e6ed47ab288e3756ea9c0df8b14950781184d4bd7883f4d87dd41245/multidict-6.6.4.tar.gz", hash = "sha256:d2d4e4787672911b48350df02ed3fa3fffdc2f2e8ca06dd6afdf34189b76a9dd", size = 101843, upload-time = "2025-08-11T12:08:48.217Z" } +sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = "2025-10-06T14:52:30.657Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/7f/90a7f01e2d005d6653c689039977f6856718c75c5579445effb7e60923d1/multidict-6.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c7a0e9b561e6460484318a7612e725df1145d46b0ef57c6b9866441bf6e27e0c", size = 76472, upload-time = "2025-08-11T12:06:29.006Z" }, - { url = 
"https://files.pythonhosted.org/packages/54/a3/bed07bc9e2bb302ce752f1dabc69e884cd6a676da44fb0e501b246031fdd/multidict-6.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6bf2f10f70acc7a2446965ffbc726e5fc0b272c97a90b485857e5c70022213eb", size = 44634, upload-time = "2025-08-11T12:06:30.374Z" }, - { url = "https://files.pythonhosted.org/packages/a7/4b/ceeb4f8f33cf81277da464307afeaf164fb0297947642585884f5cad4f28/multidict-6.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66247d72ed62d5dd29752ffc1d3b88f135c6a8de8b5f63b7c14e973ef5bda19e", size = 44282, upload-time = "2025-08-11T12:06:31.958Z" }, - { url = "https://files.pythonhosted.org/packages/03/35/436a5da8702b06866189b69f655ffdb8f70796252a8772a77815f1812679/multidict-6.6.4-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:105245cc6b76f51e408451a844a54e6823bbd5a490ebfe5bdfc79798511ceded", size = 229696, upload-time = "2025-08-11T12:06:33.087Z" }, - { url = "https://files.pythonhosted.org/packages/b6/0e/915160be8fecf1fca35f790c08fb74ca684d752fcba62c11daaf3d92c216/multidict-6.6.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cbbc54e58b34c3bae389ef00046be0961f30fef7cb0dd9c7756aee376a4f7683", size = 246665, upload-time = "2025-08-11T12:06:34.448Z" }, - { url = "https://files.pythonhosted.org/packages/08/ee/2f464330acd83f77dcc346f0b1a0eaae10230291450887f96b204b8ac4d3/multidict-6.6.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:56c6b3652f945c9bc3ac6c8178cd93132b8d82dd581fcbc3a00676c51302bc1a", size = 225485, upload-time = "2025-08-11T12:06:35.672Z" }, - { url = "https://files.pythonhosted.org/packages/71/cc/9a117f828b4d7fbaec6adeed2204f211e9caf0a012692a1ee32169f846ae/multidict-6.6.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b95494daf857602eccf4c18ca33337dd2be705bccdb6dddbfc9d513e6addb9d9", size = 257318, upload-time = "2025-08-11T12:06:36.98Z" }, - { url = "https://files.pythonhosted.org/packages/25/77/62752d3dbd70e27fdd68e86626c1ae6bccfebe2bb1f84ae226363e112f5a/multidict-6.6.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e5b1413361cef15340ab9dc61523e653d25723e82d488ef7d60a12878227ed50", size = 254689, upload-time = "2025-08-11T12:06:38.233Z" }, - { url = "https://files.pythonhosted.org/packages/00/6e/fac58b1072a6fc59af5e7acb245e8754d3e1f97f4f808a6559951f72a0d4/multidict-6.6.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e167bf899c3d724f9662ef00b4f7fef87a19c22b2fead198a6f68b263618df52", size = 246709, upload-time = "2025-08-11T12:06:39.517Z" }, - { url = "https://files.pythonhosted.org/packages/01/ef/4698d6842ef5e797c6db7744b0081e36fb5de3d00002cc4c58071097fac3/multidict-6.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aaea28ba20a9026dfa77f4b80369e51cb767c61e33a2d4043399c67bd95fb7c6", size = 243185, upload-time = "2025-08-11T12:06:40.796Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c9/d82e95ae1d6e4ef396934e9b0e942dfc428775f9554acf04393cce66b157/multidict-6.6.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8c91cdb30809a96d9ecf442ec9bc45e8cfaa0f7f8bdf534e082c2443a196727e", size = 237838, upload-time = "2025-08-11T12:06:42.595Z" }, - { url = 
"https://files.pythonhosted.org/packages/57/cf/f94af5c36baaa75d44fab9f02e2a6bcfa0cd90acb44d4976a80960759dbc/multidict-6.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a0ccbfe93ca114c5d65a2471d52d8829e56d467c97b0e341cf5ee45410033b3", size = 246368, upload-time = "2025-08-11T12:06:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/4a/fe/29f23460c3d995f6a4b678cb2e9730e7277231b981f0b234702f0177818a/multidict-6.6.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:55624b3f321d84c403cb7d8e6e982f41ae233d85f85db54ba6286f7295dc8a9c", size = 253339, upload-time = "2025-08-11T12:06:45.597Z" }, - { url = "https://files.pythonhosted.org/packages/29/b6/fd59449204426187b82bf8a75f629310f68c6adc9559dc922d5abe34797b/multidict-6.6.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4a1fb393a2c9d202cb766c76208bd7945bc194eba8ac920ce98c6e458f0b524b", size = 246933, upload-time = "2025-08-11T12:06:46.841Z" }, - { url = "https://files.pythonhosted.org/packages/19/52/d5d6b344f176a5ac3606f7a61fb44dc746e04550e1a13834dff722b8d7d6/multidict-6.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:43868297a5759a845fa3a483fb4392973a95fb1de891605a3728130c52b8f40f", size = 242225, upload-time = "2025-08-11T12:06:48.588Z" }, - { url = "https://files.pythonhosted.org/packages/ec/d3/5b2281ed89ff4d5318d82478a2a2450fcdfc3300da48ff15c1778280ad26/multidict-6.6.4-cp311-cp311-win32.whl", hash = "sha256:ed3b94c5e362a8a84d69642dbeac615452e8af9b8eb825b7bc9f31a53a1051e2", size = 41306, upload-time = "2025-08-11T12:06:49.95Z" }, - { url = "https://files.pythonhosted.org/packages/74/7d/36b045c23a1ab98507aefd44fd8b264ee1dd5e5010543c6fccf82141ccef/multidict-6.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:d8c112f7a90d8ca5d20213aa41eac690bb50a76da153e3afb3886418e61cb22e", size = 46029, upload-time = "2025-08-11T12:06:51.082Z" }, - { url = "https://files.pythonhosted.org/packages/0f/5e/553d67d24432c5cd52b49047f2d248821843743ee6d29a704594f656d182/multidict-6.6.4-cp311-cp311-win_arm64.whl", hash = "sha256:3bb0eae408fa1996d87247ca0d6a57b7fc1dcf83e8a5c47ab82c558c250d4adf", size = 43017, upload-time = "2025-08-11T12:06:52.243Z" }, - { url = "https://files.pythonhosted.org/packages/05/f6/512ffd8fd8b37fb2680e5ac35d788f1d71bbaf37789d21a820bdc441e565/multidict-6.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0ffb87be160942d56d7b87b0fdf098e81ed565add09eaa1294268c7f3caac4c8", size = 76516, upload-time = "2025-08-11T12:06:53.393Z" }, - { url = "https://files.pythonhosted.org/packages/99/58/45c3e75deb8855c36bd66cc1658007589662ba584dbf423d01df478dd1c5/multidict-6.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d191de6cbab2aff5de6c5723101705fd044b3e4c7cfd587a1929b5028b9714b3", size = 45394, upload-time = "2025-08-11T12:06:54.555Z" }, - { url = "https://files.pythonhosted.org/packages/fd/ca/e8c4472a93a26e4507c0b8e1f0762c0d8a32de1328ef72fd704ef9cc5447/multidict-6.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38a0956dd92d918ad5feff3db8fcb4a5eb7dba114da917e1a88475619781b57b", size = 43591, upload-time = "2025-08-11T12:06:55.672Z" }, - { url = "https://files.pythonhosted.org/packages/05/51/edf414f4df058574a7265034d04c935aa84a89e79ce90fcf4df211f47b16/multidict-6.6.4-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:6865f6d3b7900ae020b495d599fcf3765653bc927951c1abb959017f81ae8287", size = 237215, upload-time = "2025-08-11T12:06:57.213Z" }, - { url = 
"https://files.pythonhosted.org/packages/c8/45/8b3d6dbad8cf3252553cc41abea09ad527b33ce47a5e199072620b296902/multidict-6.6.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a2088c126b6f72db6c9212ad827d0ba088c01d951cee25e758c450da732c138", size = 258299, upload-time = "2025-08-11T12:06:58.946Z" }, - { url = "https://files.pythonhosted.org/packages/3c/e8/8ca2e9a9f5a435fc6db40438a55730a4bf4956b554e487fa1b9ae920f825/multidict-6.6.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0f37bed7319b848097085d7d48116f545985db988e2256b2e6f00563a3416ee6", size = 242357, upload-time = "2025-08-11T12:07:00.301Z" }, - { url = "https://files.pythonhosted.org/packages/0f/84/80c77c99df05a75c28490b2af8f7cba2a12621186e0a8b0865d8e745c104/multidict-6.6.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:01368e3c94032ba6ca0b78e7ccb099643466cf24f8dc8eefcfdc0571d56e58f9", size = 268369, upload-time = "2025-08-11T12:07:01.638Z" }, - { url = "https://files.pythonhosted.org/packages/0d/e9/920bfa46c27b05fb3e1ad85121fd49f441492dca2449c5bcfe42e4565d8a/multidict-6.6.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fe323540c255db0bffee79ad7f048c909f2ab0edb87a597e1c17da6a54e493c", size = 269341, upload-time = "2025-08-11T12:07:02.943Z" }, - { url = "https://files.pythonhosted.org/packages/af/65/753a2d8b05daf496f4a9c367fe844e90a1b2cac78e2be2c844200d10cc4c/multidict-6.6.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8eb3025f17b0a4c3cd08cda49acf312a19ad6e8a4edd9dbd591e6506d999402", size = 256100, upload-time = "2025-08-11T12:07:04.564Z" }, - { url = "https://files.pythonhosted.org/packages/09/54/655be13ae324212bf0bc15d665a4e34844f34c206f78801be42f7a0a8aaa/multidict-6.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bbc14f0365534d35a06970d6a83478b249752e922d662dc24d489af1aa0d1be7", size = 253584, upload-time = "2025-08-11T12:07:05.914Z" }, - { url = "https://files.pythonhosted.org/packages/5c/74/ab2039ecc05264b5cec73eb018ce417af3ebb384ae9c0e9ed42cb33f8151/multidict-6.6.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:75aa52fba2d96bf972e85451b99d8e19cc37ce26fd016f6d4aa60da9ab2b005f", size = 251018, upload-time = "2025-08-11T12:07:08.301Z" }, - { url = "https://files.pythonhosted.org/packages/af/0a/ccbb244ac848e56c6427f2392741c06302bbfba49c0042f1eb3c5b606497/multidict-6.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fefd4a815e362d4f011919d97d7b4a1e566f1dde83dc4ad8cfb5b41de1df68d", size = 251477, upload-time = "2025-08-11T12:07:10.248Z" }, - { url = "https://files.pythonhosted.org/packages/0e/b0/0ed49bba775b135937f52fe13922bc64a7eaf0a3ead84a36e8e4e446e096/multidict-6.6.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:db9801fe021f59a5b375ab778973127ca0ac52429a26e2fd86aa9508f4d26eb7", size = 263575, upload-time = "2025-08-11T12:07:11.928Z" }, - { url = "https://files.pythonhosted.org/packages/3e/d9/7fb85a85e14de2e44dfb6a24f03c41e2af8697a6df83daddb0e9b7569f73/multidict-6.6.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a650629970fa21ac1fb06ba25dabfc5b8a2054fcbf6ae97c758aa956b8dba802", size = 259649, upload-time = "2025-08-11T12:07:13.244Z" }, - { url = "https://files.pythonhosted.org/packages/03/9e/b3a459bcf9b6e74fa461a5222a10ff9b544cb1cd52fd482fb1b75ecda2a2/multidict-6.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:452ff5da78d4720d7516a3a2abd804957532dd69296cb77319c193e3ffb87e24", size = 251505, upload-time = "2025-08-11T12:07:14.57Z" }, - { url = "https://files.pythonhosted.org/packages/86/a2/8022f78f041dfe6d71e364001a5cf987c30edfc83c8a5fb7a3f0974cff39/multidict-6.6.4-cp312-cp312-win32.whl", hash = "sha256:8c2fcb12136530ed19572bbba61b407f655e3953ba669b96a35036a11a485793", size = 41888, upload-time = "2025-08-11T12:07:15.904Z" }, - { url = "https://files.pythonhosted.org/packages/c7/eb/d88b1780d43a56db2cba24289fa744a9d216c1a8546a0dc3956563fd53ea/multidict-6.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:047d9425860a8c9544fed1b9584f0c8bcd31bcde9568b047c5e567a1025ecd6e", size = 46072, upload-time = "2025-08-11T12:07:17.045Z" }, - { url = "https://files.pythonhosted.org/packages/9f/16/b929320bf5750e2d9d4931835a4c638a19d2494a5b519caaaa7492ebe105/multidict-6.6.4-cp312-cp312-win_arm64.whl", hash = "sha256:14754eb72feaa1e8ae528468f24250dd997b8e2188c3d2f593f9eba259e4b364", size = 43222, upload-time = "2025-08-11T12:07:18.328Z" }, - { url = "https://files.pythonhosted.org/packages/fd/69/b547032297c7e63ba2af494edba695d781af8a0c6e89e4d06cf848b21d80/multidict-6.6.4-py3-none-any.whl", hash = "sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c", size = 12313, upload-time = "2025-08-11T12:08:46.891Z" }, + { url = "https://files.pythonhosted.org/packages/34/9e/5c727587644d67b2ed479041e4b1c58e30afc011e3d45d25bbe35781217c/multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc", size = 76604, upload-time = "2025-10-06T14:48:54.277Z" }, + { url = "https://files.pythonhosted.org/packages/17/e4/67b5c27bd17c085a5ea8f1ec05b8a3e5cba0ca734bfcad5560fb129e70ca/multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721", size = 44715, upload-time = "2025-10-06T14:48:55.445Z" }, + { url = "https://files.pythonhosted.org/packages/4d/e1/866a5d77be6ea435711bef2a4291eed11032679b6b28b56b4776ab06ba3e/multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6", size = 44332, upload-time = "2025-10-06T14:48:56.706Z" }, + { url = "https://files.pythonhosted.org/packages/31/61/0c2d50241ada71ff61a79518db85ada85fdabfcf395d5968dae1cbda04e5/multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c", size = 245212, upload-time = "2025-10-06T14:48:58.042Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e0/919666a4e4b57fff1b57f279be1c9316e6cdc5de8a8b525d76f6598fefc7/multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7", size = 246671, upload-time = "2025-10-06T14:49:00.004Z" }, + { url = "https://files.pythonhosted.org/packages/a1/cc/d027d9c5a520f3321b65adea289b965e7bcbd2c34402663f482648c716ce/multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7", size = 225491, upload-time = "2025-10-06T14:49:01.393Z" }, + { url = "https://files.pythonhosted.org/packages/75/c4/bbd633980ce6155a28ff04e6a6492dd3335858394d7bb752d8b108708558/multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", 
hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9", size = 257322, upload-time = "2025-10-06T14:49:02.745Z" }, + { url = "https://files.pythonhosted.org/packages/4c/6d/d622322d344f1f053eae47e033b0b3f965af01212de21b10bcf91be991fb/multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8", size = 254694, upload-time = "2025-10-06T14:49:04.15Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9f/78f8761c2705d4c6d7516faed63c0ebdac569f6db1bef95e0d5218fdc146/multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd", size = 246715, upload-time = "2025-10-06T14:49:05.967Z" }, + { url = "https://files.pythonhosted.org/packages/78/59/950818e04f91b9c2b95aab3d923d9eabd01689d0dcd889563988e9ea0fd8/multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb", size = 243189, upload-time = "2025-10-06T14:49:07.37Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3d/77c79e1934cad2ee74991840f8a0110966d9599b3af95964c0cd79bb905b/multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6", size = 237845, upload-time = "2025-10-06T14:49:08.759Z" }, + { url = "https://files.pythonhosted.org/packages/63/1b/834ce32a0a97a3b70f86437f685f880136677ac00d8bce0027e9fd9c2db7/multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2", size = 246374, upload-time = "2025-10-06T14:49:10.574Z" }, + { url = "https://files.pythonhosted.org/packages/23/ef/43d1c3ba205b5dec93dc97f3fba179dfa47910fc73aaaea4f7ceb41cec2a/multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff", size = 253345, upload-time = "2025-10-06T14:49:12.331Z" }, + { url = "https://files.pythonhosted.org/packages/6b/03/eaf95bcc2d19ead522001f6a650ef32811aa9e3624ff0ad37c445c7a588c/multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b", size = 246940, upload-time = "2025-10-06T14:49:13.821Z" }, + { url = "https://files.pythonhosted.org/packages/e8/df/ec8a5fd66ea6cd6f525b1fcbb23511b033c3e9bc42b81384834ffa484a62/multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34", size = 242229, upload-time = "2025-10-06T14:49:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a2/59b405d59fd39ec86d1142630e9049243015a5f5291ba49cadf3c090c541/multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff", size = 41308, upload-time = "2025-10-06T14:49:16.871Z" }, + { url = "https://files.pythonhosted.org/packages/32/0f/13228f26f8b882c34da36efa776c3b7348455ec383bab4a66390e42963ae/multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81", size = 46037, upload-time = "2025-10-06T14:49:18.457Z" }, + { url = "https://files.pythonhosted.org/packages/84/1f/68588e31b000535a3207fd3c909ebeec4fb36b52c442107499c18a896a2a/multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = 
"sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912", size = 43023, upload-time = "2025-10-06T14:49:19.648Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, upload-time = "2025-10-06T14:49:20.884Z" }, + { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = "2025-10-06T14:49:22.054Z" }, + { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" }, + { url = "https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545, upload-time = "2025-10-06T14:49:24.882Z" }, + { url = "https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305, upload-time = "2025-10-06T14:49:26.778Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363, upload-time = "2025-10-06T14:49:28.562Z" }, + { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375, upload-time = "2025-10-06T14:49:29.96Z" }, + { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346, upload-time = "2025-10-06T14:49:31.404Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107, upload-time = "2025-10-06T14:49:32.974Z" }, + { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592, upload-time = "2025-10-06T14:49:34.52Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024, upload-time = "2025-10-06T14:49:35.956Z" }, + { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484, upload-time = "2025-10-06T14:49:37.631Z" }, + { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579, upload-time = "2025-10-06T14:49:39.502Z" }, + { url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654, upload-time = "2025-10-06T14:49:41.32Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511, upload-time = "2025-10-06T14:49:46.021Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895, upload-time = "2025-10-06T14:49:48.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073, upload-time = "2025-10-06T14:49:50.28Z" }, + { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226, upload-time = "2025-10-06T14:49:52.304Z" }, + { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, ] [[package]] @@ -3420,14 +3471,14 @@ wheels = [ [[package]] name = "mypy-boto3-bedrock-runtime" -version = "1.40.21" +version = "1.40.41" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3c/ff/074a1e1425d04e7294c962803655e85e20e158734534ce8d302efaa8230a/mypy_boto3_bedrock_runtime-1.40.21.tar.gz", hash = "sha256:fa9401e86d42484a53803b1dba0782d023ab35c817256e707fbe4fff88aeb881", size = 28326, upload-time = "2025-08-29T19:25:09.405Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/38/79989f7bce998776ed1a01c17f3f58e7bc6f5fc2bcbdff929701526fa2f1/mypy_boto3_bedrock_runtime-1.40.41.tar.gz", hash = 
"sha256:ee9bda6d6d478c8d0995e84e884bdf1798e150d437974ae27c175774a58ffaa5", size = 28333, upload-time = "2025-09-29T19:26:04.804Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/80/02/9d3b881bee5552600c6f456e446069d5beffd2b7862b99e1e945d60d6a9b/mypy_boto3_bedrock_runtime-1.40.21-py3-none-any.whl", hash = "sha256:4c9ea181ef00cb3d15f9b051a50e3b78272122d24cd24ac34938efe6ddfecc62", size = 34149, upload-time = "2025-08-29T19:25:03.941Z" }, + { url = "https://files.pythonhosted.org/packages/3d/6c/d3431dadf473bb76aa590b1ed8cc91726a48b029b542eff9d3024f2d70b9/mypy_boto3_bedrock_runtime-1.40.41-py3-none-any.whl", hash = "sha256:d65dff200986ff06c6b3579ddcea102555f2067c8987fca379bf4f9ed8ba3121", size = 34181, upload-time = "2025-09-29T19:26:01.898Z" }, ] [[package]] @@ -3478,7 +3529,7 @@ wheels = [ [[package]] name = "nltk" -version = "3.9.1" +version = "3.9.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -3486,74 +3537,74 @@ dependencies = [ { name = "regex" }, { name = "tqdm" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3c/87/db8be88ad32c2d042420b6fd9ffd4a149f9a0d7f0e86b3f543be2eeeedd2/nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868", size = 2904691, upload-time = "2024-08-18T19:48:37.769Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/76/3a5e4312c19a028770f86fd7c058cf9f4ec4321c6cf7526bab998a5b683c/nltk-3.9.2.tar.gz", hash = "sha256:0f409e9b069ca4177c1903c3e843eef90c7e92992fa4931ae607da6de49e1419", size = 2887629, upload-time = "2025-10-01T07:19:23.764Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/66/7d9e26593edda06e8cb531874633f7c2372279c3b0f46235539fe546df8b/nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1", size = 1505442, upload-time = "2024-08-18T19:48:21.909Z" }, + { url = "https://files.pythonhosted.org/packages/60/90/81ac364ef94209c100e12579629dc92bf7a709a84af32f8c551b02c07e94/nltk-3.9.2-py3-none-any.whl", hash = "sha256:1e209d2b3009110635ed9709a67a1a3e33a10f799490fa71cf4bec218c11c88a", size = 1513404, upload-time = "2025-10-01T07:19:21.648Z" }, ] [[package]] name = "nodejs-wheel-binaries" -version = "22.19.0" +version = "22.20.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/ca/6033f80b7aebc23cb31ed8b09608b6308c5273c3522aedd043e8a0644d83/nodejs_wheel_binaries-22.19.0.tar.gz", hash = "sha256:e69b97ef443d36a72602f7ed356c6a36323873230f894799f4270a853932fdb3", size = 8060, upload-time = "2025-09-12T10:33:46.935Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/54/02f58c8119e2f1984e2572cc77a7b469dbaf4f8d171ad376e305749ef48e/nodejs_wheel_binaries-22.20.0.tar.gz", hash = "sha256:a62d47c9fd9c32191dff65bbe60261504f26992a0a19fe8b4d523256a84bd351", size = 8058, upload-time = "2025-09-26T09:48:00.906Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/93/a2/0d055fd1d8c9a7a971c4db10cf42f3bba57c964beb6cf383ca053f2cdd20/nodejs_wheel_binaries-22.19.0-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:43eca1526455a1fb4cb777095198f7ebe5111a4444749c87f5c2b84645aaa72a", size = 50902454, upload-time = "2025-09-12T10:33:18.3Z" }, - { url = "https://files.pythonhosted.org/packages/b5/f5/446f7b3c5be1d2f5145ffa3c9aac3496e06cdf0f436adeb21a1f95dd79a7/nodejs_wheel_binaries-22.19.0-py2.py3-none-macosx_11_0_x86_64.whl", hash = 
"sha256:feb06709e1320790d34babdf71d841ec7f28e4c73217d733e7f5023060a86bfc", size = 51837860, upload-time = "2025-09-12T10:33:21.599Z" }, - { url = "https://files.pythonhosted.org/packages/1e/4e/d0a036f04fd0f5dc3ae505430657044b8d9853c33be6b2d122bb171aaca3/nodejs_wheel_binaries-22.19.0-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9f5777292491430457c99228d3a267decf12a09d31246f0692391e3513285e", size = 57841528, upload-time = "2025-09-12T10:33:25.433Z" }, - { url = "https://files.pythonhosted.org/packages/e2/11/4811d27819f229cc129925c170db20c12d4f01ad366a0066f06d6eb833cf/nodejs_wheel_binaries-22.19.0-py2.py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1392896f1a05a88a8a89b26e182d90fdf3020b4598a047807b91b65731e24c00", size = 58368815, upload-time = "2025-09-12T10:33:29.083Z" }, - { url = "https://files.pythonhosted.org/packages/6e/94/df41416856b980e38a7ff280cfb59f142a77955ccdbec7cc4260d8ab2e78/nodejs_wheel_binaries-22.19.0-py2.py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:9164c876644f949cad665e3ada00f75023e18f381e78a1d7b60ccbbfb4086e73", size = 59690937, upload-time = "2025-09-12T10:33:32.771Z" }, - { url = "https://files.pythonhosted.org/packages/d1/39/8d0d5f84b7616bdc4eca725f5d64a1cfcac3d90cf3f30cae17d12f8e987f/nodejs_wheel_binaries-22.19.0-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6b4b75166134010bc9cfebd30dc57047796a27049fef3fc22316216d76bc0af7", size = 60751996, upload-time = "2025-09-12T10:33:36.962Z" }, - { url = "https://files.pythonhosted.org/packages/41/93/2d66b5b60055dd1de6e37e35bef563c15e4cafa5cfe3a6990e0ab358e515/nodejs_wheel_binaries-22.19.0-py2.py3-none-win_amd64.whl", hash = "sha256:3f271f5abfc71b052a6b074225eca8c1223a0f7216863439b86feaca814f6e5a", size = 40026140, upload-time = "2025-09-12T10:33:40.33Z" }, - { url = "https://files.pythonhosted.org/packages/a3/46/c9cf7ff7e3c71f07ca8331c939afd09b6e59fc85a2944ea9411e8b29ce50/nodejs_wheel_binaries-22.19.0-py2.py3-none-win_arm64.whl", hash = "sha256:666a355fe0c9bde44a9221cd543599b029045643c8196b8eedb44f28dc192e06", size = 38804500, upload-time = "2025-09-12T10:33:43.302Z" }, + { url = "https://files.pythonhosted.org/packages/24/6d/333e5458422f12318e3c3e6e7f194353aa68b0d633217c7e89833427ca01/nodejs_wheel_binaries-22.20.0-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:455add5ac4f01c9c830ab6771dbfad0fdf373f9b040d3aabe8cca9b6c56654fb", size = 53246314, upload-time = "2025-09-26T09:47:32.536Z" }, + { url = "https://files.pythonhosted.org/packages/56/30/dcd6879d286a35b3c4c8f9e5e0e1bcf4f9e25fe35310fc77ecf97f915a23/nodejs_wheel_binaries-22.20.0-py2.py3-none-macosx_11_0_x86_64.whl", hash = "sha256:5d8c12f97eea7028b34a84446eb5ca81829d0c428dfb4e647e09ac617f4e21fa", size = 53644391, upload-time = "2025-09-26T09:47:36.093Z" }, + { url = "https://files.pythonhosted.org/packages/58/be/c7b2e7aa3bb281d380a1c531f84d0ccfe225832dfc3bed1ca171753b9630/nodejs_wheel_binaries-22.20.0-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a2b0989194148f66e9295d8f11bc463bde02cbe276517f4d20a310fb84780ae", size = 60282516, upload-time = "2025-09-26T09:47:39.88Z" }, + { url = "https://files.pythonhosted.org/packages/3e/c5/8befacf4190e03babbae54cb0809fb1a76e1600ec3967ab8ee9f8fc85b65/nodejs_wheel_binaries-22.20.0-py2.py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5c500aa4dc046333ecb0a80f183e069e5c30ce637f1c1a37166b2c0b642dc21", size = 60347290, upload-time = "2025-09-26T09:47:43.712Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/bd/cfffd1e334277afa0714962c6ec432b5fe339340a6bca2e5fa8e678e7590/nodejs_wheel_binaries-22.20.0-py2.py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3279eb1b99521f0d20a850bbfc0159a658e0e85b843b3cf31b090d7da9f10dfc", size = 62178798, upload-time = "2025-09-26T09:47:47.752Z" }, + { url = "https://files.pythonhosted.org/packages/08/14/10b83a9c02faac985b3e9f5e65d63a34fc0f46b48d8a2c3e4caa3e1e7318/nodejs_wheel_binaries-22.20.0-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d29705797b33bade62d79d8f106c2453c8a26442a9b2a5576610c0f7e7c351ed", size = 62772957, upload-time = "2025-09-26T09:47:51.266Z" }, + { url = "https://files.pythonhosted.org/packages/b4/a9/c6a480259aa0d6b270aac2c6ba73a97444b9267adde983a5b7e34f17e45a/nodejs_wheel_binaries-22.20.0-py2.py3-none-win_amd64.whl", hash = "sha256:4bd658962f24958503541963e5a6f2cc512a8cb301e48a69dc03c879f40a28ae", size = 40120431, upload-time = "2025-09-26T09:47:54.363Z" }, + { url = "https://files.pythonhosted.org/packages/42/b1/6a4eb2c6e9efa028074b0001b61008c9d202b6b46caee9e5d1b18c088216/nodejs_wheel_binaries-22.20.0-py2.py3-none-win_arm64.whl", hash = "sha256:1fccac931faa210d22b6962bcdbc99269d16221d831b9a118bbb80fe434a60b8", size = 38844133, upload-time = "2025-09-26T09:47:57.357Z" }, ] [[package]] name = "numba" -version = "0.61.2" +version = "0.62.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "llvmlite" }, { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1c/a0/e21f57604304aa03ebb8e098429222722ad99176a4f979d34af1d1ee80da/numba-0.61.2.tar.gz", hash = "sha256:8750ee147940a6637b80ecf7f95062185ad8726c8c28a2295b8ec1160a196f7d", size = 2820615, upload-time = "2025-04-09T02:58:07.659Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/20/33dbdbfe60e5fd8e3dbfde299d106279a33d9f8308346022316781368591/numba-0.62.1.tar.gz", hash = "sha256:7b774242aa890e34c21200a1fc62e5b5757d5286267e71103257f4e2af0d5161", size = 2749817, upload-time = "2025-09-29T10:46:31.551Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/97/c99d1056aed767503c228f7099dc11c402906b42a4757fec2819329abb98/numba-0.61.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:efd3db391df53aaa5cfbee189b6c910a5b471488749fd6606c3f33fc984c2ae2", size = 2775825, upload-time = "2025-04-09T02:57:43.442Z" }, - { url = "https://files.pythonhosted.org/packages/95/9e/63c549f37136e892f006260c3e2613d09d5120672378191f2dc387ba65a2/numba-0.61.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:49c980e4171948ffebf6b9a2520ea81feed113c1f4890747ba7f59e74be84b1b", size = 2778695, upload-time = "2025-04-09T02:57:44.968Z" }, - { url = "https://files.pythonhosted.org/packages/97/c8/8740616c8436c86c1b9a62e72cb891177d2c34c2d24ddcde4c390371bf4c/numba-0.61.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3945615cd73c2c7eba2a85ccc9c1730c21cd3958bfcf5a44302abae0fb07bb60", size = 3829227, upload-time = "2025-04-09T02:57:46.63Z" }, - { url = "https://files.pythonhosted.org/packages/fc/06/66e99ae06507c31d15ff3ecd1f108f2f59e18b6e08662cd5f8a5853fbd18/numba-0.61.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbfdf4eca202cebade0b7d43896978e146f39398909a42941c9303f82f403a18", size = 3523422, upload-time = "2025-04-09T02:57:48.222Z" }, - { url = "https://files.pythonhosted.org/packages/0f/a4/2b309a6a9f6d4d8cfba583401c7c2f9ff887adb5d54d8e2e130274c0973f/numba-0.61.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:76bcec9f46259cedf888041b9886e257ae101c6268261b19fda8cfbc52bec9d1", size = 2831505, upload-time = "2025-04-09T02:57:50.108Z" }, - { url = "https://files.pythonhosted.org/packages/b4/a0/c6b7b9c615cfa3b98c4c63f4316e3f6b3bbe2387740277006551784218cd/numba-0.61.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:34fba9406078bac7ab052efbf0d13939426c753ad72946baaa5bf9ae0ebb8dd2", size = 2776626, upload-time = "2025-04-09T02:57:51.857Z" }, - { url = "https://files.pythonhosted.org/packages/92/4a/fe4e3c2ecad72d88f5f8cd04e7f7cff49e718398a2fac02d2947480a00ca/numba-0.61.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ddce10009bc097b080fc96876d14c051cc0c7679e99de3e0af59014dab7dfe8", size = 2779287, upload-time = "2025-04-09T02:57:53.658Z" }, - { url = "https://files.pythonhosted.org/packages/9a/2d/e518df036feab381c23a624dac47f8445ac55686ec7f11083655eb707da3/numba-0.61.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b1bb509d01f23d70325d3a5a0e237cbc9544dd50e50588bc581ba860c213546", size = 3885928, upload-time = "2025-04-09T02:57:55.206Z" }, - { url = "https://files.pythonhosted.org/packages/10/0f/23cced68ead67b75d77cfcca3df4991d1855c897ee0ff3fe25a56ed82108/numba-0.61.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48a53a3de8f8793526cbe330f2a39fe9a6638efcbf11bd63f3d2f9757ae345cd", size = 3577115, upload-time = "2025-04-09T02:57:56.818Z" }, - { url = "https://files.pythonhosted.org/packages/68/1d/ddb3e704c5a8fb90142bf9dc195c27db02a08a99f037395503bfbc1d14b3/numba-0.61.2-cp312-cp312-win_amd64.whl", hash = "sha256:97cf4f12c728cf77c9c1d7c23707e4d8fb4632b46275f8f3397de33e5877af18", size = 2831929, upload-time = "2025-04-09T02:57:58.45Z" }, + { url = "https://files.pythonhosted.org/packages/dd/5f/8b3491dd849474f55e33c16ef55678ace1455c490555337899c35826836c/numba-0.62.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:f43e24b057714e480fe44bc6031de499e7cf8150c63eb461192caa6cc8530bc8", size = 2684279, upload-time = "2025-09-29T10:43:37.213Z" }, + { url = "https://files.pythonhosted.org/packages/bf/18/71969149bfeb65a629e652b752b80167fe8a6a6f6e084f1f2060801f7f31/numba-0.62.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:57cbddc53b9ee02830b828a8428757f5c218831ccc96490a314ef569d8342b7b", size = 2687330, upload-time = "2025-09-29T10:43:59.601Z" }, + { url = "https://files.pythonhosted.org/packages/0e/7d/403be3fecae33088027bc8a95dc80a2fda1e3beff3e0e5fc4374ada3afbe/numba-0.62.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:604059730c637c7885386521bb1b0ddcbc91fd56131a6dcc54163d6f1804c872", size = 3739727, upload-time = "2025-09-29T10:42:45.922Z" }, + { url = "https://files.pythonhosted.org/packages/e0/c3/3d910d08b659a6d4c62ab3cd8cd93c4d8b7709f55afa0d79a87413027ff6/numba-0.62.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6c540880170bee817011757dc9049dba5a29db0c09b4d2349295991fe3ee55f", size = 3445490, upload-time = "2025-09-29T10:43:12.692Z" }, + { url = "https://files.pythonhosted.org/packages/5b/82/9d425c2f20d9f0a37f7cb955945a553a00fa06a2b025856c3550227c5543/numba-0.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:03de6d691d6b6e2b76660ba0f38f37b81ece8b2cc524a62f2a0cfae2bfb6f9da", size = 2745550, upload-time = "2025-09-29T10:44:20.571Z" }, + { url = "https://files.pythonhosted.org/packages/5e/fa/30fa6873e9f821c0ae755915a3ca444e6ff8d6a7b6860b669a3d33377ac7/numba-0.62.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:1b743b32f8fa5fff22e19c2e906db2f0a340782caf024477b97801b918cf0494", size = 
2685346, upload-time = "2025-09-29T10:43:43.677Z" }, + { url = "https://files.pythonhosted.org/packages/a9/d5/504ce8dc46e0dba2790c77e6b878ee65b60fe3e7d6d0006483ef6fde5a97/numba-0.62.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90fa21b0142bcf08ad8e32a97d25d0b84b1e921bc9423f8dda07d3652860eef6", size = 2688139, upload-time = "2025-09-29T10:44:04.894Z" }, + { url = "https://files.pythonhosted.org/packages/50/5f/6a802741176c93f2ebe97ad90751894c7b0c922b52ba99a4395e79492205/numba-0.62.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6ef84d0ac19f1bf80431347b6f4ce3c39b7ec13f48f233a48c01e2ec06ecbc59", size = 3796453, upload-time = "2025-09-29T10:42:52.771Z" }, + { url = "https://files.pythonhosted.org/packages/7e/df/efd21527d25150c4544eccc9d0b7260a5dec4b7e98b5a581990e05a133c0/numba-0.62.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9315cc5e441300e0ca07c828a627d92a6802bcbf27c5487f31ae73783c58da53", size = 3496451, upload-time = "2025-09-29T10:43:19.279Z" }, + { url = "https://files.pythonhosted.org/packages/80/44/79bfdab12a02796bf4f1841630355c82b5a69933b1d50eb15c7fa37dabe8/numba-0.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:44e3aa6228039992f058f5ebfcfd372c83798e9464297bdad8cc79febcf7891e", size = 2745552, upload-time = "2025-09-29T10:44:26.399Z" }, ] [[package]] name = "numexpr" -version = "2.12.1" +version = "2.14.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7c/08/211c9ae8a230f20976f3b0b9a3308264c62bd05caf92aba7c59beebf6049/numexpr-2.12.1.tar.gz", hash = "sha256:e239faed0af001d1f1ea02934f7b3bb2bb6711ddb98e7a7bef61be5f45ff54ab", size = 115053, upload-time = "2025-09-11T11:04:04.36Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/2f/fdba158c9dbe5caca9c3eca3eaffffb251f2fb8674bf8e2d0aed5f38d319/numexpr-2.14.1.tar.gz", hash = "sha256:4be00b1086c7b7a5c32e31558122b7b80243fe098579b170967da83f3152b48b", size = 119400, upload-time = "2025-10-13T16:17:27.351Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/df/a1/e10d3812e352eeedacea964ae7078181f5da659f77f65f4ff75aca67372c/numexpr-2.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ac38131930d6a1c4760f384621b9bd6fd8ab557147e81b7bcce777d557ee81", size = 154204, upload-time = "2025-09-11T11:02:20.607Z" }, - { url = "https://files.pythonhosted.org/packages/a2/fc/8e30453e82ffa2a25ccc263a69cb90bad4c195ce91d2c53c6d8699564b95/numexpr-2.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea09d6e669de2f7a92228d38d58ca0e59eeb83100a9b93b6467547ffdf93ceeb", size = 144226, upload-time = "2025-09-11T11:02:21.957Z" }, - { url = "https://files.pythonhosted.org/packages/3d/3a/4ea9dca5d82e8654ad54f788af6215d72ad9afc650f8f21098923391b8a8/numexpr-2.12.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:05ec71d3feae4a96c177d696de608d6003de96a0ed6c725e229d29c6ea495a2e", size = 422124, upload-time = "2025-09-11T11:02:23.017Z" }, - { url = "https://files.pythonhosted.org/packages/4e/42/26432c6d691c2534edcdd66d8c8aefeac90a71b6c767ab569609d2683869/numexpr-2.12.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:09375dbc588c1042e99963289bcf2092d427a27e680ad267fe7e83fd1913d57f", size = 411888, upload-time = "2025-09-11T11:02:24.525Z" }, - { url = "https://files.pythonhosted.org/packages/49/20/c00814929daad00193e3d07f176066f17d83c064dec26699bd02e64cefbd/numexpr-2.12.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash 
= "sha256:c6a16946a7a9c6fe6e68da87b822eaa9c2edb0e0d36885218c1b8122772f8068", size = 1387205, upload-time = "2025-09-11T11:02:25.701Z" }, - { url = "https://files.pythonhosted.org/packages/a8/1f/61c7d82321face677fb8fdd486c1a8fe64bcbcf184f65cc76c8ff2ee0c19/numexpr-2.12.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:aa47f6d3798e9f9677acdea40ff6dd72fd0f2993b87fc1a85e120acbac99323b", size = 1434537, upload-time = "2025-09-11T11:02:26.937Z" }, - { url = "https://files.pythonhosted.org/packages/09/0e/7996ad143e2a5b4f295da718dba70c2108e6070bcff494c4a55f0b19c315/numexpr-2.12.1-cp311-cp311-win32.whl", hash = "sha256:d77311ce7910c14ebf45dec6ac98a597493b63e146a86bfd94128bdcdd7d2a3f", size = 156808, upload-time = "2025-09-11T11:02:28.126Z" }, - { url = "https://files.pythonhosted.org/packages/ce/7b/6ea78f0f5a39057cc10057bcd0d9e814ff60dc3698cbcd36b178c7533931/numexpr-2.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:4c3d6e524c4a386bc77cd3472b370c1bbe50e23c0a6d66960a006ad90db61d4d", size = 151235, upload-time = "2025-09-11T11:02:29.098Z" }, - { url = "https://files.pythonhosted.org/packages/7b/17/817f21537fc7827b55691990e44f1260e295be7e68bb37d4bc8741439723/numexpr-2.12.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cba7e922b813fd46415fbeac618dd78169a6acb6bd10e6055c1cd8a8f8bebd6e", size = 153915, upload-time = "2025-09-11T11:02:30.15Z" }, - { url = "https://files.pythonhosted.org/packages/0a/11/65d9d918339e6b9116f8cda9210249a3127843aef9f147d50cd2dad10d60/numexpr-2.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:33e5f20bc5a64c163beeed6c57e75497247c779531266e255f93c76c57248a49", size = 144358, upload-time = "2025-09-11T11:02:31.173Z" }, - { url = "https://files.pythonhosted.org/packages/64/1d/8d349126ea9c00002b574aa5310a5eb669d3cf4e82e45ff643aa01ac48fe/numexpr-2.12.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:59958402930d13fafbf8c9fdff5b0866f0ea04083f877743b235447725aaea97", size = 423752, upload-time = "2025-09-11T11:02:32.208Z" }, - { url = "https://files.pythonhosted.org/packages/ba/4a/a16aba2aa141c6634bf619bf8d069942c3f875b71ae0650172bcff0200ec/numexpr-2.12.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:12bb47518bfbc740afe4119fe141d20e715ab29e910250c96954d2794c0e6aa4", size = 413612, upload-time = "2025-09-11T11:02:33.656Z" }, - { url = "https://files.pythonhosted.org/packages/d0/61/91b85d42541a6517cc1a9f9dabc730acc56b724f4abdc5c84513558a0c79/numexpr-2.12.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5e579d9a4a183f09affe102577e757e769150c0145c3ee46fbd00345d531d42b", size = 1388903, upload-time = "2025-09-11T11:02:35.229Z" }, - { url = "https://files.pythonhosted.org/packages/8d/58/2913b7938bd656e412fd41213dcd56cb72978a72d3b03636ab021eadc4ee/numexpr-2.12.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:69ba864878665f4289ef675997276439a854012044b442ce9048a03e39b8191e", size = 1436092, upload-time = "2025-09-11T11:02:36.363Z" }, - { url = "https://files.pythonhosted.org/packages/fc/31/c1863597c26d92554af29a3fff5b05d4c1885cf5450a690724c7cee04af9/numexpr-2.12.1-cp312-cp312-win32.whl", hash = "sha256:713410f76c0bbe08947c3d49477db05944ce0094449845591859e250866ba074", size = 156948, upload-time = "2025-09-11T11:02:37.518Z" }, - { url = "https://files.pythonhosted.org/packages/f5/ca/c9bc0f460d352ab5934d659a4cb5bc9529e20e78ac60f906d7e41cbfbd42/numexpr-2.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:c32f934066608a32501e06d99b93e6f2dded33606905f9af40e1f4649973ae6e", size = 151370, upload-time = 
"2025-09-11T11:02:38.445Z" }, + { url = "https://files.pythonhosted.org/packages/b2/a3/67999bdd1ed1f938d38f3fedd4969632f2f197b090e50505f7cc1fa82510/numexpr-2.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2d03fcb4644a12f70a14d74006f72662824da5b6128bf1bcd10cc3ed80e64c34", size = 163195, upload-time = "2025-10-13T16:16:31.212Z" }, + { url = "https://files.pythonhosted.org/packages/25/95/d64f680ea1fc56d165457287e0851d6708800f9fcea346fc1b9957942ee6/numexpr-2.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2773ee1133f77009a1fc2f34fe236f3d9823779f5f75450e183137d49f00499f", size = 152088, upload-time = "2025-10-13T16:16:33.186Z" }, + { url = "https://files.pythonhosted.org/packages/0e/7f/3bae417cb13ae08afd86d08bb0301c32440fe0cae4e6262b530e0819aeda/numexpr-2.14.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ebe4980f9494b9f94d10d2e526edc29e72516698d3bf95670ba79415492212a4", size = 451126, upload-time = "2025-10-13T16:13:22.248Z" }, + { url = "https://files.pythonhosted.org/packages/4c/1a/edbe839109518364ac0bd9e918cf874c755bb2c128040e920f198c494263/numexpr-2.14.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a381e5e919a745c9503bcefffc1c7f98c972c04ec58fc8e999ed1a929e01ba6", size = 442012, upload-time = "2025-10-13T16:14:51.416Z" }, + { url = "https://files.pythonhosted.org/packages/66/b1/be4ce99bff769a5003baddac103f34681997b31d4640d5a75c0e8ed59c78/numexpr-2.14.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d08856cfc1b440eb1caaa60515235369654321995dd68eb9377577392020f6cb", size = 1415975, upload-time = "2025-10-13T16:13:26.088Z" }, + { url = "https://files.pythonhosted.org/packages/e7/33/b33b8fdc032a05d9ebb44a51bfcd4b92c178a2572cd3e6c1b03d8a4b45b2/numexpr-2.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03130afa04edf83a7b590d207444f05a00363c9b9ea5d81c0f53b1ea13fad55a", size = 1464683, upload-time = "2025-10-13T16:14:58.87Z" }, + { url = "https://files.pythonhosted.org/packages/d0/b2/ddcf0ac6cf0a1d605e5aecd4281507fd79a9628a67896795ab2e975de5df/numexpr-2.14.1-cp311-cp311-win32.whl", hash = "sha256:db78fa0c9fcbaded3ae7453faf060bd7a18b0dc10299d7fcd02d9362be1213ed", size = 166838, upload-time = "2025-10-13T16:17:06.765Z" }, + { url = "https://files.pythonhosted.org/packages/64/72/4ca9bd97b2eb6dce9f5e70a3b6acec1a93e1fb9b079cb4cba2cdfbbf295d/numexpr-2.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:e9b2f957798c67a2428be96b04bce85439bed05efe78eb78e4c2ca43737578e7", size = 160069, upload-time = "2025-10-13T16:17:08.752Z" }, + { url = "https://files.pythonhosted.org/packages/9d/20/c473fc04a371f5e2f8c5749e04505c13e7a8ede27c09e9f099b2ad6f43d6/numexpr-2.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:91ebae0ab18c799b0e6b8c5a8d11e1fa3848eb4011271d99848b297468a39430", size = 162790, upload-time = "2025-10-13T16:16:34.903Z" }, + { url = "https://files.pythonhosted.org/packages/45/93/b6760dd1904c2a498e5f43d1bb436f59383c3ddea3815f1461dfaa259373/numexpr-2.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47041f2f7b9e69498fb311af672ba914a60e6e6d804011caacb17d66f639e659", size = 152196, upload-time = "2025-10-13T16:16:36.593Z" }, + { url = "https://files.pythonhosted.org/packages/72/94/cc921e35593b820521e464cbbeaf8212bbdb07f16dc79fe283168df38195/numexpr-2.14.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d686dfb2c1382d9e6e0ee0b7647f943c1886dba3adbf606c625479f35f1956c1", size = 452468, upload-time = "2025-10-13T16:13:29.531Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/43/560e9ba23c02c904b5934496486d061bcb14cd3ebba2e3cf0e2dccb6c22b/numexpr-2.14.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eee6d4fbbbc368e6cdd0772734d6249128d957b3b8ad47a100789009f4de7083", size = 443631, upload-time = "2025-10-13T16:15:02.473Z" }, + { url = "https://files.pythonhosted.org/packages/7b/6c/78f83b6219f61c2c22d71ab6e6c2d4e5d7381334c6c29b77204e59edb039/numexpr-2.14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3a2839efa25f3c8d4133252ea7342d8f81226c7c4dda81f97a57e090b9d87a48", size = 1417670, upload-time = "2025-10-13T16:13:33.464Z" }, + { url = "https://files.pythonhosted.org/packages/0e/bb/1ccc9dcaf46281568ce769888bf16294c40e98a5158e4b16c241de31d0d3/numexpr-2.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9f9137f1351b310436662b5dc6f4082a245efa8950c3b0d9008028df92fefb9b", size = 1466212, upload-time = "2025-10-13T16:15:12.828Z" }, + { url = "https://files.pythonhosted.org/packages/31/9f/203d82b9e39dadd91d64bca55b3c8ca432e981b822468dcef41a4418626b/numexpr-2.14.1-cp312-cp312-win32.whl", hash = "sha256:36f8d5c1bd1355df93b43d766790f9046cccfc1e32b7c6163f75bcde682cda07", size = 166996, upload-time = "2025-10-13T16:17:10.369Z" }, + { url = "https://files.pythonhosted.org/packages/1f/67/ffe750b5452eb66de788c34e7d21ec6d886abb4d7c43ad1dc88ceb3d998f/numexpr-2.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:fdd886f4b7dbaf167633ee396478f0d0aa58ea2f9e7ccc3c6431019623e8d68f", size = 160187, upload-time = "2025-10-13T16:17:11.974Z" }, ] [[package]] @@ -3621,7 +3672,7 @@ wheels = [ [[package]] name = "onnxruntime" -version = "1.22.1" +version = "1.23.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "coloredlogs" }, @@ -3632,19 +3683,21 @@ dependencies = [ { name = "sympy" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/82/ff/4a1a6747e039ef29a8d4ee4510060e9a805982b6da906a3da2306b7a3be6/onnxruntime-1.22.1-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:f4581bccb786da68725d8eac7c63a8f31a89116b8761ff8b4989dc58b61d49a0", size = 34324148, upload-time = "2025-07-10T19:15:26.584Z" }, - { url = "https://files.pythonhosted.org/packages/0b/05/9f1929723f1cca8c9fb1b2b97ac54ce61362c7201434d38053ea36ee4225/onnxruntime-1.22.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7ae7526cf10f93454beb0f751e78e5cb7619e3b92f9fc3bd51aa6f3b7a8977e5", size = 14473779, upload-time = "2025-07-10T19:15:30.183Z" }, - { url = "https://files.pythonhosted.org/packages/59/f3/c93eb4167d4f36ea947930f82850231f7ce0900cb00e1a53dc4995b60479/onnxruntime-1.22.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f6effa1299ac549a05c784d50292e3378dbbf010346ded67400193b09ddc2f04", size = 16460799, upload-time = "2025-07-10T19:15:33.005Z" }, - { url = "https://files.pythonhosted.org/packages/a8/01/e536397b03e4462d3260aee5387e6f606c8fa9d2b20b1728f988c3c72891/onnxruntime-1.22.1-cp311-cp311-win_amd64.whl", hash = "sha256:f28a42bb322b4ca6d255531bb334a2b3e21f172e37c1741bd5e66bc4b7b61f03", size = 12689881, upload-time = "2025-07-10T19:15:35.501Z" }, - { url = "https://files.pythonhosted.org/packages/48/70/ca2a4d38a5deccd98caa145581becb20c53684f451e89eb3a39915620066/onnxruntime-1.22.1-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:a938d11c0dc811badf78e435daa3899d9af38abee950d87f3ab7430eb5b3cf5a", size = 34342883, upload-time = "2025-07-10T19:15:38.223Z" }, - { url = 
"https://files.pythonhosted.org/packages/29/e5/00b099b4d4f6223b610421080d0eed9327ef9986785c9141819bbba0d396/onnxruntime-1.22.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:984cea2a02fcc5dfea44ade9aca9fe0f7a8a2cd6f77c258fc4388238618f3928", size = 14473861, upload-time = "2025-07-10T19:15:42.911Z" }, - { url = "https://files.pythonhosted.org/packages/0a/50/519828a5292a6ccd8d5cd6d2f72c6b36ea528a2ef68eca69647732539ffa/onnxruntime-1.22.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2d39a530aff1ec8d02e365f35e503193991417788641b184f5b1e8c9a6d5ce8d", size = 16475713, upload-time = "2025-07-10T19:15:45.452Z" }, - { url = "https://files.pythonhosted.org/packages/5d/54/7139d463bb0a312890c9a5db87d7815d4a8cce9e6f5f28d04f0b55fcb160/onnxruntime-1.22.1-cp312-cp312-win_amd64.whl", hash = "sha256:6a64291d57ea966a245f749eb970f4fa05a64d26672e05a83fdb5db6b7d62f87", size = 12690910, upload-time = "2025-07-10T19:15:47.478Z" }, + { url = "https://files.pythonhosted.org/packages/8a/61/ee52bb2c9402cd1a0d550fc65b826c174f8eed49677dd3833ac1bfc0e35a/onnxruntime-1.23.1-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:9ba6e52fb7bc2758a61d1e421d060cf71d5e4259f95ea8a6f72320ae4415f229", size = 17194265, upload-time = "2025-10-08T04:25:24.479Z" }, + { url = "https://files.pythonhosted.org/packages/d3/67/67122b7b4138815090e0d304c8893fefb77370066a847d08e185f04f75fe/onnxruntime-1.23.1-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:7f130f4b0d31ba17c8789053a641958d0d341d96a1bff578d613fb52ded218c2", size = 19150493, upload-time = "2025-10-08T04:24:21.839Z" }, + { url = "https://files.pythonhosted.org/packages/73/e6/66cebc4dcdb217ccb1027cfcbcc01d6399e999c294d986806991c144cbe7/onnxruntime-1.23.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b89fd116f20b70e1140a77286954a7715eb9347260ff2008ee7ec94994df039", size = 15216531, upload-time = "2025-10-08T04:24:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/38/47/083847220c4a429e272ce9407bc8c47fa77b62e0c787ef2cc94fe9776c1b/onnxruntime-1.23.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:61139a29d536b71db6045c75462e593a53feecc19756dc222531971cd08e5efe", size = 17368047, upload-time = "2025-10-08T04:24:48.426Z" }, + { url = "https://files.pythonhosted.org/packages/ac/8e/b3d861a7d199fd9c6a0b4af9b5d813bcc853d2e4dd4dac2c70b6c23097ed/onnxruntime-1.23.1-cp311-cp311-win_amd64.whl", hash = "sha256:7973186e8eb66e32ea20cb238ae92b604091e4d1df632653ec830abf7584d0b3", size = 13466816, upload-time = "2025-10-08T04:25:15.037Z" }, + { url = "https://files.pythonhosted.org/packages/00/3c/4b4f56b5df4596d1d95aafe13cbc987d050a89364ff5b2f90308376901fb/onnxruntime-1.23.1-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:564d6add1688efdb0720cf2158b50314fc35b744ad2623155ee3b805c381d9ce", size = 17194708, upload-time = "2025-10-08T04:25:27.188Z" }, + { url = "https://files.pythonhosted.org/packages/b4/97/05529b97142c1a09bde2caefea4fd29f71329b9275b52bacdbc2c4f9e964/onnxruntime-1.23.1-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:3864c39307714eff1753149215ad86324a9372e3172a0275d5b16ffd296574bf", size = 19152841, upload-time = "2025-10-08T04:24:24.157Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b9/1232fd295fa9c818aa2a7883d87a2f864fb5edee56ec757c6e857fdd1863/onnxruntime-1.23.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e6b6b5ea80a96924f67fe1e5519f6c6f9cd716fdb5a4fd1ecb4f2b0971e8d00", size = 15223749, 
upload-time = "2025-10-08T04:24:08.088Z" }, + { url = "https://files.pythonhosted.org/packages/c4/b0/4663a333a82c77f159e48fe8639b1f03e4a05036625be9129c20c4d71d12/onnxruntime-1.23.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:576502dad714ffe5f3b4e1918c5b3368766b222063c585e5fd88415c063e4c80", size = 17378483, upload-time = "2025-10-08T04:24:50.712Z" }, + { url = "https://files.pythonhosted.org/packages/7c/60/8100d98690cbf1de03e08d1f3eff33ff00c652806c7130658a48a8f60584/onnxruntime-1.23.1-cp312-cp312-win_amd64.whl", hash = "sha256:1b89b7c4d4c00a67debc2b0a1484d7f51b23fef85fbd80ac83ed2d17b2161bd6", size = 13467773, upload-time = "2025-10-08T04:25:17.097Z" }, ] [[package]] name = "openai" -version = "1.61.1" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -3656,9 +3709,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d9/cf/61e71ce64cf0a38f029da0f9a5f10c9fa0e69a7a977b537126dac50adfea/openai-1.61.1.tar.gz", hash = "sha256:ce1851507218209961f89f3520e06726c0aa7d0512386f0f977e3ac3e4f2472e", size = 350784, upload-time = "2025-02-05T14:34:15.873Z" } +sdist = { url = "https://files.pythonhosted.org/packages/de/90/8f26554d24d63ed4f94d33c24271559863223a67e624f4d2e65ba8e48dca/openai-2.3.0.tar.gz", hash = "sha256:8d213ee5aaf91737faea2d7fc1cd608657a5367a18966372a3756ceaabfbd812", size = 589616, upload-time = "2025-10-10T01:12:50.851Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/b6/2e2a011b2dc27a6711376808b4cd8c922c476ea0f1420b39892117fa8563/openai-1.61.1-py3-none-any.whl", hash = "sha256:72b0826240ce26026ac2cd17951691f046e5be82ad122d20a8e1b30ca18bd11e", size = 463126, upload-time = "2025-02-05T14:34:13.643Z" }, + { url = "https://files.pythonhosted.org/packages/9c/5b/4be258ff072ed8ee15f6bfd8d5a1a4618aa4704b127c0c5959212ad177d6/openai-2.3.0-py3-none-any.whl", hash = "sha256:a7aa83be6f7b0ab2e4d4d7bcaf36e3d790874c0167380c5d0afd0ed99a86bd7b", size = 999768, upload-time = "2025-10-10T01:12:48.647Z" }, ] [[package]] @@ -3679,7 +3732,7 @@ wheels = [ [[package]] name = "openinference-instrumentation" -version = "0.1.38" +version = "0.1.40" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "openinference-semantic-conventions" }, @@ -3687,18 +3740,18 @@ dependencies = [ { name = "opentelemetry-sdk" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fa/87/71c599f804203077f3766e7c6ce831cdfd0ca202278c35877a704e00b2cf/openinference_instrumentation-0.1.38.tar.gz", hash = "sha256:b45e5d19b5c0d14e884a11ed5b888deda03d955c6e6f4478d8cefd3edaea089d", size = 23749, upload-time = "2025-09-02T21:06:22.025Z" } +sdist = { url = "https://files.pythonhosted.org/packages/37/59/750c25a353260a72287e618b9ccabd57f02db6bfd571c6dbf132202abeff/openinference_instrumentation-0.1.40.tar.gz", hash = "sha256:3080785479793a56023806c71dccbc39418925947407667794c651f992f700a2", size = 23824, upload-time = "2025-10-10T03:48:48.606Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/f7/72bd2dbb8bbdd785512c9d128f2056e2eaadccfaecb09d2ae59bde6d4af2/openinference_instrumentation-0.1.38-py3-none-any.whl", hash = "sha256:5c45d73c5f3c79e9d9e44fbf4b2c3bdae514be74396cc1880cb845b9b7acc78f", size = 29885, upload-time = "2025-09-02T21:06:20.845Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/fd/2b6ea9d95f3eb1deba10975a14b80d7fe79528258111771580a0437d4f44/openinference_instrumentation-0.1.40-py3-none-any.whl", hash = "sha256:d2e894f25addb1dfba563789213139876c5a01fca0a1fa8aa52a455a988a11d4", size = 29967, upload-time = "2025-10-10T03:48:46.518Z" }, ] [[package]] name = "openinference-semantic-conventions" -version = "0.1.21" +version = "0.1.24" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/75/0f/b794eb009846d4b10af50e205a323ca359f284563ef4d1778f35a80522ac/openinference_semantic_conventions-0.1.21.tar.gz", hash = "sha256:328405b9f79ff72a659c7712b8429c0d7ea68c6a4a1679e3eb44372aa228119b", size = 12534, upload-time = "2025-06-13T05:22:18.982Z" } +sdist = { url = "https://files.pythonhosted.org/packages/07/15/be7566a4bba4b57f7c70b088f42735f2005e2c0adce646a537f63dcf21de/openinference_semantic_conventions-0.1.24.tar.gz", hash = "sha256:3223b8c3958525457a369d58ebf0c56230a1f00567ae1e99f1c2049a8ac2cacd", size = 12741, upload-time = "2025-10-10T03:49:13.987Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6e/4d/092766f8e610f2c513e483c4adc892eea1634945022a73371fe01f621165/openinference_semantic_conventions-0.1.21-py3-none-any.whl", hash = "sha256:acde8282c20da1de900cdc0d6258a793ec3eb8031bfc496bd823dae17d32e326", size = 10167, upload-time = "2025-06-13T05:22:18.118Z" }, + { url = "https://files.pythonhosted.org/packages/9f/c5/fa81b19042b387826151f984a91fa3d0b52b08374e4d5786521ac2d9e704/openinference_semantic_conventions-0.1.24-py3-none-any.whl", hash = "sha256:b2d650ca7e39c5fb02bf908b8049d6ece2a2657757448e1925a38b59548a80b3", size = 10373, upload-time = "2025-10-10T03:49:00.318Z" }, ] [[package]] @@ -4019,7 +4072,7 @@ wheels = [ [[package]] name = "opik" -version = "1.8.72" +version = "1.8.74" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "boto3-stubs", extra = ["bedrock-runtime"] }, @@ -4038,21 +4091,21 @@ dependencies = [ { name = "tqdm" }, { name = "uuid6" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/aa/08/679b60db21994cf3318d4cdd1d08417c1877b79ac20971a8d80f118c9455/opik-1.8.72.tar.gz", hash = "sha256:26fcb003dc609d96b52eaf6a12fb16eb2b69eb0d1b35d88279ec612925d23944", size = 409774, upload-time = "2025-10-10T13:22:38.2Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/31/887f133aa82aeb4cb8a01d98ad6ae73cb0580c2c9395d76bae1d67dbb6f6/opik-1.8.74.tar.gz", hash = "sha256:4b18248dbd741dab16dab399c1ab7197f1f6c6775ee06285ff07d3d22e1810de", size = 412504, upload-time = "2025-10-13T13:43:03.117Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/f5/04d35af828d127de65a36286ce5b53e7310087a6b55a56f398daa7f0c9a6/opik-1.8.72-py3-none-any.whl", hash = "sha256:697e361a8364666f36aeb197aaba7ffa0696b49f04d2257b733d436749c90a8c", size = 768233, upload-time = "2025-10-10T13:22:36.352Z" }, + { url = "https://files.pythonhosted.org/packages/37/5d/11c12e2471880effa7a597d96bce848271fa93007f7f543ed607fb31822a/opik-1.8.74-py3-none-any.whl", hash = "sha256:34ffbff2c447da117e58bcc2fdf53b3b534dd1ffe9a293eb912f5419fc9904c3", size = 772547, upload-time = "2025-10-13T13:43:01.29Z" }, ] [[package]] name = "optype" -version = "0.13.4" +version = "0.14.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/20/7f/daa32a35b2a6a564a79723da49c0ddc464c462e67a906fc2b66a0d64f28e/optype-0.13.4.tar.gz", hash = 
"sha256:131d8e0f1c12d8095d553e26b54598597133830983233a6a2208886e7a388432", size = 99547, upload-time = "2025-08-19T19:52:44.242Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/ca/d3a2abcf12cc8c18ccac1178ef87ab50a235bf386d2401341776fdad18aa/optype-0.14.0.tar.gz", hash = "sha256:925cf060b7d1337647f880401f6094321e7d8e837533b8e159b9a92afa3157c6", size = 100880, upload-time = "2025-10-01T04:49:56.232Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/37/bb/b51940f2d91071325d5ae2044562aa698470a105474d9317b9dbdaad63df/optype-0.13.4-py3-none-any.whl", hash = "sha256:500c89cfac82e2f9448a54ce0a5d5c415b6976b039c2494403cd6395bd531979", size = 87919, upload-time = "2025-08-19T19:52:41.314Z" }, + { url = "https://files.pythonhosted.org/packages/84/a6/11b0eb65eeafa87260d36858b69ec4e0072d09e37ea6714280960030bc93/optype-0.14.0-py3-none-any.whl", hash = "sha256:50d02edafd04edf2e5e27d6249760a51b2198adb9f6ffd778030b3d2806b026b", size = 89465, upload-time = "2025-10-01T04:49:54.674Z" }, ] [package.optional-dependencies] @@ -4223,15 +4276,15 @@ wheels = [ [[package]] name = "pdfminer-six" -version = "20240706" +version = "20250506" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "charset-normalizer" }, { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e3/37/63cb918ffa21412dd5d54e32e190e69bfc340f3d6aa072ad740bec9386bb/pdfminer.six-20240706.tar.gz", hash = "sha256:c631a46d5da957a9ffe4460c5dce21e8431dabb615fee5f9f4400603a58d95a6", size = 7363505, upload-time = "2024-07-06T13:48:50.795Z" } +sdist = { url = "https://files.pythonhosted.org/packages/78/46/5223d613ac4963e1f7c07b2660fe0e9e770102ec6bda8c038400113fb215/pdfminer_six-20250506.tar.gz", hash = "sha256:b03cc8df09cf3c7aba8246deae52e0bca7ebb112a38895b5e1d4f5dd2b8ca2e7", size = 7387678, upload-time = "2025-05-06T16:17:00.787Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/67/7d/44d6b90e5a293d3a975cefdc4e12a932ebba814995b2a07e37e599dd27c6/pdfminer.six-20240706-py3-none-any.whl", hash = "sha256:f4f70e74174b4b3542fcb8406a210b6e2e27cd0f0b5fd04534a8cc0d8951e38c", size = 5615414, upload-time = "2024-07-06T13:48:48.408Z" }, + { url = "https://files.pythonhosted.org/packages/73/16/7a432c0101fa87457e75cb12c879e1749c5870a786525e2e0f42871d6462/pdfminer_six-20250506-py3-none-any.whl", hash = "sha256:d81ad173f62e5f841b53a8ba63af1a4a355933cfc0ffabd608e568b9193909e3", size = 5620187, upload-time = "2025-05-06T16:16:58.669Z" }, ] [[package]] @@ -4302,11 +4355,11 @@ wheels = [ [[package]] name = "platformdirs" -version = "4.4.0" +version = "4.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } +sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632, upload-time = "2025-10-08T17:44:48.791Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" 
}, + { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" }, ] [[package]] @@ -4329,7 +4382,7 @@ wheels = [ [[package]] name = "polyfile-weave" -version = "0.5.6" +version = "0.5.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "abnf" }, @@ -4347,9 +4400,21 @@ dependencies = [ { name = "pyyaml" }, { name = "setuptools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/16/11/7e0b3908a4f5436197b1fc11713c628cd7f9136dc7c1fb00ac8879991f87/polyfile_weave-0.5.6.tar.gz", hash = "sha256:a9fc41b456272c95a3788a2cab791e052acc24890c512fc5a6f9f4e221d24ed1", size = 5987173, upload-time = "2025-07-28T20:26:32.092Z" } +sdist = { url = "https://files.pythonhosted.org/packages/02/c3/5a2a2ba06850bc5ec27f83ac8b92210dff9ff6736b2c42f700b489b3fd86/polyfile_weave-0.5.7.tar.gz", hash = "sha256:c3d863f51c30322c236bdf385e116ac06d4e7de9ec25a3aae14d42b1d528e33b", size = 5987445, upload-time = "2025-09-22T19:21:11.222Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/19/63/04c5c7c2093cf69c9eeea338f4757522a5d048703a35b3ac8a5580ed2369/polyfile_weave-0.5.6-py3-none-any.whl", hash = "sha256:658e5b6ed040a973279a0cd7f54f4566249c85b977dee556788fa6f903c1d30b", size = 1655007, upload-time = "2025-07-28T20:26:30.132Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f6/d1efedc0f9506e47699616e896d8efe39e8f0b6a7d1d590c3e97455ecf4a/polyfile_weave-0.5.7-py3-none-any.whl", hash = "sha256:880454788bc383408bf19eefd6d1c49a18b965d90c99bccb58f4da65870c82dd", size = 1655397, upload-time = "2025-09-22T19:21:09.142Z" }, +] + +[[package]] +name = "pondpond" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "madoka" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/9b/8411458ca8ce8b5b9b135e4a19823f1caf958ca9985883db104323492982/pondpond-1.4.1.tar.gz", hash = "sha256:8afa34b869d1434d21dd2ec12644abc3b1733fcda8fcf355300338a13a79bb7b", size = 15237, upload-time = "2024-03-01T07:08:06.756Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/d4/f18d6985157cc68f76469480182cbee2a03a45858456955acf57f9dcbb4c/pondpond-1.4.1-py3-none-any.whl", hash = "sha256:641028ead4e8018ca6de1220c660ddd6d6fbf62a60e72f410655dd0451d82880", size = 14498, upload-time = "2024-03-01T07:08:04.63Z" }, ] [[package]] @@ -4380,7 +4445,7 @@ wheels = [ [[package]] name = "posthog" -version = "6.7.4" +version = "6.7.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "backoff" }, @@ -4390,9 +4455,9 @@ dependencies = [ { name = "six" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0f/40/d7f585e09e47f492ebaeb8048a8e2ce5d9f49a3896856a7a975cbc1484fa/posthog-6.7.4.tar.gz", hash = "sha256:2bfa74f321ac18efe4a48a256d62034a506ca95477af7efa32292ed488a742c5", size = 118209, upload-time = "2025-09-05T15:29:21.517Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e2/ce/11d6fa30ab517018796e1d675498992da585479e7079770ec8fa99a61561/posthog-6.7.6.tar.gz", hash = "sha256:ee5c5ad04b857d96d9b7a4f715e23916a2f206bfcf25e5a9d328a3d27664b0d3", size = 119129, upload-time = "2025-09-22T18:11:12.365Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/bb/95/e795059ef73d480a7f11f1be201087f65207509525920897fb514a04914c/posthog-6.7.4-py3-none-any.whl", hash = "sha256:7f1872c53ec7e9a29b088a5a1ad03fa1be3b871d10d70c8bf6c2dafb91beaac5", size = 136409, upload-time = "2025-09-05T15:29:19.995Z" }, + { url = "https://files.pythonhosted.org/packages/de/84/586422d8861b5391c8414360b10f603c0b7859bb09ad688e64430ed0df7b/posthog-6.7.6-py3-none-any.whl", hash = "sha256:b09a7e65a042ec416c28874b397d3accae412a80a8b0ef3fa686fbffc99e4d4b", size = 137348, upload-time = "2025-09-22T18:11:10.807Z" }, ] [[package]] @@ -4409,43 +4474,41 @@ wheels = [ [[package]] name = "propcache" -version = "0.3.2" +version = "0.4.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/80/8d/e8b436717ab9c2cfc23b116d2c297305aa4cd8339172a456d61ebf5669b8/propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be", size = 74207, upload-time = "2025-06-09T22:54:05.399Z" }, - { url = "https://files.pythonhosted.org/packages/d6/29/1e34000e9766d112171764b9fa3226fa0153ab565d0c242c70e9945318a7/propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f", size = 43648, upload-time = "2025-06-09T22:54:08.023Z" }, - { url = "https://files.pythonhosted.org/packages/46/92/1ad5af0df781e76988897da39b5f086c2bf0f028b7f9bd1f409bb05b6874/propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9", size = 43496, upload-time = "2025-06-09T22:54:09.228Z" }, - { url = "https://files.pythonhosted.org/packages/b3/ce/e96392460f9fb68461fabab3e095cb00c8ddf901205be4eae5ce246e5b7e/propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf", size = 217288, upload-time = "2025-06-09T22:54:10.466Z" }, - { url = "https://files.pythonhosted.org/packages/c5/2a/866726ea345299f7ceefc861a5e782b045545ae6940851930a6adaf1fca6/propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9", size = 227456, upload-time = "2025-06-09T22:54:11.828Z" }, - { url = "https://files.pythonhosted.org/packages/de/03/07d992ccb6d930398689187e1b3c718339a1c06b8b145a8d9650e4726166/propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66", size = 225429, upload-time = "2025-06-09T22:54:13.823Z" }, - { url = "https://files.pythonhosted.org/packages/5d/e6/116ba39448753b1330f48ab8ba927dcd6cf0baea8a0ccbc512dfb49ba670/propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df", 
size = 213472, upload-time = "2025-06-09T22:54:15.232Z" }, - { url = "https://files.pythonhosted.org/packages/a6/85/f01f5d97e54e428885a5497ccf7f54404cbb4f906688a1690cd51bf597dc/propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2", size = 204480, upload-time = "2025-06-09T22:54:17.104Z" }, - { url = "https://files.pythonhosted.org/packages/e3/79/7bf5ab9033b8b8194cc3f7cf1aaa0e9c3256320726f64a3e1f113a812dce/propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7", size = 214530, upload-time = "2025-06-09T22:54:18.512Z" }, - { url = "https://files.pythonhosted.org/packages/31/0b/bd3e0c00509b609317df4a18e6b05a450ef2d9a963e1d8bc9c9415d86f30/propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95", size = 205230, upload-time = "2025-06-09T22:54:19.947Z" }, - { url = "https://files.pythonhosted.org/packages/7a/23/fae0ff9b54b0de4e819bbe559508da132d5683c32d84d0dc2ccce3563ed4/propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e", size = 206754, upload-time = "2025-06-09T22:54:21.716Z" }, - { url = "https://files.pythonhosted.org/packages/b7/7f/ad6a3c22630aaa5f618b4dc3c3598974a72abb4c18e45a50b3cdd091eb2f/propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e", size = 218430, upload-time = "2025-06-09T22:54:23.17Z" }, - { url = "https://files.pythonhosted.org/packages/5b/2c/ba4f1c0e8a4b4c75910742f0d333759d441f65a1c7f34683b4a74c0ee015/propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf", size = 223884, upload-time = "2025-06-09T22:54:25.539Z" }, - { url = "https://files.pythonhosted.org/packages/88/e4/ebe30fc399e98572019eee82ad0caf512401661985cbd3da5e3140ffa1b0/propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e", size = 211480, upload-time = "2025-06-09T22:54:26.892Z" }, - { url = "https://files.pythonhosted.org/packages/96/0a/7d5260b914e01d1d0906f7f38af101f8d8ed0dc47426219eeaf05e8ea7c2/propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897", size = 37757, upload-time = "2025-06-09T22:54:28.241Z" }, - { url = "https://files.pythonhosted.org/packages/e1/2d/89fe4489a884bc0da0c3278c552bd4ffe06a1ace559db5ef02ef24ab446b/propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39", size = 41500, upload-time = "2025-06-09T22:54:29.4Z" }, - { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" }, - { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570, upload-time = "2025-06-09T22:54:32.296Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, - { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, - { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894, upload-time = "2025-06-09T22:54:36.708Z" }, - { url = "https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, - { url = "https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, - { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, upload-time = "2025-06-09T22:54:41.565Z" }, - { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = "2025-06-09T22:54:43.038Z" }, - { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, - { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = "2025-06-09T22:54:46.243Z" }, - { url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, - { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = 
"2025-06-09T22:54:48.982Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, - { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, - { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, - { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, + { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929, upload-time = "2025-10-08T19:46:28.62Z" }, + { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" }, + { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" }, + { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 
210030, upload-time = "2025-10-08T19:46:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064, upload-time = "2025-10-08T19:46:36.993Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, upload-time = "2025-10-08T19:46:38.398Z" }, + { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, + { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, 
upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, ] [[package]] @@ -4476,17 +4539,18 @@ wheels = [ [[package]] name = "psutil" -version = "7.0.0" +version = "7.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003, upload-time = "2025-02-13T21:54:07.946Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/31/4723d756b59344b643542936e37a31d1d3204bcdc42a7daa8ee9eb06fb50/psutil-7.1.0.tar.gz", hash = "sha256:655708b3c069387c8b77b072fc429a57d0e214221d01c0a772df7dfedcb3bcd2", size = 497660, upload-time = "2025-09-17T20:14:52.902Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051, upload-time = "2025-02-13T21:54:12.36Z" }, - { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535, upload-time = "2025-02-13T21:54:16.07Z" }, - { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004, upload-time = "2025-02-13T21:54:18.662Z" }, - { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986, upload-time = "2025-02-13T21:54:21.811Z" }, - { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544, upload-time = "2025-02-13T21:54:24.68Z" }, - { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053, upload-time = "2025-02-13T21:54:34.31Z" }, - { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = 
"sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885, upload-time = "2025-02-13T21:54:37.486Z" }, + { url = "https://files.pythonhosted.org/packages/46/62/ce4051019ee20ce0ed74432dd73a5bb087a6704284a470bb8adff69a0932/psutil-7.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76168cef4397494250e9f4e73eb3752b146de1dd950040b29186d0cce1d5ca13", size = 245242, upload-time = "2025-09-17T20:14:56.126Z" }, + { url = "https://files.pythonhosted.org/packages/38/61/f76959fba841bf5b61123fbf4b650886dc4094c6858008b5bf73d9057216/psutil-7.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:5d007560c8c372efdff9e4579c2846d71de737e4605f611437255e81efcca2c5", size = 246682, upload-time = "2025-09-17T20:14:58.25Z" }, + { url = "https://files.pythonhosted.org/packages/88/7a/37c99d2e77ec30d63398ffa6a660450b8a62517cabe44b3e9bae97696e8d/psutil-7.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22e4454970b32472ce7deaa45d045b34d3648ce478e26a04c7e858a0a6e75ff3", size = 287994, upload-time = "2025-09-17T20:14:59.901Z" }, + { url = "https://files.pythonhosted.org/packages/9d/de/04c8c61232f7244aa0a4b9a9fbd63a89d5aeaf94b2fc9d1d16e2faa5cbb0/psutil-7.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c70e113920d51e89f212dd7be06219a9b88014e63a4cec69b684c327bc474e3", size = 291163, upload-time = "2025-09-17T20:15:01.481Z" }, + { url = "https://files.pythonhosted.org/packages/f4/58/c4f976234bf6d4737bc8c02a81192f045c307b72cf39c9e5c5a2d78927f6/psutil-7.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d4a113425c037300de3ac8b331637293da9be9713855c4fc9d2d97436d7259d", size = 293625, upload-time = "2025-09-17T20:15:04.492Z" }, + { url = "https://files.pythonhosted.org/packages/79/87/157c8e7959ec39ced1b11cc93c730c4fb7f9d408569a6c59dbd92ceb35db/psutil-7.1.0-cp37-abi3-win32.whl", hash = "sha256:09ad740870c8d219ed8daae0ad3b726d3bf9a028a198e7f3080f6a1888b99bca", size = 244812, upload-time = "2025-09-17T20:15:07.462Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e9/b44c4f697276a7a95b8e94d0e320a7bf7f3318521b23de69035540b39838/psutil-7.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:57f5e987c36d3146c0dd2528cd42151cf96cd359b9d67cfff836995cc5df9a3d", size = 247965, upload-time = "2025-09-17T20:15:09.673Z" }, + { url = "https://files.pythonhosted.org/packages/26/65/1070a6e3c036f39142c2820c4b52e9243246fcfc3f96239ac84472ba361e/psutil-7.1.0-cp37-abi3-win_arm64.whl", hash = "sha256:6937cb68133e7c97b6cc9649a570c9a18ba0efebed46d8c5dae4c07fa1b67a07", size = 244971, upload-time = "2025-09-17T20:15:12.262Z" }, ] [[package]] @@ -4497,34 +4561,28 @@ sdist = { url = "https://files.pythonhosted.org/packages/eb/72/4a7965cf54e341006 [[package]] name = "psycopg2-binary" -version = "2.9.10" +version = "2.9.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", size = 385764, upload-time = "2024-10-16T11:24:58.126Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" } 
wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/8f/9feb01291d0d7a0a4c6a6bab24094135c2b59c6a81943752f632c75896d6/psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff", size = 3043397, upload-time = "2024-10-16T11:19:40.033Z" }, - { url = "https://files.pythonhosted.org/packages/15/30/346e4683532011561cd9c8dfeac6a8153dd96452fee0b12666058ab7893c/psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c", size = 3274806, upload-time = "2024-10-16T11:19:43.5Z" }, - { url = "https://files.pythonhosted.org/packages/66/6e/4efebe76f76aee7ec99166b6c023ff8abdc4e183f7b70913d7c047701b79/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c", size = 2851370, upload-time = "2024-10-16T11:19:46.986Z" }, - { url = "https://files.pythonhosted.org/packages/7f/fd/ff83313f86b50f7ca089b161b8e0a22bb3c319974096093cd50680433fdb/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb", size = 3080780, upload-time = "2024-10-16T11:19:50.242Z" }, - { url = "https://files.pythonhosted.org/packages/e6/c4/bfadd202dcda8333a7ccafdc51c541dbdfce7c2c7cda89fa2374455d795f/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341", size = 3264583, upload-time = "2024-10-16T11:19:54.424Z" }, - { url = "https://files.pythonhosted.org/packages/5d/f1/09f45ac25e704ac954862581f9f9ae21303cc5ded3d0b775532b407f0e90/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a", size = 3019831, upload-time = "2024-10-16T11:19:57.762Z" }, - { url = "https://files.pythonhosted.org/packages/9e/2e/9beaea078095cc558f215e38f647c7114987d9febfc25cb2beed7c3582a5/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b", size = 2871822, upload-time = "2024-10-16T11:20:04.693Z" }, - { url = "https://files.pythonhosted.org/packages/01/9e/ef93c5d93f3dc9fc92786ffab39e323b9aed066ba59fdc34cf85e2722271/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7", size = 2820975, upload-time = "2024-10-16T11:20:11.401Z" }, - { url = "https://files.pythonhosted.org/packages/a5/f0/049e9631e3268fe4c5a387f6fc27e267ebe199acf1bc1bc9cbde4bd6916c/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e", size = 2919320, upload-time = "2024-10-16T11:20:17.959Z" }, - { url = "https://files.pythonhosted.org/packages/dc/9a/bcb8773b88e45fb5a5ea8339e2104d82c863a3b8558fbb2aadfe66df86b3/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68", size = 2957617, upload-time = "2024-10-16T11:20:24.711Z" }, - { url = "https://files.pythonhosted.org/packages/e2/6b/144336a9bf08a67d217b3af3246abb1d027095dab726f0687f01f43e8c03/psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = 
"sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392", size = 1024618, upload-time = "2024-10-16T11:20:27.718Z" }, - { url = "https://files.pythonhosted.org/packages/61/69/3b3d7bd583c6d3cbe5100802efa5beacaacc86e37b653fc708bf3d6853b8/psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4", size = 1163816, upload-time = "2024-10-16T11:20:30.777Z" }, - { url = "https://files.pythonhosted.org/packages/49/7d/465cc9795cf76f6d329efdafca74693714556ea3891813701ac1fee87545/psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0", size = 3044771, upload-time = "2024-10-16T11:20:35.234Z" }, - { url = "https://files.pythonhosted.org/packages/8b/31/6d225b7b641a1a2148e3ed65e1aa74fc86ba3fee850545e27be9e1de893d/psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a", size = 3275336, upload-time = "2024-10-16T11:20:38.742Z" }, - { url = "https://files.pythonhosted.org/packages/30/b7/a68c2b4bff1cbb1728e3ec864b2d92327c77ad52edcd27922535a8366f68/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539", size = 2851637, upload-time = "2024-10-16T11:20:42.145Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b1/cfedc0e0e6f9ad61f8657fd173b2f831ce261c02a08c0b09c652b127d813/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526", size = 3082097, upload-time = "2024-10-16T11:20:46.185Z" }, - { url = "https://files.pythonhosted.org/packages/18/ed/0a8e4153c9b769f59c02fb5e7914f20f0b2483a19dae7bf2db54b743d0d0/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1", size = 3264776, upload-time = "2024-10-16T11:20:50.879Z" }, - { url = "https://files.pythonhosted.org/packages/10/db/d09da68c6a0cdab41566b74e0a6068a425f077169bed0946559b7348ebe9/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e", size = 3020968, upload-time = "2024-10-16T11:20:56.819Z" }, - { url = "https://files.pythonhosted.org/packages/94/28/4d6f8c255f0dfffb410db2b3f9ac5218d959a66c715c34cac31081e19b95/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f", size = 2872334, upload-time = "2024-10-16T11:21:02.411Z" }, - { url = "https://files.pythonhosted.org/packages/05/f7/20d7bf796593c4fea95e12119d6cc384ff1f6141a24fbb7df5a668d29d29/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00", size = 2822722, upload-time = "2024-10-16T11:21:09.01Z" }, - { url = "https://files.pythonhosted.org/packages/4d/e4/0c407ae919ef626dbdb32835a03b6737013c3cc7240169843965cada2bdf/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5", size = 2920132, upload-time = "2024-10-16T11:21:16.339Z" }, - { url = 
"https://files.pythonhosted.org/packages/2d/70/aa69c9f69cf09a01da224909ff6ce8b68faeef476f00f7ec377e8f03be70/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47", size = 2959312, upload-time = "2024-10-16T11:21:25.584Z" }, - { url = "https://files.pythonhosted.org/packages/d3/bd/213e59854fafe87ba47814bf413ace0dcee33a89c8c8c814faca6bc7cf3c/psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64", size = 1025191, upload-time = "2024-10-16T11:21:29.912Z" }, - { url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031, upload-time = "2024-10-16T11:21:34.211Z" }, + { url = "https://files.pythonhosted.org/packages/c7/ae/8d8266f6dd183ab4d48b95b9674034e1b482a3f8619b33a0d86438694577/psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10", size = 3756452, upload-time = "2025-10-10T11:11:11.583Z" }, + { url = "https://files.pythonhosted.org/packages/4b/34/aa03d327739c1be70e09d01182619aca8ebab5970cd0cfa50dd8b9cec2ac/psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a", size = 3863957, upload-time = "2025-10-10T11:11:16.932Z" }, + { url = "https://files.pythonhosted.org/packages/48/89/3fdb5902bdab8868bbedc1c6e6023a4e08112ceac5db97fc2012060e0c9a/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4", size = 4410955, upload-time = "2025-10-10T11:11:21.21Z" }, + { url = "https://files.pythonhosted.org/packages/ce/24/e18339c407a13c72b336e0d9013fbbbde77b6fd13e853979019a1269519c/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7", size = 4468007, upload-time = "2025-10-10T11:11:24.831Z" }, + { url = "https://files.pythonhosted.org/packages/91/7e/b8441e831a0f16c159b5381698f9f7f7ed54b77d57bc9c5f99144cc78232/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee", size = 4165012, upload-time = "2025-10-10T11:11:29.51Z" }, + { url = "https://files.pythonhosted.org/packages/76/a1/2f5841cae4c635a9459fe7aca8ed771336e9383b6429e05c01267b0774cf/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f", size = 3650985, upload-time = "2025-10-10T11:11:34.975Z" }, + { url = "https://files.pythonhosted.org/packages/84/74/4defcac9d002bca5709951b975173c8c2fa968e1a95dc713f61b3a8d3b6a/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94", size = 3296039, upload-time = "2025-10-10T11:11:40.432Z" }, + { url = "https://files.pythonhosted.org/packages/c8/31/36a1d8e702aa35c38fc117c2b8be3f182613faa25d794b8aeaab948d4c03/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908", size = 3345842, upload-time = 
"2025-10-10T11:11:45.366Z" }, + { url = "https://files.pythonhosted.org/packages/6e/b4/a5375cda5b54cb95ee9b836930fea30ae5a8f14aa97da7821722323d979b/psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03", size = 2713894, upload-time = "2025-10-10T11:11:48.775Z" }, + { url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" }, + { url = "https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" }, + { url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" }, + { url = "https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234, upload-time = "2025-10-10T11:12:04.892Z" }, + { url = "https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236, upload-time = "2025-10-10T11:12:11.674Z" }, + { url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281, upload-time = "2025-10-10T11:12:17.713Z" }, + { url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010, upload-time = "2025-10-10T11:12:22.671Z" }, + { url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940, upload-time = "2025-10-10T11:12:26.529Z" }, + { url = "https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147, upload-time = "2025-10-10T11:12:29.535Z" }, ] [[package]] @@ -4620,7 +4678,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.7" +version = "2.11.10" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -4628,9 +4686,9 @@ dependencies = [ { 
name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/54/ecab642b3bed45f7d5f59b38443dcb36ef50f85af192e6ece103dbfe9587/pydantic-2.11.10.tar.gz", hash = "sha256:dc280f0982fbda6c38fada4e476dc0a4f3aeaf9c6ad4c28df68a666ec3c61423", size = 788494, upload-time = "2025-10-04T10:40:41.338Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, + { url = "https://files.pythonhosted.org/packages/bd/1f/73c53fcbfb0b5a78f91176df41945ca466e71e9d9d836e5c522abda39ee7/pydantic-2.11.10-py3-none-any.whl", hash = "sha256:802a655709d49bd004c31e865ef37da30b540786a46bfce02333e0e24b5fe29a", size = 444823, upload-time = "2025-10-04T10:40:39.055Z" }, ] [[package]] @@ -4683,29 +4741,29 @@ wheels = [ [[package]] name = "pydantic-extra-types" -version = "2.10.5" +version = "2.10.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7e/ba/4178111ec4116c54e1dc7ecd2a1ff8f54256cdbd250e576882911e8f710a/pydantic_extra_types-2.10.5.tar.gz", hash = "sha256:1dcfa2c0cf741a422f088e0dbb4690e7bfadaaf050da3d6f80d6c3cf58a2bad8", size = 138429, upload-time = "2025-06-02T09:31:52.713Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/10/fb64987804cde41bcc39d9cd757cd5f2bb5d97b389d81aa70238b14b8a7e/pydantic_extra_types-2.10.6.tar.gz", hash = "sha256:c63d70bf684366e6bbe1f4ee3957952ebe6973d41e7802aea0b770d06b116aeb", size = 141858, upload-time = "2025-10-08T13:47:49.483Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/1a/5f4fd9e7285f10c44095a4f9fe17d0f358d1702a7c74a9278c794e8a7537/pydantic_extra_types-2.10.5-py3-none-any.whl", hash = "sha256:b60c4e23d573a69a4f1a16dd92888ecc0ef34fb0e655b4f305530377fa70e7a8", size = 38315, upload-time = "2025-06-02T09:31:51.229Z" }, + { url = "https://files.pythonhosted.org/packages/93/04/5c918669096da8d1c9ec7bb716bd72e755526103a61bc5e76a3e4fb23b53/pydantic_extra_types-2.10.6-py3-none-any.whl", hash = "sha256:6106c448316d30abf721b5b9fecc65e983ef2614399a24142d689c7546cc246a", size = 40949, upload-time = "2025-10-08T13:47:48.268Z" }, ] [[package]] name = "pydantic-settings" -version = "2.9.1" +version = "2.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "python-dotenv" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/67/1d/42628a2c33e93f8e9acbde0d5d735fa0850f3e6a2f8cb1eb6c40b9a732ac/pydantic_settings-2.9.1.tar.gz", hash = "sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268", size = 163234, upload-time = "2025-04-18T16:44:48.265Z" } +sdist = { url = "https://files.pythonhosted.org/packages/20/c5/dbbc27b814c71676593d1c3f718e6cd7d4f00652cefa24b75f7aa3efb25e/pydantic_settings-2.11.0.tar.gz", hash = "sha256:d0e87a1c7d33593beb7194adb8470fc426e95ba02af83a0f23474a04c9a08180", size = 188394, upload-time = "2025-09-24T14:19:11.764Z" } 
wheels = [ - { url = "https://files.pythonhosted.org/packages/b6/5f/d6d641b490fd3ec2c4c13b4244d68deea3a1b970a97be64f34fb5504ff72/pydantic_settings-2.9.1-py3-none-any.whl", hash = "sha256:59b4f431b1defb26fe620c71a7d3968a710d719f5f4cdbbdb7926edeb770f6ef", size = 44356, upload-time = "2025-04-18T16:44:46.617Z" }, + { url = "https://files.pythonhosted.org/packages/83/d6/887a1ff844e64aa823fb4905978d882a633cfe295c32eacad582b78a7d8b/pydantic_settings-2.11.0-py3-none-any.whl", hash = "sha256:fe2cea3413b9530d10f3a5875adffb17ada5c1e1bab0b2885546d7310415207c", size = 48608, upload-time = "2025-09-24T14:19:10.015Z" }, ] [[package]] @@ -4733,7 +4791,7 @@ crypto = [ [[package]] name = "pymilvus" -version = "2.5.15" +version = "2.5.16" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "grpcio" }, @@ -4744,9 +4802,9 @@ dependencies = [ { name = "setuptools" }, { name = "ujson" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cd/f9/dee7f0d42979bf4cbe0bf23f8db9bf4c331b53c4c9f8692d2e027073c928/pymilvus-2.5.15.tar.gz", hash = "sha256:350396ef3bb40aa62c8a2ecaccb5c664bbb1569eef8593b74dd1d5125eb0deb2", size = 1278109, upload-time = "2025-08-21T11:57:58.416Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/e2/5613bc7b2af0ccd760177ca4255243c284cfc0f2cba3f10ff63325c4ca34/pymilvus-2.5.16.tar.gz", hash = "sha256:65f56b81806bc217cca3cf29b70a27d053dea4b1ffada910cf63a38f96381618", size = 1280614, upload-time = "2025-09-19T07:02:14.747Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/af/10a620686025e5b59889d7075f5d426e45e57a0180c4465051645a88ccb0/pymilvus-2.5.15-py3-none-any.whl", hash = "sha256:a155a3b436e2e3ca4b85aac80c92733afe0bd172c497c3bc0dfaca0b804b90c9", size = 241683, upload-time = "2025-08-21T11:57:56.663Z" }, + { url = "https://files.pythonhosted.org/packages/c6/09/b67a55abee0a53ea50ba0de0cba6e1c0f7ca7ce2c15ffd6f40c059c25e88/pymilvus-2.5.16-py3-none-any.whl", hash = "sha256:76258a324f19c60fee247467e11cd7d6f35a64d2a9c753f5d7b1a5fa15dd6c8a", size = 243272, upload-time = "2025-09-19T07:02:12.443Z" }, ] [[package]] @@ -4800,20 +4858,20 @@ wheels = [ [[package]] name = "pyparsing" -version = "3.2.3" +version = "3.2.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bb/22/f1129e69d94ffff626bdb5c835506b3a5b4f3d070f17ea295e12c2c6f60f/pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be", size = 1088608, upload-time = "2025-03-25T05:01:28.114Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/a5/181488fc2b9d093e3972d2a472855aae8a03f000592dbfce716a512b3359/pyparsing-3.2.5.tar.gz", hash = "sha256:2df8d5b7b2802ef88e8d016a2eb9c7aeaa923529cd251ed0fe4608275d4105b6", size = 1099274, upload-time = "2025-09-21T04:11:06.277Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/05/e7/df2285f3d08fee213f2d041540fa4fc9ca6c2d44cf36d3a035bf2a8d2bcc/pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf", size = 111120, upload-time = "2025-03-25T05:01:24.908Z" }, + { url = "https://files.pythonhosted.org/packages/10/5e/1aa9a93198c6b64513c9d7752de7422c06402de6600a8767da1524f9570b/pyparsing-3.2.5-py3-none-any.whl", hash = "sha256:e38a4f02064cf41fe6593d328d0512495ad1f3d8a91c4f73fc401b3079a59a5e", size = 113890, upload-time = "2025-09-21T04:11:04.117Z" }, ] [[package]] name = "pypdf" -version = "6.0.0" +version = "6.1.1" source = { registry = "https://pypi.org/simple" } 
-sdist = { url = "https://files.pythonhosted.org/packages/20/ac/a300a03c3b34967c050677ccb16e7a4b65607ee5df9d51e8b6d713de4098/pypdf-6.0.0.tar.gz", hash = "sha256:282a99d2cc94a84a3a3159f0d9358c0af53f85b4d28d76ea38b96e9e5ac2a08d", size = 5033827, upload-time = "2025-08-11T14:22:02.352Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/85/4c0f12616db83c2e3ef580c3cfa98bd082e88fc8d02e136bad3bede1e3fa/pypdf-6.1.1.tar.gz", hash = "sha256:10f44d49bf2a82e54c3c5ba3cdcbb118f2a44fc57df8ce51d6fb9b1ed9bfbe8b", size = 5074507, upload-time = "2025-09-28T13:29:16.165Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/83/2cacc506eb322bb31b747bc06ccb82cc9aa03e19ee9c1245e538e49d52be/pypdf-6.0.0-py3-none-any.whl", hash = "sha256:56ea60100ce9f11fc3eec4f359da15e9aec3821b036c1f06d2b660d35683abb8", size = 310465, upload-time = "2025-08-11T14:22:00.481Z" }, + { url = "https://files.pythonhosted.org/packages/07/ed/adae13756d9dabdddee483fc7712905bb5585fbf6e922b1a19aca3a29cd1/pypdf-6.1.1-py3-none-any.whl", hash = "sha256:7781f99493208a37a7d4275601d883e19af24e62a525c25844d22157c2e4cde7", size = 323455, upload-time = "2025-09-28T13:29:14.392Z" }, ] [[package]] @@ -4925,6 +4983,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b2/05/77b60e520511c53d1c1ca75f1930c7dd8e971d0c4379b7f4b3f9644685ba/pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0", size = 9923, upload-time = "2025-05-26T13:58:43.487Z" }, ] +[[package]] +name = "pytest-timeout" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ac/82/4c9ecabab13363e72d880f2fb504c5f750433b2b6f16e99f4ec21ada284c/pytest_timeout-2.4.0.tar.gz", hash = "sha256:7e68e90b01f9eff71332b25001f85c75495fc4e3a836701876183c4bcfd0540a", size = 17973, upload-time = "2025-05-05T19:44:34.99Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/b6/3127540ecdf1464a00e5a01ee60a1b09175f6913f0644ac748494d9c4b21/pytest_timeout-2.4.0-py3-none-any.whl", hash = "sha256:c42667e5cdadb151aeb5b26d114aff6bdf5a907f176a007a30b940d3d865b5c2", size = 14382, upload-time = "2025-05-05T19:44:33.502Z" }, +] + [[package]] name = "python-calamine" version = "0.5.3" @@ -5092,75 +5162,29 @@ wheels = [ [[package]] name = "pyyaml" -version = "6.0.2" +version = "6.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, - { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, - { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, - { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, - { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, - { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, - { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, - { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, - { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, - { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, - { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, -] - -[[package]] -name = "pyzstd" -version = "0.17.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/8f/a2/54d860ccbd07e3c67e4d0321d1c29fc7963ac82cf801a078debfc4ef7c15/pyzstd-0.17.0.tar.gz", hash = "sha256:d84271f8baa66c419204c1dd115a4dec8b266f8a2921da21b81764fa208c1db6", size = 1212160, upload-time = "2025-05-10T14:14:49.764Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/29/4a/81ca9a6a759ae10a51cb72f002c149b602ec81b3a568ca6292b117f6da0d/pyzstd-0.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06d1e7afafe86b90f3d763f83d2f6b6a437a8d75119fe1ff52b955eb9df04eaa", size = 377827, upload-time = "2025-05-10T14:12:54.102Z" }, - { url = "https://files.pythonhosted.org/packages/a1/09/584c12c8a918c9311a55be0c667e57a8ee73797367299e2a9f3fc3bf7a39/pyzstd-0.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc827657f644e4510211b49f5dab6b04913216bc316206d98f9a75214361f16e", size = 297579, upload-time = "2025-05-10T14:12:55.748Z" }, - { url = "https://files.pythonhosted.org/packages/e1/89/dc74cd83f30b97f95d42b028362e32032e61a8f8e6cc2a8e47b70976d99a/pyzstd-0.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ecffadaa2ee516ecea3e432ebf45348fa8c360017f03b88800dd312d62ecb063", size = 443132, upload-time = "2025-05-10T14:12:57.098Z" }, - { url = "https://files.pythonhosted.org/packages/a8/12/fe93441228a324fe75d10f5f13d5e5d5ed028068810dfdf9505d89d704a0/pyzstd-0.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:596de361948d3aad98a837c98fcee4598e51b608f7e0912e0e725f82e013f00f", size = 390644, upload-time = "2025-05-10T14:12:58.379Z" }, - { url = 
"https://files.pythonhosted.org/packages/9d/d1/aa7cdeb9bf8995d9df9936c71151be5f4e7b231561d553e73bbf340c2281/pyzstd-0.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd3a8d0389c103e93853bf794b9a35ac5d0d11ca3e7e9f87e3305a10f6dfa6b2", size = 478070, upload-time = "2025-05-10T14:12:59.706Z" }, - { url = "https://files.pythonhosted.org/packages/95/62/7e5c450790bfd3db954694d4d877446d0b6d192aae9c73df44511f17b75c/pyzstd-0.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1356f72c7b8bb99b942d582b61d1a93c5065e66b6df3914dac9f2823136c3228", size = 421240, upload-time = "2025-05-10T14:13:01.151Z" }, - { url = "https://files.pythonhosted.org/packages/3a/b5/d20c60678c0dfe2430f38241d118308f12516ccdb44f9edce27852ee2187/pyzstd-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f514c339b013b0b0a2ed8ea6e44684524223bd043267d7644d7c3a70e74a0dd", size = 412908, upload-time = "2025-05-10T14:13:02.904Z" }, - { url = "https://files.pythonhosted.org/packages/d2/a0/3ae0f1af2982b6cdeacc2a1e1cd20869d086d836ea43e0f14caee8664101/pyzstd-0.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d4de16306821021c2d82a45454b612e2a8683d99bfb98cff51a883af9334bea0", size = 415572, upload-time = "2025-05-10T14:13:04.828Z" }, - { url = "https://files.pythonhosted.org/packages/7d/84/cb0a10c3796f4cd5f09c112cbd72405ffd019f7c0d1e2e5e99ccc803c60c/pyzstd-0.17.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:aeb9759c04b6a45c1b56be21efb0a738e49b0b75c4d096a38707497a7ff2be82", size = 445334, upload-time = "2025-05-10T14:13:06.5Z" }, - { url = "https://files.pythonhosted.org/packages/d6/d6/8c5cf223067b69aa63f9ecf01846535d4ba82d98f8c9deadfc0092fa16ca/pyzstd-0.17.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7a5b31ddeada0027e67464d99f09167cf08bab5f346c3c628b2d3c84e35e239a", size = 518748, upload-time = "2025-05-10T14:13:08.286Z" }, - { url = "https://files.pythonhosted.org/packages/bf/1c/dc7bab00a118d0ae931239b23e05bf703392005cf3bb16942b7b2286452a/pyzstd-0.17.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8338e4e91c52af839abcf32f1f65f3b21e2597ffe411609bdbdaf10274991bd0", size = 562487, upload-time = "2025-05-10T14:13:09.714Z" }, - { url = "https://files.pythonhosted.org/packages/e0/a4/fca96c0af643e4de38bce0dc25dab60ea558c49444c30b9dbe8b7a1714be/pyzstd-0.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:628e93862feb372b4700085ec4d1d389f1283ac31900af29591ae01019910ff3", size = 432319, upload-time = "2025-05-10T14:13:11.296Z" }, - { url = "https://files.pythonhosted.org/packages/f1/a3/7c924478f6c14b369fec8c5cd807b069439c6ecbf98c4783c5791036d3ad/pyzstd-0.17.0-cp311-cp311-win32.whl", hash = "sha256:c27773f9c95ebc891cfcf1ef282584d38cde0a96cb8d64127953ad752592d3d7", size = 220005, upload-time = "2025-05-10T14:13:13.188Z" }, - { url = "https://files.pythonhosted.org/packages/d2/f6/d081b6b29cf00780c971b07f7889a19257dd884e64a842a5ebc406fd3992/pyzstd-0.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:c043a5766e00a2b7844705c8fa4563b7c195987120afee8f4cf594ecddf7e9ac", size = 246224, upload-time = "2025-05-10T14:13:14.478Z" }, - { url = "https://files.pythonhosted.org/packages/61/f3/f42c767cde8e3b94652baf85863c25476fd463f3bd61f73ed4a02c1db447/pyzstd-0.17.0-cp311-cp311-win_arm64.whl", hash = "sha256:efd371e41153ef55bf51f97e1ce4c1c0b05ceb59ed1d8972fc9aa1e9b20a790f", size = 223036, upload-time = "2025-05-10T14:13:15.752Z" }, - { url = 
"https://files.pythonhosted.org/packages/76/50/7fa47d0a13301b1ce20972aa0beb019c97f7ee8b0658d7ec66727b5967f9/pyzstd-0.17.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2ac330fc4f64f97a411b6f3fc179d2fe3050b86b79140e75a9a6dd9d6d82087f", size = 379056, upload-time = "2025-05-10T14:13:17.091Z" }, - { url = "https://files.pythonhosted.org/packages/9d/f2/67b03b1fa4e2a0b05e147cc30ac6d271d3d11017b47b30084cb4699451f4/pyzstd-0.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:725180c0c4eb2e643b7048ebfb45ddf43585b740535907f70ff6088f5eda5096", size = 298381, upload-time = "2025-05-10T14:13:18.812Z" }, - { url = "https://files.pythonhosted.org/packages/01/8b/807ff0a13cf3790fe5de85e18e10c22b96d92107d2ce88699cefd3f890cb/pyzstd-0.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c20fe0a60019685fa1f7137cb284f09e3f64680a503d9c0d50be4dd0a3dc5ec", size = 443770, upload-time = "2025-05-10T14:13:20.495Z" }, - { url = "https://files.pythonhosted.org/packages/f0/88/832d8d8147691ee37736a89ea39eaf94ceac5f24a6ce2be316ff5276a1f8/pyzstd-0.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d97f7aaadc3b6e2f8e51bfa6aa203ead9c579db36d66602382534afaf296d0db", size = 391167, upload-time = "2025-05-10T14:13:22.236Z" }, - { url = "https://files.pythonhosted.org/packages/a8/a5/2e09bee398dfb0d94ca43f3655552a8770a6269881dc4710b8f29c7f71aa/pyzstd-0.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42dcb34c5759b59721997036ff2d94210515d3ef47a9de84814f1c51a1e07e8a", size = 478960, upload-time = "2025-05-10T14:13:23.584Z" }, - { url = "https://files.pythonhosted.org/packages/da/b5/1f3b778ad1ccc395161fab7a3bf0dfbd85232234b6657c93213ed1ceda7e/pyzstd-0.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6bf05e18be6f6c003c7129e2878cffd76fcbebda4e7ebd7774e34ae140426cbf", size = 421891, upload-time = "2025-05-10T14:13:25.417Z" }, - { url = "https://files.pythonhosted.org/packages/83/c4/6bfb4725f4f38e9fe9735697060364fb36ee67546e7e8d78135044889619/pyzstd-0.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40f7c3a5144aa4fbccf37c30411f6b1db4c0f2cb6ad4df470b37929bffe6ca0", size = 413608, upload-time = "2025-05-10T14:13:26.75Z" }, - { url = "https://files.pythonhosted.org/packages/95/a2/c48b543e3a482e758b648ea025b94efb1abe1f4859c5185ff02c29596035/pyzstd-0.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9efd4007f8369fd0890701a4fc77952a0a8c4cb3bd30f362a78a1adfb3c53c12", size = 416429, upload-time = "2025-05-10T14:13:28.096Z" }, - { url = "https://files.pythonhosted.org/packages/5c/62/2d039ee4dbc8116ca1f2a2729b88a1368f076f5dadad463f165993f7afa8/pyzstd-0.17.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5f8add139b5fd23b95daa844ca13118197f85bd35ce7507e92fcdce66286cc34", size = 446671, upload-time = "2025-05-10T14:13:29.772Z" }, - { url = "https://files.pythonhosted.org/packages/be/ec/9ec9f0957cf5b842c751103a2b75ecb0a73cf3d99fac57e0436aab6748e0/pyzstd-0.17.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:259a60e8ce9460367dcb4b34d8b66e44ca3d8c9c30d53ed59ae7037622b3bfc7", size = 520290, upload-time = "2025-05-10T14:13:31.585Z" }, - { url = "https://files.pythonhosted.org/packages/cc/42/2e2f4bb641c2a9ab693c31feebcffa1d7c24e946d8dde424bba371e4fcce/pyzstd-0.17.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:86011a93cc3455c5d2e35988feacffbf2fa106812a48e17eb32c2a52d25a95b3", size = 563785, upload-time = 
"2025-05-10T14:13:32.971Z" }, - { url = "https://files.pythonhosted.org/packages/4d/e4/25e198d382faa4d322f617d7a5ff82af4dc65749a10d90f1423af2d194f6/pyzstd-0.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:425c31bc3de80313054e600398e4f1bd229ee61327896d5d015e2cd0283c9012", size = 433390, upload-time = "2025-05-10T14:13:34.668Z" }, - { url = "https://files.pythonhosted.org/packages/ad/7c/1ab970f5404ace9d343a36a86f1bd0fcf2dc1adf1ef8886394cf0a58bd9e/pyzstd-0.17.0-cp312-cp312-win32.whl", hash = "sha256:7c4b88183bb36eb2cebbc0352e6e9fe8e2d594f15859ae1ef13b63ebc58be158", size = 220291, upload-time = "2025-05-10T14:13:36.005Z" }, - { url = "https://files.pythonhosted.org/packages/b2/52/d35bf3e4f0676a74359fccef015eabe3ceaba95da4ac2212f8be4dde16de/pyzstd-0.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c31947e0120468342d74e0fa936d43f7e1dad66a2262f939735715aa6c730e8", size = 246451, upload-time = "2025-05-10T14:13:37.712Z" }, - { url = "https://files.pythonhosted.org/packages/34/da/a44705fe44dd87e0f09861b062f93ebb114365640dbdd62cbe80da9b8306/pyzstd-0.17.0-cp312-cp312-win_arm64.whl", hash = "sha256:1d0346418abcef11507356a31bef5470520f6a5a786d4e2c69109408361b1020", size = 222967, upload-time = "2025-05-10T14:13:38.94Z" }, - { url = "https://files.pythonhosted.org/packages/b8/95/b1ae395968efdba92704c23f2f8e027d08e00d1407671e42f65ac914d211/pyzstd-0.17.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3ce6bac0c4c032c5200647992a8efcb9801c918633ebe11cceba946afea152d9", size = 368391, upload-time = "2025-05-10T14:14:33.064Z" }, - { url = "https://files.pythonhosted.org/packages/c7/72/856831cacef58492878b8307353e28a3ba4326a85c3c82e4803a95ad0d14/pyzstd-0.17.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:a00998144b35be7c485a383f739fe0843a784cd96c3f1f2f53f1a249545ce49a", size = 283561, upload-time = "2025-05-10T14:14:34.469Z" }, - { url = "https://files.pythonhosted.org/packages/a4/a7/a86e55cd9f3e630a71c0bf78ac6da0c6b50dc428ca81aa7c5adbc66eb880/pyzstd-0.17.0-pp311-pypy311_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8521d7bbd00e0e1c1fd222c1369a7600fba94d24ba380618f9f75ee0c375c277", size = 356912, upload-time = "2025-05-10T14:14:35.722Z" }, - { url = "https://files.pythonhosted.org/packages/ad/b7/de2b42dd96dfdb1c0feb5f43d53db2d3a060607f878da7576f35dff68789/pyzstd-0.17.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da65158c877eac78dcc108861d607c02fb3703195c3a177f2687e0bcdfd519d0", size = 329417, upload-time = "2025-05-10T14:14:37.487Z" }, - { url = "https://files.pythonhosted.org/packages/52/65/d4e8196e068e6b430499fb2a5092380eb2cb7eecf459b9d4316cff7ecf6c/pyzstd-0.17.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:226ca0430e2357abae1ade802585231a2959b010ec9865600e416652121ba80b", size = 349448, upload-time = "2025-05-10T14:14:38.797Z" }, - { url = "https://files.pythonhosted.org/packages/9e/15/b5ed5ad8c8d2d80c5f5d51e6c61b2cc05f93aaf171164f67ccc7ade815cd/pyzstd-0.17.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e3a19e8521c145a0e2cd87ca464bf83604000c5454f7e0746092834fd7de84d1", size = 241668, upload-time = "2025-05-10T14:14:40.18Z" }, + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { 
url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = 
"2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, ] [[package]] @@ -5270,52 +5294,52 @@ hiredis = [ [[package]] name = "referencing" -version = "0.36.2" +version = "0.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, { name = "rpds-py" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" }, ] [[package]] name = "regex" -version = "2025.9.1" +version = "2025.9.18" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/5a/4c63457fbcaf19d138d72b2e9b39405954f98c0349b31c601bfcb151582c/regex-2025.9.1.tar.gz", hash = "sha256:88ac07b38d20b54d79e704e38aa3bd2c0f8027432164226bdee201a1c0c9c9ff", size = 400852, upload-time = "2025-09-01T22:10:10.479Z" } +sdist = { url = "https://files.pythonhosted.org/packages/49/d3/eaa0d28aba6ad1827ad1e716d9a93e1ba963ada61887498297d3da715133/regex-2025.9.18.tar.gz", hash = "sha256:c5ba23274c61c6fef447ba6a39333297d0c247f53059dba0bca415cac511edc4", size = 400917, upload-time = "2025-09-19T00:38:35.79Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/06/4d/f741543c0c59f96c6625bc6c11fea1da2e378b7d293ffff6f318edc0ce14/regex-2025.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e5bcf112b09bfd3646e4db6bf2e598534a17d502b0c01ea6550ba4eca780c5e6", size = 484811, upload-time = "2025-09-01T22:08:12.834Z" }, - { url = "https://files.pythonhosted.org/packages/c2/bd/27e73e92635b6fbd51afc26a414a3133243c662949cd1cda677fe7bb09bd/regex-2025.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:67a0295a3c31d675a9ee0238d20238ff10a9a2fdb7a1323c798fc7029578b15c", size = 288977, upload-time = "2025-09-01T22:08:14.499Z" }, - { url = "https://files.pythonhosted.org/packages/eb/7d/7dc0c6efc8bc93cd6e9b947581f5fde8a5dbaa0af7c4ec818c5729fdc807/regex-2025.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea8267fbadc7d4bd7c1301a50e85c2ff0de293ff9452a1a9f8d82c6cafe38179", size = 286606, upload-time = "2025-09-01T22:08:15.881Z" }, - { url = "https://files.pythonhosted.org/packages/d1/01/9b5c6dd394f97c8f2c12f6e8f96879c9ac27292a718903faf2e27a0c09f6/regex-2025.9.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6aeff21de7214d15e928fb5ce757f9495214367ba62875100d4c18d293750cc1", size = 792436, upload-time = "2025-09-01T22:08:17.38Z" }, - { url = "https://files.pythonhosted.org/packages/fc/24/b7430cfc6ee34bbb3db6ff933beb5e7692e5cc81e8f6f4da63d353566fb0/regex-2025.9.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d89f1bbbbbc0885e1c230f7770d5e98f4f00b0ee85688c871d10df8b184a6323", size = 858705, upload-time = "2025-09-01T22:08:19.037Z" }, - { url = "https://files.pythonhosted.org/packages/d6/98/155f914b4ea6ae012663188545c4f5216c11926d09b817127639d618b003/regex-2025.9.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ca3affe8ddea498ba9d294ab05f5f2d3b5ad5d515bc0d4a9016dd592a03afe52", size = 905881, upload-time = "2025-09-01T22:08:20.377Z" }, - { url = "https://files.pythonhosted.org/packages/8a/a7/a470e7bc8259c40429afb6d6a517b40c03f2f3e455c44a01abc483a1c512/regex-2025.9.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:91892a7a9f0a980e4c2c85dd19bc14de2b219a3a8867c4b5664b9f972dcc0c78", size = 798968, upload-time = "2025-09-01T22:08:22.081Z" }, - { url = "https://files.pythonhosted.org/packages/1d/fa/33f6fec4d41449fea5f62fdf5e46d668a1c046730a7f4ed9f478331a8e3a/regex-2025.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:e1cb40406f4ae862710615f9f636c1e030fd6e6abe0e0f65f6a695a2721440c6", size = 781884, upload-time = "2025-09-01T22:08:23.832Z" }, - { url = "https://files.pythonhosted.org/packages/42/de/2b45f36ab20da14eedddf5009d370625bc5942d9953fa7e5037a32d66843/regex-2025.9.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:94f6cff6f7e2149c7e6499a6ecd4695379eeda8ccbccb9726e8149f2fe382e92", size = 852935, upload-time = "2025-09-01T22:08:25.536Z" }, - { url = "https://files.pythonhosted.org/packages/1e/f9/878f4fc92c87e125e27aed0f8ee0d1eced9b541f404b048f66f79914475a/regex-2025.9.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6c0226fb322b82709e78c49cc33484206647f8a39954d7e9de1567f5399becd0", size = 844340, upload-time = "2025-09-01T22:08:27.141Z" }, - { url = "https://files.pythonhosted.org/packages/90/c2/5b6f2bce6ece5f8427c718c085eca0de4bbb4db59f54db77aa6557aef3e9/regex-2025.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a12f59c7c380b4fcf7516e9cbb126f95b7a9518902bcf4a852423ff1dcd03e6a", size = 787238, upload-time = "2025-09-01T22:08:28.75Z" }, - { url = "https://files.pythonhosted.org/packages/47/66/1ef1081c831c5b611f6f55f6302166cfa1bc9574017410ba5595353f846a/regex-2025.9.1-cp311-cp311-win32.whl", hash = "sha256:49865e78d147a7a4f143064488da5d549be6bfc3f2579e5044cac61f5c92edd4", size = 264118, upload-time = "2025-09-01T22:08:30.388Z" }, - { url = "https://files.pythonhosted.org/packages/ad/e0/8adc550d7169df1d6b9be8ff6019cda5291054a0107760c2f30788b6195f/regex-2025.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:d34b901f6f2f02ef60f4ad3855d3a02378c65b094efc4b80388a3aeb700a5de7", size = 276151, upload-time = "2025-09-01T22:08:32.073Z" }, - { url = "https://files.pythonhosted.org/packages/cb/bd/46fef29341396d955066e55384fb93b0be7d64693842bf4a9a398db6e555/regex-2025.9.1-cp311-cp311-win_arm64.whl", hash = "sha256:47d7c2dab7e0b95b95fd580087b6ae196039d62306a592fa4e162e49004b6299", size = 268460, upload-time = "2025-09-01T22:08:33.281Z" }, - { url = "https://files.pythonhosted.org/packages/39/ef/a0372febc5a1d44c1be75f35d7e5aff40c659ecde864d7fa10e138f75e74/regex-2025.9.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:84a25164bd8dcfa9f11c53f561ae9766e506e580b70279d05a7946510bdd6f6a", size = 486317, upload-time = "2025-09-01T22:08:34.529Z" }, - { url = "https://files.pythonhosted.org/packages/b5/25/d64543fb7eb41a1024786d518cc57faf1ce64aa6e9ddba097675a0c2f1d2/regex-2025.9.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:645e88a73861c64c1af558dd12294fb4e67b5c1eae0096a60d7d8a2143a611c7", size = 289698, upload-time = "2025-09-01T22:08:36.162Z" }, - { url = "https://files.pythonhosted.org/packages/d8/dc/fbf31fc60be317bd9f6f87daa40a8a9669b3b392aa8fe4313df0a39d0722/regex-2025.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10a450cba5cd5409526ee1d4449f42aad38dd83ac6948cbd6d7f71ca7018f7db", size = 287242, upload-time = "2025-09-01T22:08:37.794Z" }, - { url = "https://files.pythonhosted.org/packages/0f/74/f933a607a538f785da5021acf5323961b4620972e2c2f1f39b6af4b71db7/regex-2025.9.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9dc5991592933a4192c166eeb67b29d9234f9c86344481173d1bc52f73a7104", size = 797441, upload-time = "2025-09-01T22:08:39.108Z" }, - { url = "https://files.pythonhosted.org/packages/89/d0/71fc49b4f20e31e97f199348b8c4d6e613e7b6a54a90eb1b090c2b8496d7/regex-2025.9.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:a32291add816961aab472f4fad344c92871a2ee33c6c219b6598e98c1f0108f2", size = 862654, upload-time = "2025-09-01T22:08:40.586Z" }, - { url = "https://files.pythonhosted.org/packages/59/05/984edce1411a5685ba9abbe10d42cdd9450aab4a022271f9585539788150/regex-2025.9.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:588c161a68a383478e27442a678e3b197b13c5ba51dbba40c1ccb8c4c7bee9e9", size = 910862, upload-time = "2025-09-01T22:08:42.416Z" }, - { url = "https://files.pythonhosted.org/packages/b2/02/5c891bb5fe0691cc1bad336e3a94b9097fbcf9707ec8ddc1dce9f0397289/regex-2025.9.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47829ffaf652f30d579534da9085fe30c171fa2a6744a93d52ef7195dc38218b", size = 801991, upload-time = "2025-09-01T22:08:44.072Z" }, - { url = "https://files.pythonhosted.org/packages/f1/ae/fd10d6ad179910f7a1b3e0a7fde1ef8bb65e738e8ac4fd6ecff3f52252e4/regex-2025.9.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e978e5a35b293ea43f140c92a3269b6ab13fe0a2bf8a881f7ac740f5a6ade85", size = 786651, upload-time = "2025-09-01T22:08:46.079Z" }, - { url = "https://files.pythonhosted.org/packages/30/cf/9d686b07bbc5bf94c879cc168db92542d6bc9fb67088d03479fef09ba9d3/regex-2025.9.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4cf09903e72411f4bf3ac1eddd624ecfd423f14b2e4bf1c8b547b72f248b7bf7", size = 856556, upload-time = "2025-09-01T22:08:48.376Z" }, - { url = "https://files.pythonhosted.org/packages/91/9d/302f8a29bb8a49528abbab2d357a793e2a59b645c54deae0050f8474785b/regex-2025.9.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d016b0f77be63e49613c9e26aaf4a242f196cd3d7a4f15898f5f0ab55c9b24d2", size = 849001, upload-time = "2025-09-01T22:08:50.067Z" }, - { url = "https://files.pythonhosted.org/packages/93/fa/b4c6dbdedc85ef4caec54c817cd5f4418dbfa2453214119f2538082bf666/regex-2025.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:656563e620de6908cd1c9d4f7b9e0777e3341ca7db9d4383bcaa44709c90281e", size = 788138, upload-time = "2025-09-01T22:08:51.933Z" }, - { url = "https://files.pythonhosted.org/packages/4a/1b/91ee17a3cbf87f81e8c110399279d0e57f33405468f6e70809100f2ff7d8/regex-2025.9.1-cp312-cp312-win32.whl", hash = "sha256:df33f4ef07b68f7ab637b1dbd70accbf42ef0021c201660656601e8a9835de45", size = 264524, upload-time = "2025-09-01T22:08:53.75Z" }, - { url = "https://files.pythonhosted.org/packages/92/28/6ba31cce05b0f1ec6b787921903f83bd0acf8efde55219435572af83c350/regex-2025.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:5aba22dfbc60cda7c0853516104724dc904caa2db55f2c3e6e984eb858d3edf3", size = 275489, upload-time = "2025-09-01T22:08:55.037Z" }, - { url = "https://files.pythonhosted.org/packages/bd/ed/ea49f324db00196e9ef7fe00dd13c6164d5173dd0f1bbe495e61bb1fb09d/regex-2025.9.1-cp312-cp312-win_arm64.whl", hash = "sha256:ec1efb4c25e1849c2685fa95da44bfde1b28c62d356f9c8d861d4dad89ed56e9", size = 268589, upload-time = "2025-09-01T22:08:56.369Z" }, + { url = "https://files.pythonhosted.org/packages/58/61/80eda662fc4eb32bfedc331f42390974c9e89c7eac1b79cd9eea4d7c458c/regex-2025.9.18-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:51076980cd08cd13c88eb7365427ae27f0d94e7cebe9ceb2bb9ffdae8fc4d82a", size = 484832, upload-time = "2025-09-19T00:35:30.011Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d9/33833d9abddf3f07ad48504ddb53fe3b22f353214bbb878a72eee1e3ddbf/regex-2025.9.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:828446870bd7dee4e0cbeed767f07961aa07f0ea3129f38b3ccecebc9742e0b8", size = 288994, upload-time = "2025-09-19T00:35:31.733Z" }, + { url = "https://files.pythonhosted.org/packages/2a/b3/526ee96b0d70ea81980cbc20c3496fa582f775a52e001e2743cc33b2fa75/regex-2025.9.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c28821d5637866479ec4cc23b8c990f5bc6dd24e5e4384ba4a11d38a526e1414", size = 286619, upload-time = "2025-09-19T00:35:33.221Z" }, + { url = "https://files.pythonhosted.org/packages/65/4f/c2c096b02a351b33442aed5895cdd8bf87d372498d2100927c5a053d7ba3/regex-2025.9.18-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:726177ade8e481db669e76bf99de0b278783be8acd11cef71165327abd1f170a", size = 792454, upload-time = "2025-09-19T00:35:35.361Z" }, + { url = "https://files.pythonhosted.org/packages/24/15/b562c9d6e47c403c4b5deb744f8b4bf6e40684cf866c7b077960a925bdff/regex-2025.9.18-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f5cca697da89b9f8ea44115ce3130f6c54c22f541943ac8e9900461edc2b8bd4", size = 858723, upload-time = "2025-09-19T00:35:36.949Z" }, + { url = "https://files.pythonhosted.org/packages/f2/01/dba305409849e85b8a1a681eac4c03ed327d8de37895ddf9dc137f59c140/regex-2025.9.18-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dfbde38f38004703c35666a1e1c088b778e35d55348da2b7b278914491698d6a", size = 905899, upload-time = "2025-09-19T00:35:38.723Z" }, + { url = "https://files.pythonhosted.org/packages/fe/d0/c51d1e6a80eab11ef96a4cbad17fc0310cf68994fb01a7283276b7e5bbd6/regex-2025.9.18-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f2f422214a03fab16bfa495cfec72bee4aaa5731843b771860a471282f1bf74f", size = 798981, upload-time = "2025-09-19T00:35:40.416Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5e/72db90970887bbe02296612bd61b0fa31e6d88aa24f6a4853db3e96c575e/regex-2025.9.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a295916890f4df0902e4286bc7223ee7f9e925daa6dcdec4192364255b70561a", size = 781900, upload-time = "2025-09-19T00:35:42.077Z" }, + { url = "https://files.pythonhosted.org/packages/50/ff/596be45eea8e9bc31677fde243fa2904d00aad1b32c31bce26c3dbba0b9e/regex-2025.9.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5db95ff632dbabc8c38c4e82bf545ab78d902e81160e6e455598014f0abe66b9", size = 852952, upload-time = "2025-09-19T00:35:43.751Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1b/2dfa348fa551e900ed3f5f63f74185b6a08e8a76bc62bc9c106f4f92668b/regex-2025.9.18-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fb967eb441b0f15ae610b7069bdb760b929f267efbf522e814bbbfffdf125ce2", size = 844355, upload-time = "2025-09-19T00:35:45.309Z" }, + { url = "https://files.pythonhosted.org/packages/f4/bf/aefb1def27fe33b8cbbb19c75c13aefccfbef1c6686f8e7f7095705969c7/regex-2025.9.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f04d2f20da4053d96c08f7fde6e1419b7ec9dbcee89c96e3d731fca77f411b95", size = 787254, upload-time = "2025-09-19T00:35:46.904Z" }, + { url = "https://files.pythonhosted.org/packages/e3/4e/8ef042e7cf0dbbb401e784e896acfc1b367b95dfbfc9ada94c2ed55a081f/regex-2025.9.18-cp311-cp311-win32.whl", hash = "sha256:895197241fccf18c0cea7550c80e75f185b8bd55b6924fcae269a1a92c614a07", size = 264129, upload-time = "2025-09-19T00:35:48.597Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/7d/c4fcabf80dcdd6821c0578ad9b451f8640b9110fb3dcb74793dd077069ff/regex-2025.9.18-cp311-cp311-win_amd64.whl", hash = "sha256:7e2b414deae99166e22c005e154a5513ac31493db178d8aec92b3269c9cce8c9", size = 276160, upload-time = "2025-09-19T00:36:00.45Z" }, + { url = "https://files.pythonhosted.org/packages/64/f8/0e13c8ae4d6df9d128afaba138342d532283d53a4c1e7a8c93d6756c8f4a/regex-2025.9.18-cp311-cp311-win_arm64.whl", hash = "sha256:fb137ec7c5c54f34a25ff9b31f6b7b0c2757be80176435bf367111e3f71d72df", size = 268471, upload-time = "2025-09-19T00:36:02.149Z" }, + { url = "https://files.pythonhosted.org/packages/b0/99/05859d87a66ae7098222d65748f11ef7f2dff51bfd7482a4e2256c90d72b/regex-2025.9.18-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:436e1b31d7efd4dcd52091d076482031c611dde58bf9c46ca6d0a26e33053a7e", size = 486335, upload-time = "2025-09-19T00:36:03.661Z" }, + { url = "https://files.pythonhosted.org/packages/97/7e/d43d4e8b978890932cf7b0957fce58c5b08c66f32698f695b0c2c24a48bf/regex-2025.9.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c190af81e5576b9c5fdc708f781a52ff20f8b96386c6e2e0557a78402b029f4a", size = 289720, upload-time = "2025-09-19T00:36:05.471Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3b/ff80886089eb5dcf7e0d2040d9aaed539e25a94300403814bb24cc775058/regex-2025.9.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e4121f1ce2b2b5eec4b397cc1b277686e577e658d8f5870b7eb2d726bd2300ab", size = 287257, upload-time = "2025-09-19T00:36:07.072Z" }, + { url = "https://files.pythonhosted.org/packages/ee/66/243edf49dd8720cba8d5245dd4d6adcb03a1defab7238598c0c97cf549b8/regex-2025.9.18-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:300e25dbbf8299d87205e821a201057f2ef9aa3deb29caa01cd2cac669e508d5", size = 797463, upload-time = "2025-09-19T00:36:08.399Z" }, + { url = "https://files.pythonhosted.org/packages/df/71/c9d25a1142c70432e68bb03211d4a82299cd1c1fbc41db9409a394374ef5/regex-2025.9.18-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7b47fcf9f5316c0bdaf449e879407e1b9937a23c3b369135ca94ebc8d74b1742", size = 862670, upload-time = "2025-09-19T00:36:10.101Z" }, + { url = "https://files.pythonhosted.org/packages/f8/8f/329b1efc3a64375a294e3a92d43372bf1a351aa418e83c21f2f01cf6ec41/regex-2025.9.18-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:57a161bd3acaa4b513220b49949b07e252165e6b6dc910ee7617a37ff4f5b425", size = 910881, upload-time = "2025-09-19T00:36:12.223Z" }, + { url = "https://files.pythonhosted.org/packages/35/9e/a91b50332a9750519320ed30ec378b74c996f6befe282cfa6bb6cea7e9fd/regex-2025.9.18-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f130c3a7845ba42de42f380fff3c8aebe89a810747d91bcf56d40a069f15352", size = 802011, upload-time = "2025-09-19T00:36:13.901Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1d/6be3b8d7856b6e0d7ee7f942f437d0a76e0d5622983abbb6d21e21ab9a17/regex-2025.9.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f96fa342b6f54dcba928dd452e8d8cb9f0d63e711d1721cd765bb9f73bb048d", size = 786668, upload-time = "2025-09-19T00:36:15.391Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ce/4a60e53df58bd157c5156a1736d3636f9910bdcc271d067b32b7fcd0c3a8/regex-2025.9.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0f0d676522d68c207828dcd01fb6f214f63f238c283d9f01d85fc664c7c85b56", size = 
856578, upload-time = "2025-09-19T00:36:16.845Z" }, + { url = "https://files.pythonhosted.org/packages/86/e8/162c91bfe7217253afccde112868afb239f94703de6580fb235058d506a6/regex-2025.9.18-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:40532bff8a1a0621e7903ae57fce88feb2e8a9a9116d341701302c9302aef06e", size = 849017, upload-time = "2025-09-19T00:36:18.597Z" }, + { url = "https://files.pythonhosted.org/packages/35/34/42b165bc45289646ea0959a1bc7531733e90b47c56a72067adfe6b3251f6/regex-2025.9.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:039f11b618ce8d71a1c364fdee37da1012f5a3e79b1b2819a9f389cd82fd6282", size = 788150, upload-time = "2025-09-19T00:36:20.464Z" }, + { url = "https://files.pythonhosted.org/packages/79/5d/cdd13b1f3c53afa7191593a7ad2ee24092a5a46417725ffff7f64be8342d/regex-2025.9.18-cp312-cp312-win32.whl", hash = "sha256:e1dd06f981eb226edf87c55d523131ade7285137fbde837c34dc9d1bf309f459", size = 264536, upload-time = "2025-09-19T00:36:21.922Z" }, + { url = "https://files.pythonhosted.org/packages/e0/f5/4a7770c9a522e7d2dc1fa3ffc83ab2ab33b0b22b447e62cffef186805302/regex-2025.9.18-cp312-cp312-win_amd64.whl", hash = "sha256:3d86b5247bf25fa3715e385aa9ff272c307e0636ce0c9595f64568b41f0a9c77", size = 275501, upload-time = "2025-09-19T00:36:23.4Z" }, + { url = "https://files.pythonhosted.org/packages/df/05/9ce3e110e70d225ecbed455b966003a3afda5e58e8aec2964042363a18f4/regex-2025.9.18-cp312-cp312-win_arm64.whl", hash = "sha256:032720248cbeeae6444c269b78cb15664458b7bb9ed02401d3da59fe4d68c3a5", size = 268601, upload-time = "2025-09-19T00:36:25.092Z" }, ] [[package]] @@ -5386,15 +5410,15 @@ wheels = [ [[package]] name = "rich" -version = "14.1.0" +version = "14.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" }, + { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, ] [[package]] @@ -5521,28 +5545,28 @@ wheels = [ [[package]] name = "scipy-stubs" -version = "1.16.2.0" +version = "1.16.2.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "optype", extra = ["numpy"] }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4b/84/b4c2caf7748f331870992e7ede5b5df0b080671bcef8c8c7e27a3cf8694a/scipy_stubs-1.16.2.0.tar.gz", hash = "sha256:8fdd45155fca401bb755b1b63ac2f192f84f25c3be8da2c99d1cafb2708f3052", size = 352676, upload-time = "2025-09-11T23:28:59.236Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/6b/47/b165711b36a1afb8d5f408393487586e07f8bdb86f829b5b904c1ddd091f/scipy_stubs-1.16.2.3.tar.gz", hash = "sha256:b1afd21442699b8bdd399508187bddcedc6c29a34b188fd603396cb6754c2a91", size = 355436, upload-time = "2025-10-08T01:38:37.403Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/c8/67d984c264f759e7653c130a4b12ae3b4f4304867579560e9a869adb7883/scipy_stubs-1.16.2.0-py3-none-any.whl", hash = "sha256:18c50d49e3c932033fdd4f7fa4fea9e45c8787f92bceaec9e86ccbd140e835d5", size = 553247, upload-time = "2025-09-11T23:28:57.688Z" }, + { url = "https://files.pythonhosted.org/packages/29/9f/3d8f613d0c3be9348cb0c351328249b7a2428f13329447ec6f395628d7b0/scipy_stubs-1.16.2.3-py3-none-any.whl", hash = "sha256:05e93238bdaedb7fa1afedf9c3a2337f94fec3d8c33fb2d403c933e1bcc7412e", size = 556848, upload-time = "2025-10-08T01:38:35.697Z" }, ] [[package]] name = "sendgrid" -version = "6.12.4" +version = "6.12.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "ecdsa" }, + { name = "cryptography" }, { name = "python-http-client" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/11/31/62e00433878dccf33edf07f8efa417b9030a2464eb3b04bbd797a11b4447/sendgrid-6.12.4.tar.gz", hash = "sha256:9e88b849daf0fa4bdf256c3b5da9f5a3272402c0c2fd6b1928c9de440db0a03d", size = 50271, upload-time = "2025-06-12T10:29:37.213Z" } +sdist = { url = "https://files.pythonhosted.org/packages/da/fa/f718b2b953f99c1f0085811598ac7e31ccbd4229a81ec2a5290be868187a/sendgrid-6.12.5.tar.gz", hash = "sha256:ea9aae30cd55c332e266bccd11185159482edfc07c149b6cd15cf08869fabdb7", size = 50310, upload-time = "2025-09-19T06:23:09.229Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/9c/45d068fd831a65e6ed1e2ab3233de58784842afdc62fdcdd0a01bbb6b39d/sendgrid-6.12.4-py3-none-any.whl", hash = "sha256:9a211b96241e63bd5b9ed9afcc8608f4bcac426e4a319b3920ab877c8426e92c", size = 102122, upload-time = "2025-06-12T10:29:35.457Z" }, + { url = "https://files.pythonhosted.org/packages/bd/55/b3c3880a77082e8f7374954e0074aafafaa9bc78bdf9c8f5a92c2e7afc6a/sendgrid-6.12.5-py3-none-any.whl", hash = "sha256:96f92cc91634bf552fdb766b904bbb53968018da7ae41fdac4d1090dc0311ca8", size = 102173, upload-time = "2025-09-19T06:23:07.93Z" }, ] [[package]] @@ -5576,29 +5600,29 @@ wheels = [ [[package]] name = "shapely" -version = "2.1.1" +version = "2.1.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ca/3c/2da625233f4e605155926566c0e7ea8dda361877f48e8b1655e53456f252/shapely-2.1.1.tar.gz", hash = "sha256:500621967f2ffe9642454808009044c21e5b35db89ce69f8a2042c2ffd0e2772", size = 315422, upload-time = "2025-05-19T11:04:41.265Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4d/bc/0989043118a27cccb4e906a46b7565ce36ca7b57f5a18b78f4f1b0f72d9d/shapely-2.1.2.tar.gz", hash = "sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9", size = 315489, upload-time = "2025-09-24T13:51:41.432Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/19/97/2df985b1e03f90c503796ad5ecd3d9ed305123b64d4ccb54616b30295b29/shapely-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:587a1aa72bc858fab9b8c20427b5f6027b7cbc92743b8e2c73b9de55aa71c7a7", size = 1819368, upload-time = "2025-05-19T11:03:55.937Z" }, - { url = 
"https://files.pythonhosted.org/packages/56/17/504518860370f0a28908b18864f43d72f03581e2b6680540ca668f07aa42/shapely-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9fa5c53b0791a4b998f9ad84aad456c988600757a96b0a05e14bba10cebaaaea", size = 1625362, upload-time = "2025-05-19T11:03:57.06Z" }, - { url = "https://files.pythonhosted.org/packages/36/a1/9677337d729b79fce1ef3296aac6b8ef4743419086f669e8a8070eff8f40/shapely-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aabecd038841ab5310d23495253f01c2a82a3aedae5ab9ca489be214aa458aa7", size = 2999005, upload-time = "2025-05-19T11:03:58.692Z" }, - { url = "https://files.pythonhosted.org/packages/a2/17/e09357274699c6e012bbb5a8ea14765a4d5860bb658df1931c9f90d53bd3/shapely-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:586f6aee1edec04e16227517a866df3e9a2e43c1f635efc32978bb3dc9c63753", size = 3108489, upload-time = "2025-05-19T11:04:00.059Z" }, - { url = "https://files.pythonhosted.org/packages/17/5d/93a6c37c4b4e9955ad40834f42b17260ca74ecf36df2e81bb14d12221b90/shapely-2.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b9878b9e37ad26c72aada8de0c9cfe418d9e2ff36992a1693b7f65a075b28647", size = 3945727, upload-time = "2025-05-19T11:04:01.786Z" }, - { url = "https://files.pythonhosted.org/packages/a3/1a/ad696648f16fd82dd6bfcca0b3b8fbafa7aacc13431c7fc4c9b49e481681/shapely-2.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9a531c48f289ba355e37b134e98e28c557ff13965d4653a5228d0f42a09aed0", size = 4109311, upload-time = "2025-05-19T11:04:03.134Z" }, - { url = "https://files.pythonhosted.org/packages/d4/38/150dd245beab179ec0d4472bf6799bf18f21b1efbef59ac87de3377dbf1c/shapely-2.1.1-cp311-cp311-win32.whl", hash = "sha256:4866de2673a971820c75c0167b1f1cd8fb76f2d641101c23d3ca021ad0449bab", size = 1522982, upload-time = "2025-05-19T11:04:05.217Z" }, - { url = "https://files.pythonhosted.org/packages/93/5b/842022c00fbb051083c1c85430f3bb55565b7fd2d775f4f398c0ba8052ce/shapely-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:20a9d79958b3d6c70d8a886b250047ea32ff40489d7abb47d01498c704557a93", size = 1703872, upload-time = "2025-05-19T11:04:06.791Z" }, - { url = "https://files.pythonhosted.org/packages/fb/64/9544dc07dfe80a2d489060791300827c941c451e2910f7364b19607ea352/shapely-2.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2827365b58bf98efb60affc94a8e01c56dd1995a80aabe4b701465d86dcbba43", size = 1833021, upload-time = "2025-05-19T11:04:08.022Z" }, - { url = "https://files.pythonhosted.org/packages/07/aa/fb5f545e72e89b6a0f04a0effda144f5be956c9c312c7d4e00dfddbddbcf/shapely-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9c551f7fa7f1e917af2347fe983f21f212863f1d04f08eece01e9c275903fad", size = 1643018, upload-time = "2025-05-19T11:04:09.343Z" }, - { url = "https://files.pythonhosted.org/packages/03/46/61e03edba81de729f09d880ce7ae5c1af873a0814206bbfb4402ab5c3388/shapely-2.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78dec4d4fbe7b1db8dc36de3031767e7ece5911fb7782bc9e95c5cdec58fb1e9", size = 2986417, upload-time = "2025-05-19T11:04:10.56Z" }, - { url = "https://files.pythonhosted.org/packages/1f/1e/83ec268ab8254a446b4178b45616ab5822d7b9d2b7eb6e27cf0b82f45601/shapely-2.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:872d3c0a7b8b37da0e23d80496ec5973c4692920b90de9f502b5beb994bbaaef", size = 3098224, upload-time = "2025-05-19T11:04:11.903Z" }, - { url = 
"https://files.pythonhosted.org/packages/f1/44/0c21e7717c243e067c9ef8fa9126de24239f8345a5bba9280f7bb9935959/shapely-2.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2e2b9125ebfbc28ecf5353511de62f75a8515ae9470521c9a693e4bb9fbe0cf1", size = 3925982, upload-time = "2025-05-19T11:04:13.224Z" }, - { url = "https://files.pythonhosted.org/packages/15/50/d3b4e15fefc103a0eb13d83bad5f65cd6e07a5d8b2ae920e767932a247d1/shapely-2.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4b96cea171b3d7f6786976a0520f178c42792897653ecca0c5422fb1e6946e6d", size = 4089122, upload-time = "2025-05-19T11:04:14.477Z" }, - { url = "https://files.pythonhosted.org/packages/bd/05/9a68f27fc6110baeedeeebc14fd86e73fa38738c5b741302408fb6355577/shapely-2.1.1-cp312-cp312-win32.whl", hash = "sha256:39dca52201e02996df02e447f729da97cfb6ff41a03cb50f5547f19d02905af8", size = 1522437, upload-time = "2025-05-19T11:04:16.203Z" }, - { url = "https://files.pythonhosted.org/packages/bc/e9/a4560e12b9338842a1f82c9016d2543eaa084fce30a1ca11991143086b57/shapely-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:13d643256f81d55a50013eff6321142781cf777eb6a9e207c2c9e6315ba6044a", size = 1703479, upload-time = "2025-05-19T11:04:18.497Z" }, + { url = "https://files.pythonhosted.org/packages/8f/8d/1ff672dea9ec6a7b5d422eb6d095ed886e2e523733329f75fdcb14ee1149/shapely-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91121757b0a36c9aac3427a651a7e6567110a4a67c97edf04f8d55d4765f6618", size = 1820038, upload-time = "2025-09-24T13:50:15.628Z" }, + { url = "https://files.pythonhosted.org/packages/4f/ce/28fab8c772ce5db23a0d86bf0adaee0c4c79d5ad1db766055fa3dab442e2/shapely-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:16a9c722ba774cf50b5d4541242b4cce05aafd44a015290c82ba8a16931ff63d", size = 1626039, upload-time = "2025-09-24T13:50:16.881Z" }, + { url = "https://files.pythonhosted.org/packages/70/8b/868b7e3f4982f5006e9395c1e12343c66a8155c0374fdc07c0e6a1ab547d/shapely-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cc4f7397459b12c0b196c9efe1f9d7e92463cbba142632b4cc6d8bbbbd3e2b09", size = 3001519, upload-time = "2025-09-24T13:50:18.606Z" }, + { url = "https://files.pythonhosted.org/packages/13/02/58b0b8d9c17c93ab6340edd8b7308c0c5a5b81f94ce65705819b7416dba5/shapely-2.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:136ab87b17e733e22f0961504d05e77e7be8c9b5a8184f685b4a91a84efe3c26", size = 3110842, upload-time = "2025-09-24T13:50:21.77Z" }, + { url = "https://files.pythonhosted.org/packages/af/61/8e389c97994d5f331dcffb25e2fa761aeedfb52b3ad9bcdd7b8671f4810a/shapely-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:16c5d0fc45d3aa0a69074979f4f1928ca2734fb2e0dde8af9611e134e46774e7", size = 4021316, upload-time = "2025-09-24T13:50:23.626Z" }, + { url = "https://files.pythonhosted.org/packages/d3/d4/9b2a9fe6039f9e42ccf2cb3e84f219fd8364b0c3b8e7bbc857b5fbe9c14c/shapely-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ddc759f72b5b2b0f54a7e7cde44acef680a55019eb52ac63a7af2cf17cb9cd2", size = 4178586, upload-time = "2025-09-24T13:50:25.443Z" }, + { url = "https://files.pythonhosted.org/packages/16/f6/9840f6963ed4decf76b08fd6d7fed14f8779fb7a62cb45c5617fa8ac6eab/shapely-2.1.2-cp311-cp311-win32.whl", hash = "sha256:2fa78b49485391224755a856ed3b3bd91c8455f6121fee0db0e71cefb07d0ef6", size = 1543961, upload-time = "2025-09-24T13:50:26.968Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/1e/3f8ea46353c2a33c1669eb7327f9665103aa3a8dfe7f2e4ef714c210b2c2/shapely-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:c64d5c97b2f47e3cd9b712eaced3b061f2b71234b3fc263e0fcf7d889c6559dc", size = 1722856, upload-time = "2025-09-24T13:50:28.497Z" }, + { url = "https://files.pythonhosted.org/packages/24/c0/f3b6453cf2dfa99adc0ba6675f9aaff9e526d2224cbd7ff9c1a879238693/shapely-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fe2533caae6a91a543dec62e8360fe86ffcdc42a7c55f9dfd0128a977a896b94", size = 1833550, upload-time = "2025-09-24T13:50:30.019Z" }, + { url = "https://files.pythonhosted.org/packages/86/07/59dee0bc4b913b7ab59ab1086225baca5b8f19865e6101db9ebb7243e132/shapely-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ba4d1333cc0bc94381d6d4308d2e4e008e0bd128bdcff5573199742ee3634359", size = 1643556, upload-time = "2025-09-24T13:50:32.291Z" }, + { url = "https://files.pythonhosted.org/packages/26/29/a5397e75b435b9895cd53e165083faed5d12fd9626eadec15a83a2411f0f/shapely-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0bd308103340030feef6c111d3eb98d50dc13feea33affc8a6f9fa549e9458a3", size = 2988308, upload-time = "2025-09-24T13:50:33.862Z" }, + { url = "https://files.pythonhosted.org/packages/b9/37/e781683abac55dde9771e086b790e554811a71ed0b2b8a1e789b7430dd44/shapely-2.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1e7d4d7ad262a48bb44277ca12c7c78cb1b0f56b32c10734ec9a1d30c0b0c54b", size = 3099844, upload-time = "2025-09-24T13:50:35.459Z" }, + { url = "https://files.pythonhosted.org/packages/d8/f3/9876b64d4a5a321b9dc482c92bb6f061f2fa42131cba643c699f39317cb9/shapely-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e9eddfe513096a71896441a7c37db72da0687b34752c4e193577a145c71736fc", size = 3988842, upload-time = "2025-09-24T13:50:37.478Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a0/704c7292f7014c7e74ec84eddb7b109e1fbae74a16deae9c1504b1d15565/shapely-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:980c777c612514c0cf99bc8a9de6d286f5e186dcaf9091252fcd444e5638193d", size = 4152714, upload-time = "2025-09-24T13:50:39.9Z" }, + { url = "https://files.pythonhosted.org/packages/53/46/319c9dc788884ad0785242543cdffac0e6530e4d0deb6c4862bc4143dcf3/shapely-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9111274b88e4d7b54a95218e243282709b330ef52b7b86bc6aaf4f805306f454", size = 1542745, upload-time = "2025-09-24T13:50:41.414Z" }, + { url = "https://files.pythonhosted.org/packages/ec/bf/cb6c1c505cb31e818e900b9312d514f381fbfa5c4363edfce0fcc4f8c1a4/shapely-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:743044b4cfb34f9a67205cee9279feaf60ba7d02e69febc2afc609047cb49179", size = 1722861, upload-time = "2025-09-24T13:50:43.35Z" }, ] [[package]] @@ -5666,31 +5690,31 @@ wheels = [ [[package]] name = "sqlalchemy" -version = "2.0.43" +version = "2.0.44" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d7/bc/d59b5d97d27229b0e009bd9098cd81af71c2fa5549c580a0a67b9bed0496/sqlalchemy-2.0.43.tar.gz", hash = "sha256:788bfcef6787a7764169cfe9859fe425bf44559619e1d9f56f5bddf2ebf6f417", size = 9762949, upload-time = 
"2025-08-11T14:24:58.438Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/f2/840d7b9496825333f532d2e3976b8eadbf52034178aac53630d09fe6e1ef/sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22", size = 9819830, upload-time = "2025-10-10T14:39:12.935Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9d/77/fa7189fe44114658002566c6fe443d3ed0ec1fa782feb72af6ef7fbe98e7/sqlalchemy-2.0.43-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:52d9b73b8fb3e9da34c2b31e6d99d60f5f99fd8c1225c9dad24aeb74a91e1d29", size = 2136472, upload-time = "2025-08-11T15:52:21.789Z" }, - { url = "https://files.pythonhosted.org/packages/99/ea/92ac27f2fbc2e6c1766bb807084ca455265707e041ba027c09c17d697867/sqlalchemy-2.0.43-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f42f23e152e4545157fa367b2435a1ace7571cab016ca26038867eb7df2c3631", size = 2126535, upload-time = "2025-08-11T15:52:23.109Z" }, - { url = "https://files.pythonhosted.org/packages/94/12/536ede80163e295dc57fff69724caf68f91bb40578b6ac6583a293534849/sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fb1a8c5438e0c5ea51afe9c6564f951525795cf432bed0c028c1cb081276685", size = 3297521, upload-time = "2025-08-11T15:50:33.536Z" }, - { url = "https://files.pythonhosted.org/packages/03/b5/cacf432e6f1fc9d156eca0560ac61d4355d2181e751ba8c0cd9cb232c8c1/sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db691fa174e8f7036afefe3061bc40ac2b770718be2862bfb03aabae09051aca", size = 3297343, upload-time = "2025-08-11T15:57:51.186Z" }, - { url = "https://files.pythonhosted.org/packages/ca/ba/d4c9b526f18457667de4c024ffbc3a0920c34237b9e9dd298e44c7c00ee5/sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe2b3b4927d0bc03d02ad883f402d5de201dbc8894ac87d2e981e7d87430e60d", size = 3232113, upload-time = "2025-08-11T15:50:34.949Z" }, - { url = "https://files.pythonhosted.org/packages/aa/79/c0121b12b1b114e2c8a10ea297a8a6d5367bc59081b2be896815154b1163/sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d3d9b904ad4a6b175a2de0738248822f5ac410f52c2fd389ada0b5262d6a1e3", size = 3258240, upload-time = "2025-08-11T15:57:52.983Z" }, - { url = "https://files.pythonhosted.org/packages/79/99/a2f9be96fb382f3ba027ad42f00dbe30fdb6ba28cda5f11412eee346bec5/sqlalchemy-2.0.43-cp311-cp311-win32.whl", hash = "sha256:5cda6b51faff2639296e276591808c1726c4a77929cfaa0f514f30a5f6156921", size = 2101248, upload-time = "2025-08-11T15:55:01.855Z" }, - { url = "https://files.pythonhosted.org/packages/ee/13/744a32ebe3b4a7a9c7ea4e57babae7aa22070d47acf330d8e5a1359607f1/sqlalchemy-2.0.43-cp311-cp311-win_amd64.whl", hash = "sha256:c5d1730b25d9a07727d20ad74bc1039bbbb0a6ca24e6769861c1aa5bf2c4c4a8", size = 2126109, upload-time = "2025-08-11T15:55:04.092Z" }, - { url = "https://files.pythonhosted.org/packages/61/db/20c78f1081446095450bdc6ee6cc10045fce67a8e003a5876b6eaafc5cc4/sqlalchemy-2.0.43-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:20d81fc2736509d7a2bd33292e489b056cbae543661bb7de7ce9f1c0cd6e7f24", size = 2134891, upload-time = "2025-08-11T15:51:13.019Z" }, - { url = "https://files.pythonhosted.org/packages/45/0a/3d89034ae62b200b4396f0f95319f7d86e9945ee64d2343dcad857150fa2/sqlalchemy-2.0.43-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b9fc27650ff5a2c9d490c13c14906b918b0de1f8fcbb4c992712d8caf40e83", size = 2123061, upload-time = "2025-08-11T15:51:14.319Z" }, - { url = 
"https://files.pythonhosted.org/packages/cb/10/2711f7ff1805919221ad5bee205971254845c069ee2e7036847103ca1e4c/sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6772e3ca8a43a65a37c88e2f3e2adfd511b0b1da37ef11ed78dea16aeae85bd9", size = 3320384, upload-time = "2025-08-11T15:52:35.088Z" }, - { url = "https://files.pythonhosted.org/packages/6e/0e/3d155e264d2ed2778484006ef04647bc63f55b3e2d12e6a4f787747b5900/sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a113da919c25f7f641ffbd07fbc9077abd4b3b75097c888ab818f962707eb48", size = 3329648, upload-time = "2025-08-11T15:56:34.153Z" }, - { url = "https://files.pythonhosted.org/packages/5b/81/635100fb19725c931622c673900da5efb1595c96ff5b441e07e3dd61f2be/sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4286a1139f14b7d70141c67a8ae1582fc2b69105f1b09d9573494eb4bb4b2687", size = 3258030, upload-time = "2025-08-11T15:52:36.933Z" }, - { url = "https://files.pythonhosted.org/packages/0c/ed/a99302716d62b4965fded12520c1cbb189f99b17a6d8cf77611d21442e47/sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:529064085be2f4d8a6e5fab12d36ad44f1909a18848fcfbdb59cc6d4bbe48efe", size = 3294469, upload-time = "2025-08-11T15:56:35.553Z" }, - { url = "https://files.pythonhosted.org/packages/5d/a2/3a11b06715149bf3310b55a98b5c1e84a42cfb949a7b800bc75cb4e33abc/sqlalchemy-2.0.43-cp312-cp312-win32.whl", hash = "sha256:b535d35dea8bbb8195e7e2b40059e2253acb2b7579b73c1b432a35363694641d", size = 2098906, upload-time = "2025-08-11T15:55:00.645Z" }, - { url = "https://files.pythonhosted.org/packages/bc/09/405c915a974814b90aa591280623adc6ad6b322f61fd5cff80aeaef216c9/sqlalchemy-2.0.43-cp312-cp312-win_amd64.whl", hash = "sha256:1c6d85327ca688dbae7e2b06d7d84cfe4f3fffa5b5f9e21bb6ce9d0e1a0e0e0a", size = 2126260, upload-time = "2025-08-11T15:55:02.965Z" }, - { url = "https://files.pythonhosted.org/packages/b8/d9/13bdde6521f322861fab67473cec4b1cc8999f3871953531cf61945fad92/sqlalchemy-2.0.43-py3-none-any.whl", hash = "sha256:1681c21dd2ccee222c2fe0bef671d1aef7c504087c9c4e800371cfcc8ac966fc", size = 1924759, upload-time = "2025-08-11T15:39:53.024Z" }, + { url = "https://files.pythonhosted.org/packages/e3/81/15d7c161c9ddf0900b076b55345872ed04ff1ed6a0666e5e94ab44b0163c/sqlalchemy-2.0.44-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fe3917059c7ab2ee3f35e77757062b1bea10a0b6ca633c58391e3f3c6c488dd", size = 2140517, upload-time = "2025-10-10T15:36:15.64Z" }, + { url = "https://files.pythonhosted.org/packages/d4/d5/4abd13b245c7d91bdf131d4916fd9e96a584dac74215f8b5bc945206a974/sqlalchemy-2.0.44-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:de4387a354ff230bc979b46b2207af841dc8bf29847b6c7dbe60af186d97aefa", size = 2130738, upload-time = "2025-10-10T15:36:16.91Z" }, + { url = "https://files.pythonhosted.org/packages/cb/3c/8418969879c26522019c1025171cefbb2a8586b6789ea13254ac602986c0/sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3678a0fb72c8a6a29422b2732fe423db3ce119c34421b5f9955873eb9b62c1e", size = 3304145, upload-time = "2025-10-10T15:34:19.569Z" }, + { url = "https://files.pythonhosted.org/packages/94/2d/fdb9246d9d32518bda5d90f4b65030b9bf403a935cfe4c36a474846517cb/sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cf6872a23601672d61a68f390e44703442639a12ee9dd5a88bbce52a695e46e", size = 3304511, upload-time = "2025-10-10T15:47:05.088Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/fb/40f2ad1da97d5c83f6c1269664678293d3fe28e90ad17a1093b735420549/sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:329aa42d1be9929603f406186630135be1e7a42569540577ba2c69952b7cf399", size = 3235161, upload-time = "2025-10-10T15:34:21.193Z" }, + { url = "https://files.pythonhosted.org/packages/95/cb/7cf4078b46752dca917d18cf31910d4eff6076e5b513c2d66100c4293d83/sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:70e03833faca7166e6a9927fbee7c27e6ecde436774cd0b24bbcc96353bce06b", size = 3261426, upload-time = "2025-10-10T15:47:07.196Z" }, + { url = "https://files.pythonhosted.org/packages/f8/3b/55c09b285cb2d55bdfa711e778bdffdd0dc3ffa052b0af41f1c5d6e582fa/sqlalchemy-2.0.44-cp311-cp311-win32.whl", hash = "sha256:253e2f29843fb303eca6b2fc645aca91fa7aa0aa70b38b6950da92d44ff267f3", size = 2105392, upload-time = "2025-10-10T15:38:20.051Z" }, + { url = "https://files.pythonhosted.org/packages/c7/23/907193c2f4d680aedbfbdf7bf24c13925e3c7c292e813326c1b84a0b878e/sqlalchemy-2.0.44-cp311-cp311-win_amd64.whl", hash = "sha256:7a8694107eb4308a13b425ca8c0e67112f8134c846b6e1f722698708741215d5", size = 2130293, upload-time = "2025-10-10T15:38:21.601Z" }, + { url = "https://files.pythonhosted.org/packages/62/c4/59c7c9b068e6813c898b771204aad36683c96318ed12d4233e1b18762164/sqlalchemy-2.0.44-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:72fea91746b5890f9e5e0997f16cbf3d53550580d76355ba2d998311b17b2250", size = 2139675, upload-time = "2025-10-10T16:03:31.064Z" }, + { url = "https://files.pythonhosted.org/packages/d6/ae/eeb0920537a6f9c5a3708e4a5fc55af25900216bdb4847ec29cfddf3bf3a/sqlalchemy-2.0.44-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:585c0c852a891450edbb1eaca8648408a3cc125f18cf433941fa6babcc359e29", size = 2127726, upload-time = "2025-10-10T16:03:35.934Z" }, + { url = "https://files.pythonhosted.org/packages/d8/d5/2ebbabe0379418eda8041c06b0b551f213576bfe4c2f09d77c06c07c8cc5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b94843a102efa9ac68a7a30cd46df3ff1ed9c658100d30a725d10d9c60a2f44", size = 3327603, upload-time = "2025-10-10T15:35:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/5aa65852dadc24b7d8ae75b7efb8d19303ed6ac93482e60c44a585930ea5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:119dc41e7a7defcefc57189cfa0e61b1bf9c228211aba432b53fb71ef367fda1", size = 3337842, upload-time = "2025-10-10T15:43:45.431Z" }, + { url = "https://files.pythonhosted.org/packages/41/92/648f1afd3f20b71e880ca797a960f638d39d243e233a7082c93093c22378/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0765e318ee9179b3718c4fd7ba35c434f4dd20332fbc6857a5e8df17719c24d7", size = 3264558, upload-time = "2025-10-10T15:35:29.93Z" }, + { url = "https://files.pythonhosted.org/packages/40/cf/e27d7ee61a10f74b17740918e23cbc5bc62011b48282170dc4c66da8ec0f/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2e7b5b079055e02d06a4308d0481658e4f06bc7ef211567edc8f7d5dce52018d", size = 3301570, upload-time = "2025-10-10T15:43:48.407Z" }, + { url = "https://files.pythonhosted.org/packages/3b/3d/3116a9a7b63e780fb402799b6da227435be878b6846b192f076d2f838654/sqlalchemy-2.0.44-cp312-cp312-win32.whl", hash = "sha256:846541e58b9a81cce7dee8329f352c318de25aa2f2bbe1e31587eb1f057448b4", size = 2103447, upload-time = "2025-10-10T15:03:21.678Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/83/24690e9dfc241e6ab062df82cc0df7f4231c79ba98b273fa496fb3dd78ed/sqlalchemy-2.0.44-cp312-cp312-win_amd64.whl", hash = "sha256:7cbcb47fd66ab294703e1644f78971f6f2f1126424d2b300678f419aa73c7b6e", size = 2130912, upload-time = "2025-10-10T15:03:24.656Z" }, + { url = "https://files.pythonhosted.org/packages/9c/5e/6a29fa884d9fb7ddadf6b69490a9d45fded3b38541713010dad16b77d015/sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05", size = 1928718, upload-time = "2025-10-10T15:29:45.32Z" }, ] [[package]] @@ -5969,31 +5993,27 @@ wheels = [ [[package]] name = "tomli" -version = "2.2.1" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, - { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, - { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, - { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, - { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, - { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, - { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, 
upload-time = "2024-11-27T22:38:03.206Z" }, - { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, - { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, - { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, - { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, - { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, - { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, - { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, - { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, - { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, - { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, - { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, - { url = 
"https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, - { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, - { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, + { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, + { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, + { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, + { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, + { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, + { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, + { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, + { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, + { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, + { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, ] [[package]] @@ -6023,7 +6043,7 @@ wheels = [ [[package]] name = "transformers" -version = "4.56.1" +version = "4.56.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -6037,39 +6057,39 @@ dependencies = [ { name = "tokenizers" }, { name = "tqdm" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/89/21/dc88ef3da1e49af07ed69386a11047a31dcf1aaf4ded3bc4b173fbf94116/transformers-4.56.1.tar.gz", hash = "sha256:0d88b1089a563996fc5f2c34502f10516cad3ea1aa89f179f522b54c8311fe74", size = 9855473, upload-time = "2025-09-04T20:47:13.14Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/e5/82/0bcfddd134cdf53440becb5e738257cc3cf34cf229d63b57bfd288e6579f/transformers-4.56.2.tar.gz", hash = "sha256:5e7c623e2d7494105c726dd10f6f90c2c99a55ebe86eef7233765abd0cb1c529", size = 9844296, upload-time = "2025-09-19T15:16:26.778Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/71/7c/283c3dd35e00e22a7803a0b2a65251347b745474a82399be058bde1c9f15/transformers-4.56.1-py3-none-any.whl", hash = "sha256:1697af6addfb6ddbce9618b763f4b52d5a756f6da4899ffd1b4febf58b779248", size = 11608197, upload-time = "2025-09-04T20:47:04.895Z" }, + { url = "https://files.pythonhosted.org/packages/70/26/2591b48412bde75e33bfd292034103ffe41743cacd03120e3242516cd143/transformers-4.56.2-py3-none-any.whl", hash = "sha256:79c03d0e85b26cb573c109ff9eafa96f3c8d4febfd8a0774e8bba32702dd6dde", size = 11608055, upload-time = "2025-09-19T15:16:23.736Z" }, ] [[package]] name = "ty" -version = "0.0.1a20" +version = "0.0.1a22" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7a/82/a5e3b4bc5280ec49c4b0b43d0ff727d58c7df128752c9c6f97ad0b5f575f/ty-0.0.1a20.tar.gz", hash = "sha256:933b65a152f277aa0e23ba9027e5df2c2cc09e18293e87f2a918658634db5f15", size = 4194773, upload-time = "2025-09-03T12:35:46.775Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/87/eab73cdc990d1141b60237379975efc0e913bfa0d19083daab0f497444a6/ty-0.0.1a22.tar.gz", hash = "sha256:b20ec5362830a1e9e05654c15e88607fdbb45325ec130a9a364c6dd412ecbf55", size = 4312182, upload-time = "2025-10-10T13:07:15.88Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/c8/f7d39392043d5c04936f6cad90e50eb661965ed092ca4bfc01db917d7b8a/ty-0.0.1a20-py3-none-linux_armv6l.whl", hash = "sha256:f73a7aca1f0d38af4d6999b375eb00553f3bfcba102ae976756cc142e14f3450", size = 8443599, upload-time = "2025-09-03T12:35:04.289Z" }, - { url = "https://files.pythonhosted.org/packages/1e/57/5aec78f9b8a677b7439ccded7d66c3361e61247e0f6b14e659b00dd01008/ty-0.0.1a20-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cad12c857ea4b97bf61e02f6796e13061ccca5e41f054cbd657862d80aa43bae", size = 8618102, upload-time = "2025-09-03T12:35:07.448Z" }, - { url = "https://files.pythonhosted.org/packages/15/20/50c9107d93cdb55676473d9dc4e2339af6af606660c9428d3b86a1b2a476/ty-0.0.1a20-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f153b65c7fcb6b8b59547ddb6353761b3e8d8bb6f0edd15e3e3ac14405949f7a", size = 8192167, upload-time = "2025-09-03T12:35:09.706Z" }, - { url = "https://files.pythonhosted.org/packages/85/28/018b2f330109cee19e81c5ca9df3dc29f06c5778440eb9af05d4550c4302/ty-0.0.1a20-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8c4336987a6a781d4392a9fd7b3a39edb7e4f3dd4f860e03f46c932b52aefa2", size = 8349256, upload-time = "2025-09-03T12:35:11.76Z" }, - { url = "https://files.pythonhosted.org/packages/cd/c9/2f8797a05587158f52b142278796ffd72c893bc5ad41840fce5aeb65c6f2/ty-0.0.1a20-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ff75cd4c744d09914e8c9db8d99e02f82c9379ad56b0a3fc4c5c9c923cfa84e", size = 8271214, upload-time = "2025-09-03T12:35:13.741Z" }, - { url = "https://files.pythonhosted.org/packages/30/d4/2cac5e5eb9ee51941358cb3139aadadb59520cfaec94e4fcd2b166969748/ty-0.0.1a20-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e26437772be7f7808868701f2bf9e14e706a6ec4c7d02dbd377ff94d7ba60c11", size = 9264939, upload-time = "2025-09-03T12:35:16.896Z" }, - { url = 
"https://files.pythonhosted.org/packages/93/96/a6f2b54e484b2c6a5488f217882237dbdf10f0fdbdb6cd31333d57afe494/ty-0.0.1a20-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:83a7ee12465841619b5eb3ca962ffc7d576bb1c1ac812638681aee241acbfbbe", size = 9743137, upload-time = "2025-09-03T12:35:19.799Z" }, - { url = "https://files.pythonhosted.org/packages/6e/67/95b40dcbec3d222f3af5fe5dd1ce066d42f8a25a2f70d5724490457048e7/ty-0.0.1a20-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:726d0738be4459ac7ffae312ba96c5f486d6cbc082723f322555d7cba9397871", size = 9368153, upload-time = "2025-09-03T12:35:22.569Z" }, - { url = "https://files.pythonhosted.org/packages/2c/24/689fa4c4270b9ef9a53dc2b1d6ffade259ba2c4127e451f0629e130ea46a/ty-0.0.1a20-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0b481f26513f38543df514189fb16744690bcba8d23afee95a01927d93b46e36", size = 9099637, upload-time = "2025-09-03T12:35:24.94Z" }, - { url = "https://files.pythonhosted.org/packages/a1/5b/913011cbf3ea4030097fb3c4ce751856114c9e1a5e1075561a4c5242af9b/ty-0.0.1a20-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7abbe3c02218c12228b1d7c5f98c57240029cc3bcb15b6997b707c19be3908c1", size = 8952000, upload-time = "2025-09-03T12:35:27.288Z" }, - { url = "https://files.pythonhosted.org/packages/df/f9/f5ba2ae455b20c5bb003f9940ef8142a8c4ed9e27de16e8f7472013609db/ty-0.0.1a20-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:fff51c75ee3f7cc6d7722f2f15789ef8ffe6fd2af70e7269ac785763c906688e", size = 8217938, upload-time = "2025-09-03T12:35:29.54Z" }, - { url = "https://files.pythonhosted.org/packages/eb/62/17002cf9032f0981cdb8c898d02422c095c30eefd69ca62a8b705d15bd0f/ty-0.0.1a20-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b4124ab75e0e6f09fe7bc9df4a77ee43c5e0ef7e61b0c149d7c089d971437cbd", size = 8292369, upload-time = "2025-09-03T12:35:31.748Z" }, - { url = "https://files.pythonhosted.org/packages/28/d6/0879b1fb66afe1d01d45c7658f3849aa641ac4ea10679404094f3b40053e/ty-0.0.1a20-py3-none-musllinux_1_2_i686.whl", hash = "sha256:8a138fa4f74e6ed34e9fd14652d132409700c7ff57682c2fed656109ebfba42f", size = 8811973, upload-time = "2025-09-03T12:35:33.997Z" }, - { url = "https://files.pythonhosted.org/packages/60/1e/70bf0348cfe8ba5f7532983f53c508c293ddf5fa9f942ed79a3c4d576df3/ty-0.0.1a20-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:8eff8871d6b88d150e2a67beba2c57048f20c090c219f38ed02eebaada04c124", size = 9010990, upload-time = "2025-09-03T12:35:36.766Z" }, - { url = "https://files.pythonhosted.org/packages/b7/ca/03d85c7650359247b1ca3f38a0d869a608ef540450151920e7014ed58292/ty-0.0.1a20-py3-none-win32.whl", hash = "sha256:3c2ace3a22fab4bd79f84c74e3dab26e798bfba7006bea4008d6321c1bd6efc6", size = 8100746, upload-time = "2025-09-03T12:35:40.007Z" }, - { url = "https://files.pythonhosted.org/packages/94/53/7a1937b8c7a66d0c8ed7493de49ed454a850396fe137d2ae12ed247e0b2f/ty-0.0.1a20-py3-none-win_amd64.whl", hash = "sha256:f41e77ff118da3385915e13c3f366b3a2f823461de54abd2e0ca72b170ba0f19", size = 8748861, upload-time = "2025-09-03T12:35:42.175Z" }, - { url = "https://files.pythonhosted.org/packages/27/36/5a3a70c5d497d3332f9e63cabc9c6f13484783b832fecc393f4f1c0c4aa8/ty-0.0.1a20-py3-none-win_arm64.whl", hash = "sha256:d8ac1c5a14cda5fad1a8b53959d9a5d979fe16ce1cc2785ea8676fed143ac85f", size = 8269906, upload-time = "2025-09-03T12:35:45.045Z" }, + { url = 
"https://files.pythonhosted.org/packages/4d/30/83e2dbfbc70de8a1932b19daf05ce803d7d76cdc6251de1519a49cf1c27d/ty-0.0.1a22-py3-none-linux_armv6l.whl", hash = "sha256:6efba0c777881d2d072fa7375a64ad20357e825eff2a0b6ff9ec80399a04253b", size = 8581795, upload-time = "2025-10-10T13:06:44.396Z" }, + { url = "https://files.pythonhosted.org/packages/d7/8c/5193534fc4a3569f517408828d077b26d6280fe8c2dd0bdc63db4403dcdb/ty-0.0.1a22-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:2ada020eebe1b44403affdf45cd5c8d3fb8312c3e80469d795690093c0921f55", size = 8682602, upload-time = "2025-10-10T13:06:46.44Z" }, + { url = "https://files.pythonhosted.org/packages/22/4a/7ba53493bf37b61d3e0dfe6df910e6bc74c40d16c3effd84e15c0863d34e/ty-0.0.1a22-py3-none-macosx_11_0_arm64.whl", hash = "sha256:ed4f11f1a5824ea10d3e46b1990d092c3f341b1d492c357d23bed2ac347fd253", size = 8278839, upload-time = "2025-10-10T13:06:48.688Z" }, + { url = "https://files.pythonhosted.org/packages/52/0a/d9862c41b9615de56d2158bfbb5177dbf5a65e94922d3dd13855f48cb91b/ty-0.0.1a22-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56f48d8f94292909d596dbeb56ff7f9f070bd316aa628b45c02ca2b2f5797f31", size = 8421483, upload-time = "2025-10-10T13:06:50.75Z" }, + { url = "https://files.pythonhosted.org/packages/a5/cb/3ebe0e45b80724d4c2f849fdf304179727fd06df7fee7cd12fe6c3efe49d/ty-0.0.1a22-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:733e9ac22885b6574de26bdbae439c960a06acc825a938d3780c9d498bb65339", size = 8419225, upload-time = "2025-10-10T13:06:52.533Z" }, + { url = "https://files.pythonhosted.org/packages/4f/b5/da65f3f8ad31d881ca9987a3f6f26069a0cc649c9354adb7453ca62116bb/ty-0.0.1a22-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5135d662484e56809c77b3343614005585caadaa5c1cf643ed6a09303497652b", size = 9352336, upload-time = "2025-10-10T13:06:54.476Z" }, + { url = "https://files.pythonhosted.org/packages/a3/24/9c46f2eb16734ab0fcf3291486b1c5c528a1569f94541dc1f19f97dd2a5b/ty-0.0.1a22-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:87f297f99a98154d33a3f21991979418c65d8bf480f6a1bad1e54d46d2dc7df7", size = 9857840, upload-time = "2025-10-10T13:06:56.514Z" }, + { url = "https://files.pythonhosted.org/packages/d8/ae/930c94bbbe5c049eae5355a197c39522844f55c7ab7fccd0ba061f618541/ty-0.0.1a22-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3310217eaa4dccf20b7336fcbeb072097addc6fde0c9d3f791dea437af0aa6dc", size = 9452611, upload-time = "2025-10-10T13:06:58.154Z" }, + { url = "https://files.pythonhosted.org/packages/a2/80/d8f594438465c352cf0ebd4072f5ca3be2871153a3cd279ed2f35ecd487c/ty-0.0.1a22-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b032e81012bf5228fd65f01b50e29eb409534b6aac28ee5c48ee3b7b860ddf", size = 9214875, upload-time = "2025-10-10T13:06:59.861Z" }, + { url = "https://files.pythonhosted.org/packages/fd/07/f852fb20ac27707de495c39a02aeb056e3368833b7e12888d43b1f61594d/ty-0.0.1a22-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3ffda8149cab0000a21e7a078142073e27a1a9ac03b9a0837aa2f53d1fbebcb", size = 8906715, upload-time = "2025-10-10T13:07:01.926Z" }, + { url = "https://files.pythonhosted.org/packages/40/4d/0e0b85b4179891cc3067a6e717f5161921c07873a4f545963fdf1dd3619c/ty-0.0.1a22-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:afa512e7dc78f0cf0b55f87394968ba59c46993c67bc0ef295962144fea85b12", size = 8350873, upload-time = "2025-10-10T13:07:03.999Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/1f/e70c63e12b4a0d97d4fd6f872dd199113666ad1b236e18838fa5e5d5502d/ty-0.0.1a22-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:069cdbbea6025f7ebbb5e9043c8d0daf760358df46df8304ef5ca5bb3e320aef", size = 8442568, upload-time = "2025-10-10T13:07:05.745Z" }, + { url = "https://files.pythonhosted.org/packages/de/3b/55518906cb3598f2b99ff1e86c838d77d006cab70cdd2a0a625d02ccb52c/ty-0.0.1a22-py3-none-musllinux_1_2_i686.whl", hash = "sha256:67d31d902e6fd67a4b3523604f635e71d2ec55acfb9118f984600584bfe0ff2a", size = 8896775, upload-time = "2025-10-10T13:07:08.02Z" }, + { url = "https://files.pythonhosted.org/packages/c3/ea/60c654c27931bf84fa9cb463a4c4c49e8869c052fa607a6e930be717b619/ty-0.0.1a22-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:f9e154f262162e6f76b01f318e469ac6c22ffce22b010c396ed34e81d8369821", size = 9054544, upload-time = "2025-10-10T13:07:09.675Z" }, + { url = "https://files.pythonhosted.org/packages/6c/60/9a6d5530d6829ccf656e6ae0fb13d70a4e2514f4fb8910266ebd54286620/ty-0.0.1a22-py3-none-win32.whl", hash = "sha256:37525433ca7b02a8fca4b8fa9dcde818bf3a413b539b9dbc8f7b39d124eb7c49", size = 8165703, upload-time = "2025-10-10T13:07:11.378Z" }, + { url = "https://files.pythonhosted.org/packages/14/9c/ac08c832643850d4e18cbc959abc69cd51d531fe11bdb691098b3cf2f562/ty-0.0.1a22-py3-none-win_amd64.whl", hash = "sha256:75d21cdeba8bcef247af89518d7ce98079cac4a55c4160cb76682ea40a18b92c", size = 8828319, upload-time = "2025-10-10T13:07:12.815Z" }, + { url = "https://files.pythonhosted.org/packages/22/df/38068fc44e3cfb455aeb41d0ff1850a4d3c9988010466d4a8d19860b8b9a/ty-0.0.1a22-py3-none-win_arm64.whl", hash = "sha256:1c7f040fe311e9696917417434c2a0e58402235be842c508002c6a2eff1398b0", size = 8367136, upload-time = "2025-10-10T13:07:14.518Z" }, ] [[package]] name = "typer" -version = "0.17.4" +version = "0.19.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -6077,9 +6097,9 @@ dependencies = [ { name = "shellingham" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/92/e8/2a73ccf9874ec4c7638f172efc8972ceab13a0e3480b389d6ed822f7a822/typer-0.17.4.tar.gz", hash = "sha256:b77dc07d849312fd2bb5e7f20a7af8985c7ec360c45b051ed5412f64d8dc1580", size = 103734, upload-time = "2025-09-05T18:14:40.746Z" } +sdist = { url = "https://files.pythonhosted.org/packages/21/ca/950278884e2ca20547ff3eb109478c6baf6b8cf219318e6bc4f666fad8e8/typer-0.19.2.tar.gz", hash = "sha256:9ad824308ded0ad06cc716434705f691d4ee0bfd0fb081839d2e426860e7fdca", size = 104755, upload-time = "2025-09-23T09:47:48.256Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/93/72/6b3e70d32e89a5cbb6a4513726c1ae8762165b027af569289e19ec08edd8/typer-0.17.4-py3-none-any.whl", hash = "sha256:015534a6edaa450e7007eba705d5c18c3349dcea50a6ad79a5ed530967575824", size = 46643, upload-time = "2025-09-05T18:14:39.166Z" }, + { url = "https://files.pythonhosted.org/packages/00/22/35617eee79080a5d071d0f14ad698d325ee6b3bf824fc0467c03b30e7fa8/typer-0.19.2-py3-none-any.whl", hash = "sha256:755e7e19670ffad8283db353267cb81ef252f595aa6834a0d1ca9312d9326cb9", size = 46748, upload-time = "2025-09-23T09:47:46.777Z" }, ] [[package]] @@ -6093,11 +6113,11 @@ wheels = [ [[package]] name = "types-awscrt" -version = "0.27.6" +version = "0.28.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/56/ce/5d84526a39f44c420ce61b16654193f8437d74b54f21597ea2ac65d89954/types_awscrt-0.27.6.tar.gz", hash = 
"sha256:9d3f1865a93b8b2c32f137514ac88cb048b5bc438739945ba19d972698995bfb", size = 16937, upload-time = "2025-08-13T01:54:54.659Z" } +sdist = { url = "https://files.pythonhosted.org/packages/60/19/a3a6377c9e2e389c1421c033a1830c29cac08f2e1e05a082ea84eb22c75f/types_awscrt-0.28.1.tar.gz", hash = "sha256:66d77ec283e1dc907526a44511a12624118723a396c36d3f3dd9855cb614ce14", size = 17410, upload-time = "2025-10-11T21:55:07.443Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ac/af/e3d20e3e81d235b3964846adf46a334645a8a9b25a0d3d472743eb079552/types_awscrt-0.27.6-py3-none-any.whl", hash = "sha256:18aced46da00a57f02eb97637a32e5894dc5aa3dc6a905ba3e5ed85b9f3c526b", size = 39626, upload-time = "2025-08-13T01:54:53.454Z" }, + { url = "https://files.pythonhosted.org/packages/ea/c7/0266b797d19b82aebe0e177efe35de7aabdc192bc1605ce3309331f0a505/types_awscrt-0.28.1-py3-none-any.whl", hash = "sha256:d88f43ef779f90b841ba99badb72fe153077225a4e426ae79e943184827b4443", size = 41851, upload-time = "2025-10-11T21:55:06.235Z" }, ] [[package]] @@ -6123,14 +6143,14 @@ wheels = [ [[package]] name = "types-cffi" -version = "1.17.0.20250822" +version = "1.17.0.20250915" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-setuptools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/da/0c/76a48cb6e742cac4d61a4ec632dd30635b6d302f5acdc2c0a27572ac7ae3/types_cffi-1.17.0.20250822.tar.gz", hash = "sha256:bf6f5a381ea49da7ff895fae69711271e6192c434470ce6139bf2b2e0d0fa08d", size = 17130, upload-time = "2025-08-22T03:04:02.445Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/98/ea454cea03e5f351323af6a482c65924f3c26c515efd9090dede58f2b4b6/types_cffi-1.17.0.20250915.tar.gz", hash = "sha256:4362e20368f78dabd5c56bca8004752cc890e07a71605d9e0d9e069dbaac8c06", size = 17229, upload-time = "2025-09-15T03:01:25.31Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/21/f7/68029931e7539e3246b33386a19c475f234c71d2a878411847b20bb31960/types_cffi-1.17.0.20250822-py3-none-any.whl", hash = "sha256:183dd76c1871a48936d7b931488e41f0f25a7463abe10b5816be275fc11506d5", size = 20083, upload-time = "2025-08-22T03:04:01.466Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ec/092f2b74b49ec4855cdb53050deb9699f7105b8fda6fe034c0781b8687f3/types_cffi-1.17.0.20250915-py3-none-any.whl", hash = "sha256:cef4af1116c83359c11bb4269283c50f0688e9fc1d7f0eeb390f3661546da52c", size = 20112, upload-time = "2025-09-15T03:01:24.187Z" }, ] [[package]] @@ -6218,11 +6238,11 @@ wheels = [ [[package]] name = "types-html5lib" -version = "1.1.11.20250809" +version = "1.1.11.20250917" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/70/ab/6aa4c487ae6f4f9da5153143bdc9e9b4fbc2b105df7ef8127fb920dc1f21/types_html5lib-1.1.11.20250809.tar.gz", hash = "sha256:7976ec7426bb009997dc5e072bca3ed988dd747d0cbfe093c7dfbd3d5ec8bf57", size = 16793, upload-time = "2025-08-09T03:14:20.819Z" } +sdist = { url = "https://files.pythonhosted.org/packages/68/4b/a970718e8bd9324ee8fb8eaf02ff069f6d03c20d4523bb4232892ecc3d06/types_html5lib-1.1.11.20250917.tar.gz", hash = "sha256:7b52743377f33f9b4fd7385afbd2d457b8864ee51f90ff2a795ad9e8c053373a", size = 16868, upload-time = "2025-09-17T02:47:41.18Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/05/328a2d6ecbd8aa3e16512600da78b1fe4605125896794a21824f3cac6f14/types_html5lib-1.1.11.20250809-py3-none-any.whl", hash = "sha256:e5f48ab670ae4cdeafd88bbc47113d8126dcf08318e0b8d70df26ecc13eca9b6", size = 
22867, upload-time = "2025-08-09T03:14:20.048Z" }, + { url = "https://files.pythonhosted.org/packages/78/8a/da91a9c64dcb5e69beb567519857411996d8ecae9f6f128bcef8260e7a8d/types_html5lib-1.1.11.20250917-py3-none-any.whl", hash = "sha256:b294fd06d60da205daeb2f615485ca4d475088d2eff1009cf427f4a80fcd5346", size = 22908, upload-time = "2025-09-17T02:47:40.39Z" }, ] [[package]] @@ -6284,20 +6304,20 @@ wheels = [ [[package]] name = "types-openpyxl" -version = "3.1.5.20250822" +version = "3.1.5.20250919" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/7f/ea358482217448deafdb9232f198603511d2efa99e429822256f2b38975a/types_openpyxl-3.1.5.20250822.tar.gz", hash = "sha256:c8704a163e3798290d182c13c75da85f68cd97ff9b35f0ebfb94cf72f8b67bb3", size = 100858, upload-time = "2025-08-22T03:03:31.835Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/12/8bc4a25d49f1e4b7bbca868daa3ee80b1983d8137b4986867b5b65ab2ecd/types_openpyxl-3.1.5.20250919.tar.gz", hash = "sha256:232b5906773eebace1509b8994cdadda043f692cfdba9bfbb86ca921d54d32d7", size = 100880, upload-time = "2025-09-19T02:54:39.997Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/e8/cac4728e8dcbeb69d6de7de26bb9edb508e9f5c82476ecda22b58b939e60/types_openpyxl-3.1.5.20250822-py3-none-any.whl", hash = "sha256:da7a430d99c48347acf2dc351695f9db6ff90ecb761fed577b4a98fef2d0f831", size = 166093, upload-time = "2025-08-22T03:03:30.686Z" }, + { url = "https://files.pythonhosted.org/packages/36/3c/d49cf3f4489a10e9ddefde18fd258f120754c5825d06d145d9a0aaac770b/types_openpyxl-3.1.5.20250919-py3-none-any.whl", hash = "sha256:bd06f18b12fd5e1c9f0b666ee6151d8140216afa7496f7ebb9fe9d33a1a3ce99", size = 166078, upload-time = "2025-09-19T02:54:38.657Z" }, ] [[package]] name = "types-pexpect" -version = "4.9.0.20250809" +version = "4.9.0.20250916" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7f/a2/29564e69dee62f0f887ba7bfffa82fa4975504952e6199b218d3b403becd/types_pexpect-4.9.0.20250809.tar.gz", hash = "sha256:17a53c785b847c90d0be9149b00b0254e6e92c21cd856e853dac810ddb20101f", size = 13240, upload-time = "2025-08-09T03:15:04.554Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0c/e6/cc43e306dc7de14ec7861c24ac4957f688741ae39ae685049695d796b587/types_pexpect-4.9.0.20250916.tar.gz", hash = "sha256:69e5fed6199687a730a572de780a5749248a4c5df2ff1521e194563475c9928d", size = 13322, upload-time = "2025-09-16T02:49:25.61Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/1b/4d557287e6672feb749cf0d8ef5eb19189aff043e73e509e3775febc1cf1/types_pexpect-4.9.0.20250809-py3-none-any.whl", hash = "sha256:d19d206b8a7c282dac9376f26f072e036d22e9cf3e7d8eba3f477500b1f39101", size = 17039, upload-time = "2025-08-09T03:15:03.528Z" }, + { url = "https://files.pythonhosted.org/packages/aa/6d/7740e235a9fb2570968da7d386d7feb511ce68cd23472402ff8cdf7fc78f/types_pexpect-4.9.0.20250916-py3-none-any.whl", hash = "sha256:7fa43cb96042ac58bc74f7c28e5d85782be0ee01344149886849e9d90936fe8a", size = 17057, upload-time = "2025-09-16T02:49:24.546Z" }, ] [[package]] @@ -6311,20 +6331,20 @@ wheels = [ [[package]] name = "types-psutil" -version = "7.0.0.20250822" +version = "7.0.0.20251001" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6d/aa/09699c829d7cc4624138d3ae67eecd4de9574e55729b1c63ca3e5a657f86/types_psutil-7.0.0.20250822.tar.gz", hash = 
"sha256:226cbc0c0ea9cc0a50b8abcc1d91a26c876dcb40be238131f697883690419698", size = 20358, upload-time = "2025-08-22T03:02:04.556Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/91/b020f9100b196a1f247cd12575f68dcdad94f032c1e0c42987d7632142ce/types_psutil-7.0.0.20251001.tar.gz", hash = "sha256:60d696200ddae28677e7d88cdebd6e960294e85adefbaafe0f6e5d0e7b4c1963", size = 20469, upload-time = "2025-10-01T03:04:21.292Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7d/46/45006309e20859e12c024d91bb913e6b89a706cd6f9377031c9f7e274ece/types_psutil-7.0.0.20250822-py3-none-any.whl", hash = "sha256:81c82f01aba5a4510b9d8b28154f577b780be75a08954aed074aa064666edc09", size = 23110, upload-time = "2025-08-22T03:02:03.38Z" }, + { url = "https://files.pythonhosted.org/packages/c0/99/50f30e0b648e6f583165cb2e535b0256a02a03efa4868cb2f017ad25b3d8/types_psutil-7.0.0.20251001-py3-none-any.whl", hash = "sha256:adc31de8386d31c61bd4123112fd51e2c700c7502a001cad72a3d56ba6b463d1", size = 23164, upload-time = "2025-10-01T03:04:20.089Z" }, ] [[package]] name = "types-psycopg2" -version = "2.9.21.20250809" +version = "2.9.21.20251012" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/17/d0/66f3f04bab48bfdb2c8b795b2b3e75eb20c7d1fb0516916db3be6aa4a683/types_psycopg2-2.9.21.20250809.tar.gz", hash = "sha256:b7c2cbdcf7c0bd16240f59ba694347329b0463e43398de69784ea4dee45f3c6d", size = 26539, upload-time = "2025-08-09T03:14:54.711Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9b/b3/2d09eaf35a084cffd329c584970a3fa07101ca465c13cad1576d7c392587/types_psycopg2-2.9.21.20251012.tar.gz", hash = "sha256:4cdafd38927da0cfde49804f39ab85afd9c6e9c492800e42f1f0c1a1b0312935", size = 26710, upload-time = "2025-10-12T02:55:39.5Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/98/182497602921c47fadc8470d51a32e5c75343c8931c0b572a5c4ae3b948b/types_psycopg2-2.9.21.20250809-py3-none-any.whl", hash = "sha256:59b7b0ed56dcae9efae62b8373497274fc1a0484bdc5135cdacbe5a8f44e1d7b", size = 24824, upload-time = "2025-08-09T03:14:53.908Z" }, + { url = "https://files.pythonhosted.org/packages/ec/0c/05feaf8cb51159f2c0af04b871dab7e98a2f83a3622f5f216331d2dd924c/types_psycopg2-2.9.21.20251012-py3-none-any.whl", hash = "sha256:712bad5c423fe979e357edbf40a07ca40ef775d74043de72bd4544ca328cc57e", size = 24883, upload-time = "2025-10-12T02:55:38.439Z" }, ] [[package]] @@ -6341,11 +6361,11 @@ wheels = [ [[package]] name = "types-pymysql" -version = "1.1.0.20250909" +version = "1.1.0.20250916" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ae/0f/bb4331221fd560379ec702d61a11d5a5eead9a2866bb39eae294bde29988/types_pymysql-1.1.0.20250909.tar.gz", hash = "sha256:5ba7230425635b8c59316353701b99a087b949e8002dfeff652be0b62cee445b", size = 22189, upload-time = "2025-09-09T02:55:31.039Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/12/bda1d977c07e0e47502bede1c44a986dd45946494d89e005e04cdeb0f8de/types_pymysql-1.1.0.20250916.tar.gz", hash = "sha256:98d75731795fcc06723a192786662bdfa760e1e00f22809c104fbb47bac5e29b", size = 22131, upload-time = "2025-09-16T02:49:22.039Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/35/5681d881506a31bbbd9f7d5f6edcbf65489835081965b539b0802a665036/types_pymysql-1.1.0.20250909-py3-none-any.whl", hash = "sha256:c9957d4c10a31748636da5c16b0a0eef6751354d05adcd1b86acb27e8df36fb6", size = 23179, upload-time = "2025-09-09T02:55:29.873Z" }, + { url = 
"https://files.pythonhosted.org/packages/21/eb/a225e32a6e7b196af67ab2f1b07363595f63255374cc3b88bfdab53b4ee8/types_pymysql-1.1.0.20250916-py3-none-any.whl", hash = "sha256:873eb9836bb5e3de4368cc7010ca72775f86e9692a5c7810f8c7f48da082e55b", size = 23063, upload-time = "2025-09-16T02:49:20.933Z" }, ] [[package]] @@ -6363,11 +6383,11 @@ wheels = [ [[package]] name = "types-python-dateutil" -version = "2.9.0.20250822" +version = "2.9.0.20251008" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0c/0a/775f8551665992204c756be326f3575abba58c4a3a52eef9909ef4536428/types_python_dateutil-2.9.0.20250822.tar.gz", hash = "sha256:84c92c34bd8e68b117bff742bc00b692a1e8531262d4507b33afcc9f7716cd53", size = 16084, upload-time = "2025-08-22T03:02:00.613Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/83/24ed25dd0c6277a1a170c180ad9eef5879ecc9a4745b58d7905a4588c80d/types_python_dateutil-2.9.0.20251008.tar.gz", hash = "sha256:c3826289c170c93ebd8360c3485311187df740166dbab9dd3b792e69f2bc1f9c", size = 16128, upload-time = "2025-10-08T02:51:34.93Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/d9/a29dfa84363e88b053bf85a8b7f212a04f0d7343a4d24933baa45c06e08b/types_python_dateutil-2.9.0.20250822-py3-none-any.whl", hash = "sha256:849d52b737e10a6dc6621d2bd7940ec7c65fcb69e6aa2882acf4e56b2b508ddc", size = 17892, upload-time = "2025-08-22T03:01:59.436Z" }, + { url = "https://files.pythonhosted.org/packages/da/af/5d24b8d49ef358468ecfdff5c556adf37f4fd28e336b96f923661a808329/types_python_dateutil-2.9.0.20251008-py3-none-any.whl", hash = "sha256:b9a5232c8921cf7661b29c163ccc56055c418ab2c6eabe8f917cbcc73a4c4157", size = 17934, upload-time = "2025-10-08T02:51:33.55Z" }, ] [[package]] @@ -6399,11 +6419,11 @@ wheels = [ [[package]] name = "types-pyyaml" -version = "6.0.12.20250822" +version = "6.0.12.20250915" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/49/85/90a442e538359ab5c9e30de415006fb22567aa4301c908c09f19e42975c2/types_pyyaml-6.0.12.20250822.tar.gz", hash = "sha256:259f1d93079d335730a9db7cff2bcaf65d7e04b4a56b5927d49a612199b59413", size = 17481, upload-time = "2025-08-22T03:02:16.209Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/69/3c51b36d04da19b92f9e815be12753125bd8bc247ba0470a982e6979e71c/types_pyyaml-6.0.12.20250915.tar.gz", hash = "sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3", size = 17522, upload-time = "2025-09-15T03:01:00.728Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/8e/8f0aca667c97c0d76024b37cffa39e76e2ce39ca54a38f285a64e6ae33ba/types_pyyaml-6.0.12.20250822-py3-none-any.whl", hash = "sha256:1fe1a5e146aa315483592d292b72a172b65b946a6d98aa6ddd8e4aa838ab7098", size = 20314, upload-time = "2025-08-22T03:02:15.002Z" }, + { url = "https://files.pythonhosted.org/packages/bd/e0/1eed384f02555dde685fff1a1ac805c1c7dcb6dd019c916fe659b1c1f9ec/types_pyyaml-6.0.12.20250915-py3-none-any.whl", hash = "sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6", size = 20338, upload-time = "2025-09-15T03:00:59.218Z" }, ] [[package]] @@ -6430,23 +6450,23 @@ wheels = [ [[package]] name = "types-requests" -version = "2.32.4.20250809" +version = "2.32.4.20250913" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "urllib3" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/ed/b0/9355adb86ec84d057fea765e4c49cce592aaf3d5117ce5609a95a7fc3dac/types_requests-2.32.4.20250809.tar.gz", hash = "sha256:d8060de1c8ee599311f56ff58010fb4902f462a1470802cf9f6ed27bc46c4df3", size = 23027, upload-time = "2025-08-09T03:17:10.664Z" } +sdist = { url = "https://files.pythonhosted.org/packages/36/27/489922f4505975b11de2b5ad07b4fe1dca0bca9be81a703f26c5f3acfce5/types_requests-2.32.4.20250913.tar.gz", hash = "sha256:abd6d4f9ce3a9383f269775a9835a4c24e5cd6b9f647d64f88aa4613c33def5d", size = 23113, upload-time = "2025-09-13T02:40:02.309Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2b/6f/ec0012be842b1d888d46884ac5558fd62aeae1f0ec4f7a581433d890d4b5/types_requests-2.32.4.20250809-py3-none-any.whl", hash = "sha256:f73d1832fb519ece02c85b1f09d5f0dd3108938e7d47e7f94bbfa18a6782b163", size = 20644, upload-time = "2025-08-09T03:17:09.716Z" }, + { url = "https://files.pythonhosted.org/packages/2a/20/9a227ea57c1285986c4cf78400d0a91615d25b24e257fd9e2969606bdfae/types_requests-2.32.4.20250913-py3-none-any.whl", hash = "sha256:78c9c1fffebbe0fa487a418e0fa5252017e9c60d1a2da394077f1780f655d7e1", size = 20658, upload-time = "2025-09-13T02:40:01.115Z" }, ] [[package]] name = "types-s3transfer" -version = "0.13.1" +version = "0.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a5/c5/23946fac96c9dd5815ec97afd1c8ad6d22efa76c04a79a4823f2f67692a5/types_s3transfer-0.13.1.tar.gz", hash = "sha256:ce488d79fdd7d3b9d39071939121eca814ec65de3aa36bdce1f9189c0a61cc80", size = 14181, upload-time = "2025-08-31T16:57:06.93Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/9b/8913198b7fc700acc1dcb84827137bb2922052e43dde0f4fb0ed2dc6f118/types_s3transfer-0.14.0.tar.gz", hash = "sha256:17f800a87c7eafab0434e9d87452c809c290ae906c2024c24261c564479e9c95", size = 14218, upload-time = "2025-10-11T21:11:27.892Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8e/dc/b3f9b5c93eed6ffe768f4972661250584d5e4f248b548029026964373bcd/types_s3transfer-0.13.1-py3-none-any.whl", hash = "sha256:4ff730e464a3fd3785b5541f0f555c1bd02ad408cf82b6b7a95429f6b0d26b4a", size = 19617, upload-time = "2025-08-31T16:57:05.73Z" }, + { url = "https://files.pythonhosted.org/packages/92/c3/4dfb2e87c15ca582b7d956dfb7e549de1d005c758eb9a305e934e1b83fda/types_s3transfer-0.14.0-py3-none-any.whl", hash = "sha256:108134854069a38b048e9b710b9b35904d22a9d0f37e4e1889c2e6b58e5b3253", size = 19697, upload-time = "2025-10-11T21:11:26.749Z" }, ] [[package]] @@ -6481,25 +6501,25 @@ wheels = [ [[package]] name = "types-six" -version = "1.17.0.20250515" +version = "1.17.0.20251009" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cc/78/344047eeced8d230140aa3d9503aa969acb61c6095e7308bbc1ff1de3865/types_six-1.17.0.20250515.tar.gz", hash = "sha256:f4f7f0398cb79304e88397336e642b15e96fbeacf5b96d7625da366b069d2d18", size = 15598, upload-time = "2025-05-15T03:04:19.806Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/f7/448215bc7695cfa0c8a7e0dcfa54fe31b1d52fb87004fed32e659dd85c80/types_six-1.17.0.20251009.tar.gz", hash = "sha256:efe03064ecd0ffb0f7afe133990a2398d8493d8d1c1cc10ff3dfe476d57ba44f", size = 15552, upload-time = "2025-10-09T02:54:26.02Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/85/5ee1c8e35b33b9c8ea1816d5a4e119c27f8bb1539b73b1f636f07aa64750/types_six-1.17.0.20250515-py3-none-any.whl", hash = 
"sha256:adfaa9568caf35e03d80ffa4ed765c33b282579c869b40bf4b6009c7d8db3fb1", size = 19987, upload-time = "2025-05-15T03:04:18.556Z" }, + { url = "https://files.pythonhosted.org/packages/b8/2f/94baa623421940e3eb5d2fc63570ebb046f2bb4d9573b8787edab3ed2526/types_six-1.17.0.20251009-py3-none-any.whl", hash = "sha256:2494f4c2a58ada0edfe01ea84b58468732e43394c572d9cf5b1dd06d86c487a3", size = 19935, upload-time = "2025-10-09T02:54:25.096Z" }, ] [[package]] name = "types-tensorflow" -version = "2.18.0.20250809" +version = "2.18.0.20251008" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, { name = "types-protobuf" }, { name = "types-requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/07/84/d350f0170a043283cd805344658522b00d769d04753b5a1685c1c8a06731/types_tensorflow-2.18.0.20250809.tar.gz", hash = "sha256:9ed54cbb24c8b12d8c59b9a8afbf7c5f2d46d5e2bf42d00ececaaa79e21d7ed1", size = 257495, upload-time = "2025-08-09T03:17:36.093Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/0a/13bde03fb5a23faaadcca2d6914f865e444334133902310ea05e6ade780c/types_tensorflow-2.18.0.20251008.tar.gz", hash = "sha256:8db03d4dd391a362e2ea796ffdbccb03c082127606d4d852edb7ed9504745933", size = 257550, upload-time = "2025-10-08T02:51:51.104Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/1c/cc50c17971643a92d5973d35a3d35f017f9d759d95fb7fdafa568a59ba9c/types_tensorflow-2.18.0.20250809-py3-none-any.whl", hash = "sha256:e9aae9da92ddb9991ebd27117db2c2dffe29d7d019db2a70166fd0d099c4fa4f", size = 329000, upload-time = "2025-08-09T03:17:35.02Z" }, + { url = "https://files.pythonhosted.org/packages/66/cc/e50e49db621b0cf03c1f3d10be47389de41a02dc9924c3a83a9c1a55bf28/types_tensorflow-2.18.0.20251008-py3-none-any.whl", hash = "sha256:d6b0dd4d81ac6d9c5af803ebcc8ce0f65c5850c063e8b9789dc828898944b5f4", size = 329023, upload-time = "2025-10-08T02:51:50.024Z" }, ] [[package]] @@ -6547,14 +6567,14 @@ wheels = [ [[package]] name = "typing-inspection" -version = "0.4.1" +version = "0.4.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, ] [[package]] @@ -6696,11 +6716,11 @@ wheels = [ [[package]] name = "urllib3" -version = "2.5.0" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268, upload-time = "2024-12-22T07:47:30.032Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, + { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369, upload-time = "2024-12-22T07:47:28.074Z" }, ] [[package]] @@ -6714,15 +6734,15 @@ wheels = [ [[package]] name = "uvicorn" -version = "0.35.0" +version = "0.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473, upload-time = "2025-06-28T16:15:46.058Z" } +sdist = { url = "https://files.pythonhosted.org/packages/71/57/1616c8274c3442d802621abf5deb230771c7a0fec9414cb6763900eb3868/uvicorn-0.37.0.tar.gz", hash = "sha256:4115c8add6d3fd536c8ee77f0e14a7fd2ebba939fed9b02583a97f80648f9e13", size = 80367, upload-time = "2025-09-23T13:33:47.486Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" }, + { url = "https://files.pythonhosted.org/packages/85/cd/584a2ceb5532af99dd09e50919e3615ba99aa127e9850eafe5f31ddfdb9a/uvicorn-0.37.0-py3-none-any.whl", hash = "sha256:913b2b88672343739927ce381ff9e2ad62541f9f8289664fa1d1d3803fa2ce6c", size = 67976, upload-time = "2025-09-23T13:33:45.842Z" }, ] [package.optional-dependencies] @@ -6794,7 +6814,7 @@ wheels = [ [[package]] name = "wandb" -version = "0.21.4" +version = "0.22.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -6808,17 +6828,17 @@ dependencies = [ { name = "sentry-sdk" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/59/a8/aaa3f3f8e410f34442466aac10b1891b3084d35b98aef59ebcb4c0efb941/wandb-0.21.4.tar.gz", hash = "sha256:b350d50973409658deb455010fafcfa81e6be3470232e316286319e839ffb67b", size = 40175929, upload-time = "2025-09-11T21:14:29.161Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c1/a8/680bd77e11a278e6c14a2cb4646e8ab9525b2baaa81c3d12dc0f616aa4aa/wandb-0.22.2.tar.gz", hash = "sha256:510f5a1ac30d16921c36c3b932da852f046641d4aee98a86a7f5ec03a6e95bda", size = 41401439, upload-time = "2025-10-07T19:54:21.88Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/d2/6b/3a8d9db18a4c4568599a8792c0c8b1f422d9864c7123e8301a9477fbf0ac/wandb-0.21.4-py3-none-macosx_10_14_x86_64.whl", hash = "sha256:c681ef7adb09925251d8d995c58aa76ae86a46dbf8de3b67353ad99fdef232d5", size = 18845369, upload-time = "2025-09-11T21:14:02.879Z" }, - { url = "https://files.pythonhosted.org/packages/60/e0/d7d6818938ec6958c93d979f9a90ea3d06bdc41e130b30f8cd89ae03c245/wandb-0.21.4-py3-none-macosx_12_0_arm64.whl", hash = "sha256:d35acc65c10bb7ac55d1331f7b1b8ab761f368f7b051131515f081a56ea5febc", size = 18339122, upload-time = "2025-09-11T21:14:06.455Z" }, - { url = "https://files.pythonhosted.org/packages/13/29/9bb8ed4adf32bed30e4d5df74d956dd1e93b6fd4bbc29dbe84167c84804b/wandb-0.21.4-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:765e66b57b7be5f393ecebd9a9d2c382c9f979d19cdee4a3f118eaafed43fca1", size = 19081975, upload-time = "2025-09-11T21:14:09.317Z" }, - { url = "https://files.pythonhosted.org/packages/30/6e/4aa33bc2c56b70c0116e73687c72c7a674f4072442633b3b23270d2215e3/wandb-0.21.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06127ec49245d12fdb3922c1eca1ab611cefc94adabeaaaba7b069707c516cba", size = 18161358, upload-time = "2025-09-11T21:14:12.092Z" }, - { url = "https://files.pythonhosted.org/packages/f7/56/d9f845ecfd5e078cf637cb29d8abe3350b8a174924c54086168783454a8f/wandb-0.21.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48d4f65f1be5f5a25b868695e09cdbfe481678220df349a8c2cbed3992fb497f", size = 19602680, upload-time = "2025-09-11T21:14:14.987Z" }, - { url = "https://files.pythonhosted.org/packages/68/ea/237a3c2b679a35e02e577c5bf844d6a221a7d32925ab8d5230529e9f2841/wandb-0.21.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ebd11f78351a3ca22caa1045146a6d2ad9e62fed6d0de2e67a0db5710d75103a", size = 18166392, upload-time = "2025-09-11T21:14:17.478Z" }, - { url = "https://files.pythonhosted.org/packages/12/e3/dbf2c575c79c99d94f16ce1a2cbbb2529d5029a76348c1ddac7e47f6873f/wandb-0.21.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:595b9e77591a805653e05db8b892805ee0a5317d147ef4976353e4f1cc16ebdc", size = 19678800, upload-time = "2025-09-11T21:14:20.264Z" }, - { url = "https://files.pythonhosted.org/packages/fa/eb/4ed04879d697772b8eb251c0e5af9a4ff7e2cc2b3fcd4b8eee91253ec2f1/wandb-0.21.4-py3-none-win32.whl", hash = "sha256:f9c86eb7eb7d40c6441533428188b1ae3205674e80c940792d850e2c1fe8d31e", size = 18738950, upload-time = "2025-09-11T21:14:23.08Z" }, - { url = "https://files.pythonhosted.org/packages/c3/4a/86c5e19600cb6a616a45f133c26826b46133499cd72d592772929d530ccd/wandb-0.21.4-py3-none-win_amd64.whl", hash = "sha256:2da3d5bb310a9f9fb7f680f4aef285348095a4cc6d1ce22b7343ba4e3fffcd84", size = 18738953, upload-time = "2025-09-11T21:14:25.539Z" }, + { url = "https://files.pythonhosted.org/packages/e7/b3/8c637fb594cfd574ce9c9f7d0ac2f2d12742eb38ec59dcbb713beae95343/wandb-0.22.2-py3-none-macosx_12_0_arm64.whl", hash = "sha256:2e29c9fa4462b5411b2cd2175ae33eff4309c91de7c426bca6bc8e7abc7e5dec", size = 18677549, upload-time = "2025-10-07T19:54:00.839Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f3/e309a726eaebddad6b8d9a73a50891e5796962ec8a091bb6a61d31692d1e/wandb-0.22.2-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:c42d594cd7a9da4fd39ecdb0abbc081b61f304123277b2b6c4ba84283956fd21", size = 19715188, upload-time = "2025-10-07T19:54:03.805Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/73/fad59910215876008f4781b57d828d1b19b3677c9b46af615e7229746435/wandb-0.22.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5188d84e66d3fd584f3b3ae4d2a70e78f29403c0528e6aecaa4188a1fcf54d8", size = 18463148, upload-time = "2025-10-07T19:54:05.676Z" }, + { url = "https://files.pythonhosted.org/packages/87/11/572c1913b5b92e4c519f735adfae572b46f2d79d99ede63eec0d6a272d6e/wandb-0.22.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88ccd484af9f21cfc127976793c3cf66cfe1acd75bd8cd650086a64e88bac4bf", size = 19908645, upload-time = "2025-10-07T19:54:07.693Z" }, + { url = "https://files.pythonhosted.org/packages/6d/0d/133aa82f5a505ba638b4fda5014cefddfe7f1f6238ef4afc0871ec61c41f/wandb-0.22.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:abf0ed175e791af64110e0a0b99ce02bbbbd1017722bc32d3bc328efb86450cd", size = 18501348, upload-time = "2025-10-07T19:54:10.234Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d5/776203be2601872f01dacc6a5b4274106ec0db7cd3bf2cdb3b741f8fc932/wandb-0.22.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:44e77c56403b90bf3473a7ca3bfc4d42c636b7c0e31a5fb9cd0382f08302f74b", size = 20001756, upload-time = "2025-10-07T19:54:12.452Z" }, + { url = "https://files.pythonhosted.org/packages/30/43/ae3fa46e20b1d9a6508dd9abe716d57205c038ed4661c5c98ace48a60eac/wandb-0.22.2-py3-none-win32.whl", hash = "sha256:44d12bd379dbe15be5ceed6bdf23803d42f648ba0dd111297b4c47a3c7be6dbd", size = 19075950, upload-time = "2025-10-07T19:54:14.892Z" }, + { url = "https://files.pythonhosted.org/packages/09/59/c174321e868205f7a659d1e5ec51f546e62267296d6f4179bb9119294964/wandb-0.22.2-py3-none-win_amd64.whl", hash = "sha256:c95eb221bf316c0872f7ac55071856b9f25f95a2de983ada48acf653ce259386", size = 19075953, upload-time = "2025-10-07T19:54:16.837Z" }, + { url = "https://files.pythonhosted.org/packages/7a/a2/c7c24fda78513cab5686949d8cb36459dbbccbbb4b2b6fc67237ece31a00/wandb-0.22.2-py3-none-win_arm64.whl", hash = "sha256:20d2ab9aa10445aab3d60914a980f002a4f66566e28b0cd156b1e462f0080a0d", size = 17383217, upload-time = "2025-10-07T19:54:19.384Z" }, ] [[package]] @@ -6864,11 +6884,11 @@ wheels = [ [[package]] name = "wcwidth" -version = "0.2.13" +version = "0.2.14" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" }, + { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" }, ] [[package]] @@ -6897,16 +6917,20 
@@ wheels = [ [[package]] name = "weaviate-client" -version = "3.24.2" +version = "4.17.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "authlib" }, - { name = "requests" }, + { name = "deprecation" }, + { name = "grpcio" }, + { name = "httpx" }, + { name = "protobuf" }, + { name = "pydantic" }, { name = "validators" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1f/c1/3285a21d8885f2b09aabb65edb9a8e062a35c2d7175e1bb024fa096582ab/weaviate-client-3.24.2.tar.gz", hash = "sha256:6914c48c9a7e5ad0be9399271f9cb85d6f59ab77476c6d4e56a3925bf149edaa", size = 199332, upload-time = "2023-10-04T08:37:54.26Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/0e/e4582b007427187a9fde55fa575db4b766c81929d2b43a3dd8becce50567/weaviate_client-4.17.0.tar.gz", hash = "sha256:731d58d84b0989df4db399b686357ed285fb95971a492ccca8dec90bb2343c51", size = 769019, upload-time = "2025-09-26T11:20:27.381Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/98/3136d05f93e30cf29e1db280eaadf766df18d812dfe7994bcced653b2340/weaviate_client-3.24.2-py3-none-any.whl", hash = "sha256:bc50ca5fcebcd48de0d00f66700b0cf7c31a97c4cd3d29b4036d77c5d1d9479b", size = 107968, upload-time = "2023-10-04T08:37:52.511Z" }, + { url = "https://files.pythonhosted.org/packages/5b/c5/2da3a45866da7a935dab8ad07be05dcaee48b3ad4955144583b651929be7/weaviate_client-4.17.0-py3-none-any.whl", hash = "sha256:60e4a355b90537ee1e942ab0b76a94750897a13d9cf13c5a6decbd166d0ca8b5", size = 582763, upload-time = "2025-09-26T11:20:25.864Z" }, ] [[package]] @@ -6920,11 +6944,11 @@ wheels = [ [[package]] name = "websocket-client" -version = "1.8.0" +version = "1.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648, upload-time = "2024-04-23T22:16:16.976Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/41/aa4bf9664e4cda14c3b39865b12251e8e7d239f4cd0e3cc1b6c2ccde25c1/websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98", size = 70576, upload-time = "2025-10-07T21:16:36.495Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826, upload-time = "2024-04-23T22:16:14.422Z" }, + { url = "https://files.pythonhosted.org/packages/34/db/b10e48aa8fff7407e67470363eac595018441cf32d5e1001567a7aeba5d2/websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef", size = 82616, upload-time = "2025-10-07T21:16:34.951Z" }, ] [[package]] @@ -7033,20 +7057,20 @@ wheels = [ [[package]] name = "xlsxwriter" -version = "3.2.5" +version = "3.2.9" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/47/7704bac42ac6fe1710ae099b70e6a1e68ed173ef14792b647808c357da43/xlsxwriter-3.2.5.tar.gz", hash = "sha256:7e88469d607cdc920151c0ab3ce9cf1a83992d4b7bc730c5ffdd1a12115a7dbe", size = 213306, upload-time = "2025-06-17T08:59:14.619Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/2c/c06ef49dc36e7954e55b802a8b231770d286a9758b3d936bd1e04ce5ba88/xlsxwriter-3.2.9.tar.gz", hash 
= "sha256:254b1c37a368c444eac6e2f867405cc9e461b0ed97a3233b2ac1e574efb4140c", size = 215940, upload-time = "2025-09-16T00:16:21.63Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/34/a22e6664211f0c8879521328000bdcae9bf6dbafa94a923e531f6d5b3f73/xlsxwriter-3.2.5-py3-none-any.whl", hash = "sha256:4f4824234e1eaf9d95df9a8fe974585ff91d0f5e3d3f12ace5b71e443c1c6abd", size = 172347, upload-time = "2025-06-17T08:59:13.453Z" }, + { url = "https://files.pythonhosted.org/packages/3a/0c/3662f4a66880196a590b202f0db82d919dd2f89e99a27fadef91c4a33d41/xlsxwriter-3.2.9-py3-none-any.whl", hash = "sha256:9a5db42bc5dff014806c58a20b9eae7322a134abb6fce3c92c181bfb275ec5b3", size = 175315, upload-time = "2025-09-16T00:16:20.108Z" }, ] [[package]] name = "xmltodict" -version = "0.15.1" +version = "1.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d7/7a/42f705c672e77dc3ce85a6823bb289055323aac30de7c4b9eca1e28b2c17/xmltodict-0.15.1.tar.gz", hash = "sha256:3d8d49127f3ce6979d40a36dbcad96f8bab106d232d24b49efdd4bd21716983c", size = 62984, upload-time = "2025-09-08T18:33:19.349Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/aa/917ceeed4dbb80d2f04dbd0c784b7ee7bba8ae5a54837ef0e5e062cd3cfb/xmltodict-1.0.2.tar.gz", hash = "sha256:54306780b7c2175a3967cad1db92f218207e5bc1aba697d887807c0fb68b7649", size = 25725, upload-time = "2025-09-17T21:59:26.459Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/4e/001c53a22f6bd5f383f49915a53e40f0cab2d3f1884d968f3ae14be367b7/xmltodict-0.15.1-py2.py3-none-any.whl", hash = "sha256:dcd84b52f30a15be5ac4c9099a0cb234df8758624b035411e329c5c1e7a49089", size = 11260, upload-time = "2025-09-08T18:33:17.87Z" }, + { url = "https://files.pythonhosted.org/packages/c0/20/69a0e6058bc5ea74892d089d64dfc3a62ba78917ec5e2cfa70f7c92ba3a5/xmltodict-1.0.2-py3-none-any.whl", hash = "sha256:62d0fddb0dcbc9f642745d8bbf4d81fd17d6dfaec5a15b5c1876300aad92af0d", size = 13893, upload-time = "2025-09-17T21:59:24.859Z" }, ] [[package]] @@ -7118,65 +7142,62 @@ wheels = [ [[package]] name = "zope-interface" -version = "8.0" +version = "8.0.1" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "setuptools" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/68/21/a6af230243831459f7238764acb3086a9cf96dbf405d8084d30add1ee2e7/zope_interface-8.0.tar.gz", hash = "sha256:b14d5aac547e635af749ce20bf49a3f5f93b8a854d2a6b1e95d4d5e5dc618f7d", size = 253397, upload-time = "2025-09-12T07:17:13.571Z" } +sdist = { url = "https://files.pythonhosted.org/packages/88/3a/7fcf02178b8fad0a51e67e32765cd039ae505d054d744d76b8c2bbcba5ba/zope_interface-8.0.1.tar.gz", hash = "sha256:eba5610d042c3704a48222f7f7c6ab5b243ed26f917e2bc69379456b115e02d1", size = 253746, upload-time = "2025-09-25T05:55:51.285Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5b/6f/a16fc92b643313a55a0d2ccb040dd69048372f0a8f64107570256e664e5c/zope_interface-8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ec1da7b9156ae000cea2d19bad83ddb5c50252f9d7b186da276d17768c67a3cb", size = 207652, upload-time = "2025-09-12T07:23:51.746Z" }, - { url = "https://files.pythonhosted.org/packages/01/0c/6bebd9417072c3eb6163228783cabb4890e738520b45562ade1cbf7d19d6/zope_interface-8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:160ba50022b342451baf516de3e3a2cd2d8c8dbac216803889a5eefa67083688", size = 208096, upload-time = "2025-09-12T07:23:52.895Z" }, - { url = 
"https://files.pythonhosted.org/packages/62/f1/03c4d2b70ce98828760dfc19f34be62526ea8b7f57160a009d338f396eb4/zope_interface-8.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:879bb5bf937cde4acd738264e87f03c7bf7d45478f7c8b9dc417182b13d81f6c", size = 254770, upload-time = "2025-09-12T07:58:18.379Z" }, - { url = "https://files.pythonhosted.org/packages/bb/73/06400c668d7d334d2296d23b3dacace43f45d6e721c6f6d08ea512703ede/zope_interface-8.0-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7fb931bf55c66a092c5fbfb82a0ff3cc3221149b185bde36f0afc48acb8dcd92", size = 259542, upload-time = "2025-09-12T08:00:27.632Z" }, - { url = "https://files.pythonhosted.org/packages/d9/28/565b5f41045aa520853410d33b420f605018207a854fba3d93ed85e7bef2/zope_interface-8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1858d1e5bb2c5ae766890708184a603eb484bb7454e306e967932a9f3c558b07", size = 260720, upload-time = "2025-09-12T08:29:19.238Z" }, - { url = "https://files.pythonhosted.org/packages/c5/46/6c6b0df12665fec622133932a361829b6e6fbe255e6ce01768eedbcb7fa0/zope_interface-8.0-cp311-cp311-win_amd64.whl", hash = "sha256:7e88c66ebedd1e839082f308b8372a50ef19423e01ee2e09600b80e765a10234", size = 211914, upload-time = "2025-09-12T07:23:19.858Z" }, - { url = "https://files.pythonhosted.org/packages/ae/42/9c79e4b2172e2584727cbc35bba1ea6884c15f1a77fe2b80ed8358893bb2/zope_interface-8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b80447a3a5c7347f4ebf3e50de319c8d2a5dabd7de32f20899ac50fc275b145d", size = 208359, upload-time = "2025-09-12T07:23:40.746Z" }, - { url = "https://files.pythonhosted.org/packages/d9/3a/77b5e3dbaced66141472faf788ea20e9b395076ea6fd30e2fde4597047b1/zope_interface-8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:67047a4470cb2fddb5ba5105b0160a1d1c30ce4b300cf264d0563136adac4eac", size = 208547, upload-time = "2025-09-12T07:23:42.088Z" }, - { url = "https://files.pythonhosted.org/packages/7c/d3/a920b3787373e717384ef5db2cafaae70d451b8850b9b4808c024867dd06/zope_interface-8.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:1bee9c1b42513148f98d3918affd829804a5c992c000c290dc805f25a75a6a3f", size = 258986, upload-time = "2025-09-12T07:58:20.681Z" }, - { url = "https://files.pythonhosted.org/packages/4d/37/c7f5b1ccfcbb0b90d57d02b5744460e9f77a84932689ca8d99a842f330b2/zope_interface-8.0-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:804ebacb2776eb89a57d9b5e9abec86930e0ee784a0005030801ae2f6c04d5d8", size = 264438, upload-time = "2025-09-12T08:00:28.921Z" }, - { url = "https://files.pythonhosted.org/packages/43/eb/fd6fefc92618bdf16fbfd71fb43ed206f99b8db5a0dd55797f4e33d7dd75/zope_interface-8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c4d9d3982aaa88b177812cd911ceaf5ffee4829e86ab3273c89428f2c0c32cc4", size = 263971, upload-time = "2025-09-12T08:29:20.693Z" }, - { url = "https://files.pythonhosted.org/packages/d9/ca/f99f4ef959b2541f0a3e05768d9ff48ad055d4bed00c7a438b088d54196a/zope_interface-8.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea1f2e47bc0124a03ee1e5fb31aee5dfde876244bcc552b9e3eb20b041b350d7", size = 212031, upload-time = "2025-09-12T07:23:04.755Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/2f/c10c739bcb9b072090c97c2e08533777497190daa19d190d72b4cce9c7cb/zope_interface-8.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4bd01022d2e1bce4a4a4ed9549edb25393c92e607d7daa6deff843f1f68b479d", size = 207903, upload-time = "2025-09-25T05:58:21.671Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e1/9845ac3697f108d9a1af6912170c59a23732090bbfb35955fe77e5544955/zope_interface-8.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:29be8db8b712d94f1c05e24ea230a879271d787205ba1c9a6100d1d81f06c69a", size = 208345, upload-time = "2025-09-25T05:58:24.217Z" }, + { url = "https://files.pythonhosted.org/packages/f2/49/6573bc8b841cfab18e80c8e8259f1abdbbf716140011370de30231be79ad/zope_interface-8.0.1-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:51ae1b856565b30455b7879fdf0a56a88763b401d3f814fa9f9542d7410dbd7e", size = 255027, upload-time = "2025-09-25T05:58:19.975Z" }, + { url = "https://files.pythonhosted.org/packages/e2/fd/908b0fd4b1ab6e412dfac9bd2b606f2893ef9ba3dd36d643f5e5b94c57b3/zope_interface-8.0.1-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d2e7596149cb1acd1d4d41b9f8fe2ffc0e9e29e2e91d026311814181d0d9efaf", size = 259800, upload-time = "2025-09-25T05:58:11.487Z" }, + { url = "https://files.pythonhosted.org/packages/dc/78/8419a2b4e88410520ed4b7f93bbd25a6d4ae66c4e2b131320f2b90f43077/zope_interface-8.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b2737c11c34fb9128816759864752d007ec4f987b571c934c30723ed881a7a4f", size = 260978, upload-time = "2025-09-25T06:26:24.483Z" }, + { url = "https://files.pythonhosted.org/packages/e5/90/caf68152c292f1810e2bd3acd2177badf08a740aa8a348714617d6c9ad0b/zope_interface-8.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:cf66e4bf731aa7e0ced855bb3670e8cda772f6515a475c6a107bad5cb6604103", size = 212155, upload-time = "2025-09-25T05:59:40.318Z" }, + { url = "https://files.pythonhosted.org/packages/dc/a6/0f08713ddda834c428ebf97b2a7fd8dea50c0100065a8955924dbd94dae8/zope_interface-8.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:115f27c1cc95ce7a517d960ef381beedb0a7ce9489645e80b9ab3cbf8a78799c", size = 208609, upload-time = "2025-09-25T05:58:53.698Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/d423045f54dc81e0991ec655041e7a0eccf6b2642535839dd364b35f4d7f/zope_interface-8.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:af655c573b84e3cb6a4f6fd3fbe04e4dc91c63c6b6f99019b3713ef964e589bc", size = 208797, upload-time = "2025-09-25T05:58:56.258Z" }, + { url = "https://files.pythonhosted.org/packages/c6/43/39d4bb3f7a80ebd261446792493cfa4e198badd47107224f5b6fe1997ad9/zope_interface-8.0.1-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:23f82ef9b2d5370750cc1bf883c3b94c33d098ce08557922a3fbc7ff3b63dfe1", size = 259242, upload-time = "2025-09-25T05:58:21.602Z" }, + { url = "https://files.pythonhosted.org/packages/da/29/49effcff64ef30731e35520a152a9dfcafec86cf114b4c2aff942e8264ba/zope_interface-8.0.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35a1565d5244997f2e629c5c68715b3d9d9036e8df23c4068b08d9316dcb2822", size = 264696, upload-time = "2025-09-25T05:58:13.351Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/39/b947673ec9a258eeaa20208dd2f6127d9fbb3e5071272a674ebe02063a78/zope_interface-8.0.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:029ea1db7e855a475bf88d9910baab4e94d007a054810e9007ac037a91c67c6f", size = 264229, upload-time = "2025-09-25T06:26:26.226Z" }, + { url = "https://files.pythonhosted.org/packages/8f/ee/eed6efd1fc3788d1bef7a814e0592d8173b7fe601c699b935009df035fc2/zope_interface-8.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0beb3e7f7dc153944076fcaf717a935f68d39efa9fce96ec97bafcc0c2ea6cab", size = 212270, upload-time = "2025-09-25T05:58:53.584Z" }, ] [[package]] name = "zstandard" -version = "0.24.0" +version = "0.25.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/09/1b/c20b2ef1d987627765dcd5bf1dadb8ef6564f00a87972635099bb76b7a05/zstandard-0.24.0.tar.gz", hash = "sha256:fe3198b81c00032326342d973e526803f183f97aa9e9a98e3f897ebafe21178f", size = 905681, upload-time = "2025-08-17T18:36:36.352Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/aa/3e0508d5a5dd96529cdc5a97011299056e14c6505b678fd58938792794b1/zstandard-0.25.0.tar.gz", hash = "sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b", size = 711513, upload-time = "2025-09-14T22:15:54.002Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/01/1f/5c72806f76043c0ef9191a2b65281dacdf3b65b0828eb13bb2c987c4fb90/zstandard-0.24.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:addfc23e3bd5f4b6787b9ca95b2d09a1a67ad5a3c318daaa783ff90b2d3a366e", size = 795228, upload-time = "2025-08-17T18:21:46.978Z" }, - { url = "https://files.pythonhosted.org/packages/0b/ba/3059bd5cd834666a789251d14417621b5c61233bd46e7d9023ea8bc1043a/zstandard-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6b005bcee4be9c3984b355336283afe77b2defa76ed6b89332eced7b6fa68b68", size = 640520, upload-time = "2025-08-17T18:21:48.162Z" }, - { url = "https://files.pythonhosted.org/packages/57/07/f0e632bf783f915c1fdd0bf68614c4764cae9dd46ba32cbae4dd659592c3/zstandard-0.24.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:3f96a9130171e01dbb6c3d4d9925d604e2131a97f540e223b88ba45daf56d6fb", size = 5347682, upload-time = "2025-08-17T18:21:50.266Z" }, - { url = "https://files.pythonhosted.org/packages/a6/4c/63523169fe84773a7462cd090b0989cb7c7a7f2a8b0a5fbf00009ba7d74d/zstandard-0.24.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd0d3d16e63873253bad22b413ec679cf6586e51b5772eb10733899832efec42", size = 5057650, upload-time = "2025-08-17T18:21:52.634Z" }, - { url = "https://files.pythonhosted.org/packages/c6/16/49013f7ef80293f5cebf4c4229535a9f4c9416bbfd238560edc579815dbe/zstandard-0.24.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:b7a8c30d9bf4bd5e4dcfe26900bef0fcd9749acde45cdf0b3c89e2052fda9a13", size = 5404893, upload-time = "2025-08-17T18:21:54.54Z" }, - { url = "https://files.pythonhosted.org/packages/4d/38/78e8bcb5fc32a63b055f2b99e0be49b506f2351d0180173674f516cf8a7a/zstandard-0.24.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:52cd7d9fa0a115c9446abb79b06a47171b7d916c35c10e0c3aa6f01d57561382", size = 5452389, upload-time = "2025-08-17T18:21:56.822Z" }, - { url = 
"https://files.pythonhosted.org/packages/55/8a/81671f05619edbacd49bd84ce6899a09fc8299be20c09ae92f6618ccb92d/zstandard-0.24.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a0f6fc2ea6e07e20df48752e7700e02e1892c61f9a6bfbacaf2c5b24d5ad504b", size = 5558888, upload-time = "2025-08-17T18:21:58.68Z" }, - { url = "https://files.pythonhosted.org/packages/49/cc/e83feb2d7d22d1f88434defbaeb6e5e91f42a4f607b5d4d2d58912b69d67/zstandard-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e46eb6702691b24ddb3e31e88b4a499e31506991db3d3724a85bd1c5fc3cfe4e", size = 5048038, upload-time = "2025-08-17T18:22:00.642Z" }, - { url = "https://files.pythonhosted.org/packages/08/c3/7a5c57ff49ef8943877f85c23368c104c2aea510abb339a2dc31ad0a27c3/zstandard-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5e3b9310fd7f0d12edc75532cd9a56da6293840c84da90070d692e0bb15f186", size = 5573833, upload-time = "2025-08-17T18:22:02.402Z" }, - { url = "https://files.pythonhosted.org/packages/f9/00/64519983cd92535ba4bdd4ac26ac52db00040a52d6c4efb8d1764abcc343/zstandard-0.24.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:76cdfe7f920738ea871f035568f82bad3328cbc8d98f1f6988264096b5264efd", size = 4961072, upload-time = "2025-08-17T18:22:04.384Z" }, - { url = "https://files.pythonhosted.org/packages/72/ab/3a08a43067387d22994fc87c3113636aa34ccd2914a4d2d188ce365c5d85/zstandard-0.24.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3f2fe35ec84908dddf0fbf66b35d7c2878dbe349552dd52e005c755d3493d61c", size = 5268462, upload-time = "2025-08-17T18:22:06.095Z" }, - { url = "https://files.pythonhosted.org/packages/49/cf/2abb3a1ad85aebe18c53e7eca73223f1546ddfa3bf4d2fb83fc5a064c5ca/zstandard-0.24.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:aa705beb74ab116563f4ce784fa94771f230c05d09ab5de9c397793e725bb1db", size = 5443319, upload-time = "2025-08-17T18:22:08.572Z" }, - { url = "https://files.pythonhosted.org/packages/40/42/0dd59fc2f68f1664cda11c3b26abdf987f4e57cb6b6b0f329520cd074552/zstandard-0.24.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:aadf32c389bb7f02b8ec5c243c38302b92c006da565e120dfcb7bf0378f4f848", size = 5822355, upload-time = "2025-08-17T18:22:10.537Z" }, - { url = "https://files.pythonhosted.org/packages/99/c0/ea4e640fd4f7d58d6f87a1e7aca11fb886ac24db277fbbb879336c912f63/zstandard-0.24.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e40cd0fc734aa1d4bd0e7ad102fd2a1aefa50ce9ef570005ffc2273c5442ddc3", size = 5365257, upload-time = "2025-08-17T18:22:13.159Z" }, - { url = "https://files.pythonhosted.org/packages/27/a9/92da42a5c4e7e4003271f2e1f0efd1f37cfd565d763ad3604e9597980a1c/zstandard-0.24.0-cp311-cp311-win32.whl", hash = "sha256:cda61c46343809ecda43dc620d1333dd7433a25d0a252f2dcc7667f6331c7b61", size = 435559, upload-time = "2025-08-17T18:22:17.29Z" }, - { url = "https://files.pythonhosted.org/packages/e2/8e/2c8e5c681ae4937c007938f954a060fa7c74f36273b289cabdb5ef0e9a7e/zstandard-0.24.0-cp311-cp311-win_amd64.whl", hash = "sha256:3b95fc06489aa9388400d1aab01a83652bc040c9c087bd732eb214909d7fb0dd", size = 505070, upload-time = "2025-08-17T18:22:14.808Z" }, - { url = "https://files.pythonhosted.org/packages/52/10/a2f27a66bec75e236b575c9f7b0d7d37004a03aa2dcde8e2decbe9ed7b4d/zstandard-0.24.0-cp311-cp311-win_arm64.whl", hash = "sha256:ad9fd176ff6800a0cf52bcf59c71e5de4fa25bf3ba62b58800e0f84885344d34", size = 461507, upload-time = "2025-08-17T18:22:15.964Z" }, - { url = 
"https://files.pythonhosted.org/packages/26/e9/0bd281d9154bba7fc421a291e263911e1d69d6951aa80955b992a48289f6/zstandard-0.24.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a2bda8f2790add22773ee7a4e43c90ea05598bffc94c21c40ae0a9000b0133c3", size = 795710, upload-time = "2025-08-17T18:22:19.189Z" }, - { url = "https://files.pythonhosted.org/packages/36/26/b250a2eef515caf492e2d86732e75240cdac9d92b04383722b9753590c36/zstandard-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cc76de75300f65b8eb574d855c12518dc25a075dadb41dd18f6322bda3fe15d5", size = 640336, upload-time = "2025-08-17T18:22:20.466Z" }, - { url = "https://files.pythonhosted.org/packages/79/bf/3ba6b522306d9bf097aac8547556b98a4f753dc807a170becaf30dcd6f01/zstandard-0.24.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:d2b3b4bda1a025b10fe0269369475f420177f2cb06e0f9d32c95b4873c9f80b8", size = 5342533, upload-time = "2025-08-17T18:22:22.326Z" }, - { url = "https://files.pythonhosted.org/packages/ea/ec/22bc75bf054e25accdf8e928bc68ab36b4466809729c554ff3a1c1c8bce6/zstandard-0.24.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b84c6c210684286e504022d11ec294d2b7922d66c823e87575d8b23eba7c81f", size = 5062837, upload-time = "2025-08-17T18:22:24.416Z" }, - { url = "https://files.pythonhosted.org/packages/48/cc/33edfc9d286e517fb5b51d9c3210e5bcfce578d02a675f994308ca587ae1/zstandard-0.24.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c59740682a686bf835a1a4d8d0ed1eefe31ac07f1c5a7ed5f2e72cf577692b00", size = 5393855, upload-time = "2025-08-17T18:22:26.786Z" }, - { url = "https://files.pythonhosted.org/packages/73/36/59254e9b29da6215fb3a717812bf87192d89f190f23817d88cb8868c47ac/zstandard-0.24.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:6324fde5cf5120fbf6541d5ff3c86011ec056e8d0f915d8e7822926a5377193a", size = 5451058, upload-time = "2025-08-17T18:22:28.885Z" }, - { url = "https://files.pythonhosted.org/packages/9a/c7/31674cb2168b741bbbe71ce37dd397c9c671e73349d88ad3bca9e9fae25b/zstandard-0.24.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:51a86bd963de3f36688553926a84e550d45d7f9745bd1947d79472eca27fcc75", size = 5546619, upload-time = "2025-08-17T18:22:31.115Z" }, - { url = "https://files.pythonhosted.org/packages/e6/01/1a9f22239f08c00c156f2266db857545ece66a6fc0303d45c298564bc20b/zstandard-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d82ac87017b734f2fb70ff93818c66f0ad2c3810f61040f077ed38d924e19980", size = 5046676, upload-time = "2025-08-17T18:22:33.077Z" }, - { url = "https://files.pythonhosted.org/packages/a7/91/6c0cf8fa143a4988a0361380ac2ef0d7cb98a374704b389fbc38b5891712/zstandard-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92ea7855d5bcfb386c34557516c73753435fb2d4a014e2c9343b5f5ba148b5d8", size = 5576381, upload-time = "2025-08-17T18:22:35.391Z" }, - { url = "https://files.pythonhosted.org/packages/e2/77/1526080e22e78871e786ccf3c84bf5cec9ed25110a9585507d3c551da3d6/zstandard-0.24.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3adb4b5414febf074800d264ddf69ecade8c658837a83a19e8ab820e924c9933", size = 4953403, upload-time = "2025-08-17T18:22:37.266Z" }, - { url = "https://files.pythonhosted.org/packages/6e/d0/a3a833930bff01eab697eb8abeafb0ab068438771fa066558d96d7dafbf9/zstandard-0.24.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6374feaf347e6b83ec13cc5dcfa70076f06d8f7ecd46cc71d58fac798ff08b76", size = 
5267396, upload-time = "2025-08-17T18:22:39.757Z" }, - { url = "https://files.pythonhosted.org/packages/f3/5e/90a0db9a61cd4769c06374297ecfcbbf66654f74cec89392519deba64d76/zstandard-0.24.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:13fc548e214df08d896ee5f29e1f91ee35db14f733fef8eabea8dca6e451d1e2", size = 5433269, upload-time = "2025-08-17T18:22:42.131Z" }, - { url = "https://files.pythonhosted.org/packages/ce/58/fc6a71060dd67c26a9c5566e0d7c99248cbe5abfda6b3b65b8f1a28d59f7/zstandard-0.24.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0a416814608610abf5488889c74e43ffa0343ca6cf43957c6b6ec526212422da", size = 5814203, upload-time = "2025-08-17T18:22:44.017Z" }, - { url = "https://files.pythonhosted.org/packages/5c/6a/89573d4393e3ecbfa425d9a4e391027f58d7810dec5cdb13a26e4cdeef5c/zstandard-0.24.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0d66da2649bb0af4471699aeb7a83d6f59ae30236fb9f6b5d20fb618ef6c6777", size = 5359622, upload-time = "2025-08-17T18:22:45.802Z" }, - { url = "https://files.pythonhosted.org/packages/60/ff/2cbab815d6f02a53a9d8d8703bc727d8408a2e508143ca9af6c3cca2054b/zstandard-0.24.0-cp312-cp312-win32.whl", hash = "sha256:ff19efaa33e7f136fe95f9bbcc90ab7fb60648453b03f95d1de3ab6997de0f32", size = 435968, upload-time = "2025-08-17T18:22:49.493Z" }, - { url = "https://files.pythonhosted.org/packages/ce/a3/8f96b8ddb7ad12344218fbd0fd2805702dafd126ae9f8a1fb91eef7b33da/zstandard-0.24.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc05f8a875eb651d1cc62e12a4a0e6afa5cd0cc231381adb830d2e9c196ea895", size = 505195, upload-time = "2025-08-17T18:22:47.193Z" }, - { url = "https://files.pythonhosted.org/packages/a3/4a/bfca20679da63bfc236634ef2e4b1b4254203098b0170e3511fee781351f/zstandard-0.24.0-cp312-cp312-win_arm64.whl", hash = "sha256:b04c94718f7a8ed7cdd01b162b6caa1954b3c9d486f00ecbbd300f149d2b2606", size = 461605, upload-time = "2025-08-17T18:22:48.317Z" }, + { url = "https://files.pythonhosted.org/packages/2a/83/c3ca27c363d104980f1c9cee1101cc8ba724ac8c28a033ede6aab89585b1/zstandard-0.25.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:933b65d7680ea337180733cf9e87293cc5500cc0eb3fc8769f4d3c88d724ec5c", size = 795254, upload-time = "2025-09-14T22:16:26.137Z" }, + { url = "https://files.pythonhosted.org/packages/ac/4d/e66465c5411a7cf4866aeadc7d108081d8ceba9bc7abe6b14aa21c671ec3/zstandard-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3f79487c687b1fc69f19e487cd949bf3aae653d181dfb5fde3bf6d18894706f", size = 640559, upload-time = "2025-09-14T22:16:27.973Z" }, + { url = "https://files.pythonhosted.org/packages/12/56/354fe655905f290d3b147b33fe946b0f27e791e4b50a5f004c802cb3eb7b/zstandard-0.25.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:0bbc9a0c65ce0eea3c34a691e3c4b6889f5f3909ba4822ab385fab9057099431", size = 5348020, upload-time = "2025-09-14T22:16:29.523Z" }, + { url = "https://files.pythonhosted.org/packages/3b/13/2b7ed68bd85e69a2069bcc72141d378f22cae5a0f3b353a2c8f50ef30c1b/zstandard-0.25.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:01582723b3ccd6939ab7b3a78622c573799d5d8737b534b86d0e06ac18dbde4a", size = 5058126, upload-time = "2025-09-14T22:16:31.811Z" }, + { url = "https://files.pythonhosted.org/packages/c9/dd/fdaf0674f4b10d92cb120ccff58bbb6626bf8368f00ebfd2a41ba4a0dc99/zstandard-0.25.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5f1ad7bf88535edcf30038f6919abe087f606f62c00a87d7e33e7fc57cb69fcc", size = 5405390, upload-time 
= "2025-09-14T22:16:33.486Z" }, + { url = "https://files.pythonhosted.org/packages/0f/67/354d1555575bc2490435f90d67ca4dd65238ff2f119f30f72d5cde09c2ad/zstandard-0.25.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:06acb75eebeedb77b69048031282737717a63e71e4ae3f77cc0c3b9508320df6", size = 5452914, upload-time = "2025-09-14T22:16:35.277Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1f/e9cfd801a3f9190bf3e759c422bbfd2247db9d7f3d54a56ecde70137791a/zstandard-0.25.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9300d02ea7c6506f00e627e287e0492a5eb0371ec1670ae852fefffa6164b072", size = 5559635, upload-time = "2025-09-14T22:16:37.141Z" }, + { url = "https://files.pythonhosted.org/packages/21/88/5ba550f797ca953a52d708c8e4f380959e7e3280af029e38fbf47b55916e/zstandard-0.25.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfd06b1c5584b657a2892a6014c2f4c20e0db0208c159148fa78c65f7e0b0277", size = 5048277, upload-time = "2025-09-14T22:16:38.807Z" }, + { url = "https://files.pythonhosted.org/packages/46/c0/ca3e533b4fa03112facbe7fbe7779cb1ebec215688e5df576fe5429172e0/zstandard-0.25.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f373da2c1757bb7f1acaf09369cdc1d51d84131e50d5fa9863982fd626466313", size = 5574377, upload-time = "2025-09-14T22:16:40.523Z" }, + { url = "https://files.pythonhosted.org/packages/12/9b/3fb626390113f272abd0799fd677ea33d5fc3ec185e62e6be534493c4b60/zstandard-0.25.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c0e5a65158a7946e7a7affa6418878ef97ab66636f13353b8502d7ea03c8097", size = 4961493, upload-time = "2025-09-14T22:16:43.3Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d3/23094a6b6a4b1343b27ae68249daa17ae0651fcfec9ed4de09d14b940285/zstandard-0.25.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c8e167d5adf59476fa3e37bee730890e389410c354771a62e3c076c86f9f7778", size = 5269018, upload-time = "2025-09-14T22:16:45.292Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a7/bb5a0c1c0f3f4b5e9d5b55198e39de91e04ba7c205cc46fcb0f95f0383c1/zstandard-0.25.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:98750a309eb2f020da61e727de7d7ba3c57c97cf6213f6f6277bb7fb42a8e065", size = 5443672, upload-time = "2025-09-14T22:16:47.076Z" }, + { url = "https://files.pythonhosted.org/packages/27/22/503347aa08d073993f25109c36c8d9f029c7d5949198050962cb568dfa5e/zstandard-0.25.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22a086cff1b6ceca18a8dd6096ec631e430e93a8e70a9ca5efa7561a00f826fa", size = 5822753, upload-time = "2025-09-14T22:16:49.316Z" }, + { url = "https://files.pythonhosted.org/packages/e2/be/94267dc6ee64f0f8ba2b2ae7c7a2df934a816baaa7291db9e1aa77394c3c/zstandard-0.25.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:72d35d7aa0bba323965da807a462b0966c91608ef3a48ba761678cb20ce5d8b7", size = 5366047, upload-time = "2025-09-14T22:16:51.328Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a3/732893eab0a3a7aecff8b99052fecf9f605cf0fb5fb6d0290e36beee47a4/zstandard-0.25.0-cp311-cp311-win32.whl", hash = "sha256:f5aeea11ded7320a84dcdd62a3d95b5186834224a9e55b92ccae35d21a8b63d4", size = 436484, upload-time = "2025-09-14T22:16:55.005Z" }, + { url = "https://files.pythonhosted.org/packages/43/a3/c6155f5c1cce691cb80dfd38627046e50af3ee9ddc5d0b45b9b063bfb8c9/zstandard-0.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:daab68faadb847063d0c56f361a289c4f268706b598afbf9ad113cbe5c38b6b2", size = 506183, upload-time = "2025-09-14T22:16:52.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/3e/8945ab86a0820cc0e0cdbf38086a92868a9172020fdab8a03ac19662b0e5/zstandard-0.25.0-cp311-cp311-win_arm64.whl", hash = "sha256:22a06c5df3751bb7dc67406f5374734ccee8ed37fc5981bf1ad7041831fa1137", size = 462533, upload-time = "2025-09-14T22:16:53.878Z" }, + { url = "https://files.pythonhosted.org/packages/82/fc/f26eb6ef91ae723a03e16eddb198abcfce2bc5a42e224d44cc8b6765e57e/zstandard-0.25.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b3c3a3ab9daa3eed242d6ecceead93aebbb8f5f84318d82cee643e019c4b73b", size = 795738, upload-time = "2025-09-14T22:16:56.237Z" }, + { url = "https://files.pythonhosted.org/packages/aa/1c/d920d64b22f8dd028a8b90e2d756e431a5d86194caa78e3819c7bf53b4b3/zstandard-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:913cbd31a400febff93b564a23e17c3ed2d56c064006f54efec210d586171c00", size = 640436, upload-time = "2025-09-14T22:16:57.774Z" }, + { url = "https://files.pythonhosted.org/packages/53/6c/288c3f0bd9fcfe9ca41e2c2fbfd17b2097f6af57b62a81161941f09afa76/zstandard-0.25.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:011d388c76b11a0c165374ce660ce2c8efa8e5d87f34996aa80f9c0816698b64", size = 5343019, upload-time = "2025-09-14T22:16:59.302Z" }, + { url = "https://files.pythonhosted.org/packages/1e/15/efef5a2f204a64bdb5571e6161d49f7ef0fffdbca953a615efbec045f60f/zstandard-0.25.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6dffecc361d079bb48d7caef5d673c88c8988d3d33fb74ab95b7ee6da42652ea", size = 5063012, upload-time = "2025-09-14T22:17:01.156Z" }, + { url = "https://files.pythonhosted.org/packages/b7/37/a6ce629ffdb43959e92e87ebdaeebb5ac81c944b6a75c9c47e300f85abdf/zstandard-0.25.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:7149623bba7fdf7e7f24312953bcf73cae103db8cae49f8154dd1eadc8a29ecb", size = 5394148, upload-time = "2025-09-14T22:17:03.091Z" }, + { url = "https://files.pythonhosted.org/packages/e3/79/2bf870b3abeb5c070fe2d670a5a8d1057a8270f125ef7676d29ea900f496/zstandard-0.25.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:6a573a35693e03cf1d67799fd01b50ff578515a8aeadd4595d2a7fa9f3ec002a", size = 5451652, upload-time = "2025-09-14T22:17:04.979Z" }, + { url = "https://files.pythonhosted.org/packages/53/60/7be26e610767316c028a2cbedb9a3beabdbe33e2182c373f71a1c0b88f36/zstandard-0.25.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5a56ba0db2d244117ed744dfa8f6f5b366e14148e00de44723413b2f3938a902", size = 5546993, upload-time = "2025-09-14T22:17:06.781Z" }, + { url = "https://files.pythonhosted.org/packages/85/c7/3483ad9ff0662623f3648479b0380d2de5510abf00990468c286c6b04017/zstandard-0.25.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:10ef2a79ab8e2974e2075fb984e5b9806c64134810fac21576f0668e7ea19f8f", size = 5046806, upload-time = "2025-09-14T22:17:08.415Z" }, + { url = "https://files.pythonhosted.org/packages/08/b3/206883dd25b8d1591a1caa44b54c2aad84badccf2f1de9e2d60a446f9a25/zstandard-0.25.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aaf21ba8fb76d102b696781bddaa0954b782536446083ae3fdaa6f16b25a1c4b", size = 5576659, upload-time = "2025-09-14T22:17:10.164Z" }, + { url = "https://files.pythonhosted.org/packages/9d/31/76c0779101453e6c117b0ff22565865c54f48f8bd807df2b00c2c404b8e0/zstandard-0.25.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1869da9571d5e94a85a5e8d57e4e8807b175c9e4a6294e3b66fa4efb074d90f6", size = 4953933, 
upload-time = "2025-09-14T22:17:11.857Z" }, + { url = "https://files.pythonhosted.org/packages/18/e1/97680c664a1bf9a247a280a053d98e251424af51f1b196c6d52f117c9720/zstandard-0.25.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:809c5bcb2c67cd0ed81e9229d227d4ca28f82d0f778fc5fea624a9def3963f91", size = 5268008, upload-time = "2025-09-14T22:17:13.627Z" }, + { url = "https://files.pythonhosted.org/packages/1e/73/316e4010de585ac798e154e88fd81bb16afc5c5cb1a72eeb16dd37e8024a/zstandard-0.25.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f27662e4f7dbf9f9c12391cb37b4c4c3cb90ffbd3b1fb9284dadbbb8935fa708", size = 5433517, upload-time = "2025-09-14T22:17:16.103Z" }, + { url = "https://files.pythonhosted.org/packages/5b/60/dd0f8cfa8129c5a0ce3ea6b7f70be5b33d2618013a161e1ff26c2b39787c/zstandard-0.25.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99c0c846e6e61718715a3c9437ccc625de26593fea60189567f0118dc9db7512", size = 5814292, upload-time = "2025-09-14T22:17:17.827Z" }, + { url = "https://files.pythonhosted.org/packages/fc/5f/75aafd4b9d11b5407b641b8e41a57864097663699f23e9ad4dbb91dc6bfe/zstandard-0.25.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:474d2596a2dbc241a556e965fb76002c1ce655445e4e3bf38e5477d413165ffa", size = 5360237, upload-time = "2025-09-14T22:17:19.954Z" }, + { url = "https://files.pythonhosted.org/packages/ff/8d/0309daffea4fcac7981021dbf21cdb2e3427a9e76bafbcdbdf5392ff99a4/zstandard-0.25.0-cp312-cp312-win32.whl", hash = "sha256:23ebc8f17a03133b4426bcc04aabd68f8236eb78c3760f12783385171b0fd8bd", size = 436922, upload-time = "2025-09-14T22:17:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/79/3b/fa54d9015f945330510cb5d0b0501e8253c127cca7ebe8ba46a965df18c5/zstandard-0.25.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffef5a74088f1e09947aecf91011136665152e0b4b359c42be3373897fb39b01", size = 506276, upload-time = "2025-09-14T22:17:21.429Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6b/8b51697e5319b1f9ac71087b0af9a40d8a6288ff8025c36486e0c12abcc4/zstandard-0.25.0-cp312-cp312-win_arm64.whl", hash = "sha256:181eb40e0b6a29b3cd2849f825e0fa34397f649170673d385f3598ae17cca2e9", size = 462679, upload-time = "2025-09-14T22:17:23.147Z" }, ] diff --git a/dev/pytest/pytest_artifacts.sh b/dev/pytest/pytest_artifacts.sh index 3086ef5cc4..29cacdcc07 100755 --- a/dev/pytest/pytest_artifacts.sh +++ b/dev/pytest/pytest_artifacts.sh @@ -4,4 +4,6 @@ set -x SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/../.." -pytest api/tests/artifact_tests/ +PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-120}" + +pytest --timeout "${PYTEST_TIMEOUT}" api/tests/artifact_tests/ diff --git a/dev/pytest/pytest_model_runtime.sh b/dev/pytest/pytest_model_runtime.sh index 2cbbbbfd81..fd68dbe697 100755 --- a/dev/pytest/pytest_model_runtime.sh +++ b/dev/pytest/pytest_model_runtime.sh @@ -4,7 +4,9 @@ set -x SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/../.." 
-pytest api/tests/integration_tests/model_runtime/anthropic \ +PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-180}" + +pytest --timeout "${PYTEST_TIMEOUT}" api/tests/integration_tests/model_runtime/anthropic \ api/tests/integration_tests/model_runtime/azure_openai \ api/tests/integration_tests/model_runtime/openai api/tests/integration_tests/model_runtime/chatglm \ api/tests/integration_tests/model_runtime/google api/tests/integration_tests/model_runtime/xinference \ diff --git a/dev/pytest/pytest_testcontainers.sh b/dev/pytest/pytest_testcontainers.sh index e55a436138..f92f8821bf 100755 --- a/dev/pytest/pytest_testcontainers.sh +++ b/dev/pytest/pytest_testcontainers.sh @@ -4,4 +4,6 @@ set -x SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/../.." -pytest api/tests/test_containers_integration_tests +PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-120}" + +pytest --timeout "${PYTEST_TIMEOUT}" api/tests/test_containers_integration_tests diff --git a/dev/pytest/pytest_tools.sh b/dev/pytest/pytest_tools.sh index d10934626f..989784f078 100755 --- a/dev/pytest/pytest_tools.sh +++ b/dev/pytest/pytest_tools.sh @@ -4,4 +4,6 @@ set -x SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/../.." -pytest api/tests/integration_tests/tools +PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-120}" + +pytest --timeout "${PYTEST_TIMEOUT}" api/tests/integration_tests/tools diff --git a/dev/pytest/pytest_unit_tests.sh b/dev/pytest/pytest_unit_tests.sh index 1a1819ca28..496cb40952 100755 --- a/dev/pytest/pytest_unit_tests.sh +++ b/dev/pytest/pytest_unit_tests.sh @@ -4,5 +4,7 @@ set -x SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/../.." +PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-20}" + # libs -pytest api/tests/unit_tests +pytest --timeout "${PYTEST_TIMEOUT}" api/tests/unit_tests diff --git a/dev/pytest/pytest_vdb.sh b/dev/pytest/pytest_vdb.sh index 7f617a9c05..3c11a079cc 100755 --- a/dev/pytest/pytest_vdb.sh +++ b/dev/pytest/pytest_vdb.sh @@ -4,7 +4,9 @@ set -x SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/../.." -pytest api/tests/integration_tests/vdb/chroma \ +PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-180}" + +pytest --timeout "${PYTEST_TIMEOUT}" api/tests/integration_tests/vdb/chroma \ api/tests/integration_tests/vdb/milvus \ api/tests/integration_tests/vdb/pgvecto_rs \ api/tests/integration_tests/vdb/pgvector \ diff --git a/dev/pytest/pytest_workflow.sh b/dev/pytest/pytest_workflow.sh index b63d49069f..941c8d3e7e 100755 --- a/dev/pytest/pytest_workflow.sh +++ b/dev/pytest/pytest_workflow.sh @@ -4,4 +4,6 @@ set -x SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/../.." -pytest api/tests/integration_tests/workflow +PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-120}" + +pytest --timeout "${PYTEST_TIMEOUT}" api/tests/integration_tests/workflow diff --git a/dev/start-worker b/dev/start-worker index a2af04c01c..a7f16b853f 100755 --- a/dev/start-worker +++ b/dev/start-worker @@ -5,7 +5,6 @@ set -x SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/.." 
- uv --directory api run \ - celery -A app.celery worker \ - -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation + celery -A app.celery worker \ + -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,priority_pipeline,pipeline diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index 5253f750b9..5a67c080cc 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -24,6 +24,13 @@ services: volumes: # Mount the storage directory to the container, for storing user files. - ./volumes/app/storage:/app/api/storage + # TODO: Remove this entrypoint override when weaviate-client 4.17.0 is included in the next Dify release + entrypoint: + - /bin/bash + - -c + - | + uv pip install --system weaviate-client==4.17.0 + exec /bin/bash /app/api/docker/entrypoint.sh networks: - ssrf_proxy_network - default @@ -51,6 +58,13 @@ services: volumes: # Mount the storage directory to the container, for storing user files. - ./volumes/app/storage:/app/api/storage + # TODO: Remove this entrypoint override when weaviate-client 4.17.0 is included in the next Dify release + entrypoint: + - /bin/bash + - -c + - | + uv pip install --system weaviate-client==4.17.0 + exec /bin/bash /app/api/docker/entrypoint.sh networks: - ssrf_proxy_network - default @@ -329,9 +343,8 @@ services: # The Weaviate vector store. weaviate: - image: semitechnologies/weaviate:1.19.0 + image: semitechnologies/weaviate:1.27.0 profiles: - - "" - weaviate restart: always volumes: diff --git a/docker/docker-compose.middleware.yaml b/docker/docker-compose.middleware.yaml index d350503f27..ebc619a50f 100644 --- a/docker/docker-compose.middleware.yaml +++ b/docker/docker-compose.middleware.yaml @@ -181,7 +181,7 @@ services: # The Weaviate vector store. weaviate: - image: semitechnologies/weaviate:1.19.0 + image: semitechnologies/weaviate:1.27.0 profiles: - "" - weaviate @@ -206,6 +206,7 @@ services: AUTHORIZATION_ADMINLIST_USERS: ${WEAVIATE_AUTHORIZATION_ADMINLIST_USERS:-hello@dify.ai} ports: - "${EXPOSE_WEAVIATE_PORT:-8080}:8080" + - "${EXPOSE_WEAVIATE_GRPC_PORT:-50051}:50051" networks: # create a network between sandbox, api and ssrf_proxy, and can not access outside. diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 0df648f38f..421b733e2b 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -631,6 +631,13 @@ services: volumes: # Mount the storage directory to the container, for storing user files. - ./volumes/app/storage:/app/api/storage + # TODO: Remove this entrypoint override when weaviate-client 4.17.0 is included in the next Dify release + entrypoint: + - /bin/bash + - -c + - | + uv pip install --system weaviate-client==4.17.0 + exec /bin/bash /app/api/docker/entrypoint.sh networks: - ssrf_proxy_network - default @@ -658,6 +665,13 @@ services: volumes: # Mount the storage directory to the container, for storing user files. - ./volumes/app/storage:/app/api/storage + # TODO: Remove this entrypoint override when weaviate-client 4.17.0 is included in the next Dify release + entrypoint: + - /bin/bash + - -c + - | + uv pip install --system weaviate-client==4.17.0 + exec /bin/bash /app/api/docker/entrypoint.sh networks: - ssrf_proxy_network - default @@ -936,9 +950,8 @@ services: # The Weaviate vector store. 
weaviate: - image: semitechnologies/weaviate:1.19.0 + image: semitechnologies/weaviate:1.27.0 profiles: - - "" - weaviate restart: always volumes: diff --git a/docs/ar-SA/README.md b/docs/ar-SA/README.md index afa494c5d3..30920ed983 100644 --- a/docs/ar-SA/README.md +++ b/docs/ar-SA/README.md @@ -115,6 +115,14 @@ docker compose up -d إذا كنت بحاجة إلى تخصيص الإعدادات، فيرجى الرجوع إلى التعليقات في ملف [.env.example](../../docker/.env.example) وتحديث القيم المقابلة في ملف `.env`. بالإضافة إلى ذلك، قد تحتاج إلى إجراء تعديلات على ملف `docker-compose.yaml` نفسه، مثل تغيير إصدارات الصور أو تعيينات المنافذ أو نقاط تحميل وحدات التخزين، بناءً على بيئة النشر ومتطلباتك الخاصة. بعد إجراء أي تغييرات، يرجى إعادة تشغيل `docker-compose up -d`. يمكنك العثور على قائمة كاملة بمتغيرات البيئة المتاحة [هنا](https://docs.dify.ai/getting-started/install-self-hosted/environments). +### مراقبة المقاييس باستخدام Grafana + +استيراد لوحة التحكم إلى Grafana، باستخدام قاعدة بيانات PostgreSQL الخاصة بـ Dify كمصدر للبيانات، لمراقبة المقاييس بدقة للتطبيقات والمستأجرين والرسائل وغير ذلك. + +- [لوحة تحكم Grafana بواسطة @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard) + +### النشر باستخدام Kubernetes + يوجد مجتمع خاص بـ [Helm Charts](https://helm.sh/) وملفات YAML التي تسمح بتنفيذ Dify على Kubernetes للنظام من الإيجابيات العلوية. - [رسم بياني Helm من قبل @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify) diff --git a/docs/bn-BD/README.md b/docs/bn-BD/README.md index 318853a8de..5430364ef9 100644 --- a/docs/bn-BD/README.md +++ b/docs/bn-BD/README.md @@ -132,6 +132,14 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন যদি আপনার কনফিগারেশনটি কাস্টমাইজ করার প্রয়োজন হয়, তাহলে অনুগ্রহ করে আমাদের [.env.example](../../docker/.env.example) ফাইল দেখুন এবং আপনার `.env` ফাইলে সংশ্লিষ্ট মানগুলি আপডেট করুন। এছাড়াও, আপনার নির্দিষ্ট এনভায়রনমেন্ট এবং প্রয়োজনীয়তার উপর ভিত্তি করে আপনাকে `docker-compose.yaml` ফাইলে সমন্বয় করতে হতে পারে, যেমন ইমেজ ভার্সন পরিবর্তন করা, পোর্ট ম্যাপিং করা, অথবা ভলিউম মাউন্ট করা। যেকোনো পরিবর্তন করার পর, অনুগ্রহ করে `docker-compose up -d` পুনরায় চালান। ভেরিয়েবলের সম্পূর্ণ তালিকা [এখানে] (https://docs.dify.ai/getting-started/install-self-hosted/environments) খুঁজে পেতে পারেন। +### Grafana দিয়ে মেট্রিক্স মনিটরিং + +Dify-এর PostgreSQL ডাটাবেসকে ডেটা সোর্স হিসাবে ব্যবহার করে, অ্যাপ, টেন্যান্ট, মেসেজ ইত্যাদির গ্র্যানুলারিটিতে মেট্রিক্স মনিটর করার জন্য Grafana-তে ড্যাশবোর্ড ইম্পোর্ট করুন। + +- [@bowenliang123 কর্তৃক Grafana ড্যাশবোর্ড](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Kubernetes এর সাথে ডেপ্লয়মেন্ট + যদি আপনি একটি হাইলি এভেইলেবল সেটআপ কনফিগার করতে চান, তাহলে কমিউনিটি [Helm Charts](https://helm.sh/) এবং YAML ফাইল রয়েছে যা Dify কে Kubernetes-এ ডিপ্লয় করার প্রক্রিয়া বর্ণনা করে। - [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify) diff --git a/docs/de-DE/README.md b/docs/de-DE/README.md index 8907d914d3..6c49fbdfc3 100644 --- a/docs/de-DE/README.md +++ b/docs/de-DE/README.md @@ -130,6 +130,14 @@ Star Dify auf GitHub und lassen Sie sich sofort über neue Releases benachrichti Falls Sie die Konfiguration anpassen müssen, lesen Sie bitte die Kommentare in unserer [.env.example](../../docker/.env.example)-Datei und aktualisieren Sie die entsprechenden Werte in Ihrer `.env`-Datei. Zusätzlich müssen Sie eventuell Anpassungen an der `docker-compose.yaml`-Datei vornehmen, wie zum Beispiel das Ändern von Image-Versionen, Portzuordnungen oder Volumen-Mounts, je nach Ihrer spezifischen Einsatzumgebung und Ihren Anforderungen. 
Nachdem Sie Änderungen vorgenommen haben, starten Sie `docker-compose up -d` erneut. Eine vollständige Liste der verfügbaren Umgebungsvariablen finden Sie [hier](https://docs.dify.ai/getting-started/install-self-hosted/environments). +### Metriküberwachung mit Grafana + +Importieren Sie das Dashboard in Grafana, wobei Sie die PostgreSQL-Datenbank von Dify als Datenquelle verwenden, um Metriken in der Granularität von Apps, Mandanten, Nachrichten und mehr zu überwachen. + +- [Grafana-Dashboard von @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Bereitstellung mit Kubernetes + Falls Sie eine hochverfügbare Konfiguration einrichten möchten, gibt es von der Community bereitgestellte [Helm Charts](https://helm.sh/) und YAML-Dateien, die es ermöglichen, Dify auf Kubernetes bereitzustellen. - [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify) diff --git a/docs/es-ES/README.md b/docs/es-ES/README.md index b005691fea..ae83d416e3 100644 --- a/docs/es-ES/README.md +++ b/docs/es-ES/README.md @@ -128,6 +128,14 @@ Si necesita personalizar la configuración, consulte los comentarios en nuestro . Después de realizar los cambios, ejecuta `docker-compose up -d` nuevamente. Puedes ver la lista completa de variables de entorno [aquí](https://docs.dify.ai/getting-started/install-self-hosted/environments). +### Monitorización de Métricas con Grafana + +Importe el panel a Grafana, utilizando la base de datos PostgreSQL de Dify como fuente de datos, para monitorizar métricas en granularidad de aplicaciones, inquilinos, mensajes y más. + +- [Panel de Grafana por @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Implementación con Kubernetes + Si desea configurar una configuración de alta disponibilidad, la comunidad proporciona [Gráficos Helm](https://helm.sh/) y archivos YAML, a través de los cuales puede desplegar Dify en Kubernetes. - [Gráfico Helm por @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify) diff --git a/docs/fr-FR/README.md b/docs/fr-FR/README.md index 3aca9a9672..b7d006a927 100644 --- a/docs/fr-FR/README.md +++ b/docs/fr-FR/README.md @@ -126,6 +126,14 @@ Après l'exécution, vous pouvez accéder au tableau de bord Dify dans votre nav Si vous devez personnaliser la configuration, veuillez vous référer aux commentaires dans notre fichier [.env.example](../../docker/.env.example) et mettre à jour les valeurs correspondantes dans votre fichier `.env`. De plus, vous devrez peut-être apporter des modifications au fichier `docker-compose.yaml` lui-même, comme changer les versions d'image, les mappages de ports ou les montages de volumes, en fonction de votre environnement de déploiement et de vos exigences spécifiques. Après avoir effectué des modifications, veuillez réexécuter `docker-compose up -d`. Vous pouvez trouver la liste complète des variables d'environnement disponibles [ici](https://docs.dify.ai/getting-started/install-self-hosted/environments). +### Surveillance des Métriques avec Grafana + +Importez le tableau de bord dans Grafana, en utilisant la base de données PostgreSQL de Dify comme source de données, pour surveiller les métriques avec une granularité d'applications, de locataires, de messages et plus. 
+ +- [Tableau de bord Grafana par @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Déploiement avec Kubernetes + Si vous souhaitez configurer une configuration haute disponibilité, la communauté fournit des [Helm Charts](https://helm.sh/) et des fichiers YAML, à travers lesquels vous pouvez déployer Dify sur Kubernetes. - [Helm Chart par @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify) diff --git a/docs/ja-JP/README.md b/docs/ja-JP/README.md index 66831285d6..f9e700d1df 100644 --- a/docs/ja-JP/README.md +++ b/docs/ja-JP/README.md @@ -127,6 +127,14 @@ docker compose up -d 設定をカスタマイズする必要がある場合は、[.env.example](../../docker/.env.example) ファイルのコメントを参照し、`.env` ファイルの対応する値を更新してください。さらに、デプロイ環境や要件に応じて、`docker-compose.yaml` ファイル自体を調整する必要がある場合があります。たとえば、イメージのバージョン、ポートのマッピング、ボリュームのマウントなどを変更します。変更を加えた後は、`docker-compose up -d` を再実行してください。利用可能な環境変数の全一覧は、[こちら](https://docs.dify.ai/getting-started/install-self-hosted/environments)で確認できます。 +### Grafanaを使用したメトリクス監視 + +Grafanaにダッシュボードをインポートし、DifyのPostgreSQLデータベースをデータソースとして使用して、アプリ、テナント、メッセージなどの粒度でメトリクスを監視します。 + +- [@bowenliang123によるGrafanaダッシュボード](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Kubernetesでのデプロイ + 高可用性設定を設定する必要がある場合、コミュニティは[Helm Charts](https://helm.sh/)とYAMLファイルにより、DifyをKubernetesにデプロイすることができます。 - [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify) diff --git a/docs/ko-KR/README.md b/docs/ko-KR/README.md index ec67bc90ed..4e4b82e920 100644 --- a/docs/ko-KR/README.md +++ b/docs/ko-KR/README.md @@ -120,6 +120,14 @@ docker compose up -d 구성을 사용자 정의해야 하는 경우 [.env.example](../../docker/.env.example) 파일의 주석을 참조하고 `.env` 파일에서 해당 값을 업데이트하십시오. 또한 특정 배포 환경 및 요구 사항에 따라 `docker-compose.yaml` 파일 자체를 조정해야 할 수도 있습니다. 예를 들어 이미지 버전, 포트 매핑 또는 볼륨 마운트를 변경합니다. 변경 한 후 `docker-compose up -d`를 다시 실행하십시오. 사용 가능한 환경 변수의 전체 목록은 [여기](https://docs.dify.ai/getting-started/install-self-hosted/environments)에서 찾을 수 있습니다. +### Grafana를 사용한 메트릭 모니터링 + +Dify의 PostgreSQL 데이터베이스를 데이터 소스로 사용하여 앱, 테넌트, 메시지 등에 대한 세분화된 메트릭을 모니터링하기 위해 대시보드를 Grafana로 가져옵니다. + +- [@bowenliang123의 Grafana 대시보드](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Kubernetes를 통한 배포 + Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했다는 커뮤니티가 제공하는 [Helm Charts](https://helm.sh/)와 YAML 파일이 존재합니다. - [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify) diff --git a/docs/pt-BR/README.md b/docs/pt-BR/README.md index 78383a3c76..f96b18eabb 100644 --- a/docs/pt-BR/README.md +++ b/docs/pt-BR/README.md @@ -126,6 +126,14 @@ Após a execução, você pode acessar o painel do Dify no navegador em [http:// Se precisar personalizar a configuração, consulte os comentários no nosso arquivo [.env.example](../../docker/.env.example) e atualize os valores correspondentes no seu arquivo `.env`. Além disso, talvez seja necessário fazer ajustes no próprio arquivo `docker-compose.yaml`, como alterar versões de imagem, mapeamentos de portas ou montagens de volumes, com base no seu ambiente de implantação específico e nas suas necessidades. Após fazer quaisquer alterações, execute novamente `docker-compose up -d`. Você pode encontrar a lista completa de variáveis de ambiente disponíveis [aqui](https://docs.dify.ai/getting-started/install-self-hosted/environments). +### Monitoramento de Métricas com Grafana + +Importe o dashboard para o Grafana, usando o banco de dados PostgreSQL do Dify como fonte de dados, para monitorar métricas na granularidade de aplicativos, inquilinos, mensagens e muito mais. 
+ +- [Dashboard do Grafana por @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Implantação com Kubernetes + Se deseja configurar uma instalação de alta disponibilidade, há [Helm Charts](https://helm.sh/) e arquivos YAML contribuídos pela comunidade que permitem a implantação do Dify no Kubernetes. - [Helm Chart de @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify) diff --git a/docs/sl-SI/README.md b/docs/sl-SI/README.md index 65aedb7703..04dc3b5dff 100644 --- a/docs/sl-SI/README.md +++ b/docs/sl-SI/README.md @@ -128,6 +128,14 @@ Star Dify on GitHub and be instantly notified of new releases. Če morate prilagoditi konfiguracijo, si oglejte komentarje v naši datoteki .env.example in posodobite ustrezne vrednosti v svoji .env datoteki. Poleg tega boste morda morali prilagoditi docker-compose.yaml samo datoteko, na primer spremeniti različice slike, preslikave vrat ali namestitve nosilca, glede na vaše specifično okolje in zahteve za uvajanje. Po kakršnih koli spremembah ponovno zaženite docker-compose up -d. Celoten seznam razpoložljivih spremenljivk okolja najdete tukaj. +### Spremljanje metrik z Grafana + +Uvozite nadzorno ploščo v Grafano in uporabite Difyjevo PostgreSQL bazo podatkov kot vir podatkov za spremljanje metrik glede na podrobnost aplikacij, najemnikov, sporočil in drugega. + +- [Nadzorna plošča Grafana avtorja @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Namestitev s Kubernetes + Če želite konfigurirati visoko razpoložljivo nastavitev, so na voljo Helm Charts in datoteke YAML, ki jih prispeva skupnost, ki omogočajo uvedbo Difyja v Kubernetes. - [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify) diff --git a/docs/tr-TR/README.md b/docs/tr-TR/README.md index a044da1f4e..965a1704be 100644 --- a/docs/tr-TR/README.md +++ b/docs/tr-TR/README.md @@ -120,6 +120,14 @@ docker compose up -d Yapılandırmayı özelleştirmeniz gerekiyorsa, lütfen [.env.example](../../docker/.env.example) dosyamızdaki yorumlara bakın ve `.env` dosyanızdaki ilgili değerleri güncelleyin. Ayrıca, spesifik dağıtım ortamınıza ve gereksinimlerinize bağlı olarak `docker-compose.yaml` dosyasının kendisinde de, imaj sürümlerini, port eşlemelerini veya hacim bağlantılarını değiştirmek gibi ayarlamalar yapmanız gerekebilir. Herhangi bir değişiklik yaptıktan sonra, lütfen `docker-compose up -d` komutunu tekrar çalıştırın. Kullanılabilir tüm ortam değişkenlerinin tam listesini [burada](https://docs.dify.ai/getting-started/install-self-hosted/environments) bulabilirsiniz. +### Grafana ile Metrik İzleme + +Uygulamalar, kiracılar, mesajlar ve daha fazlasının granularitesinde metrikleri izlemek için Dify'nin PostgreSQL veritabanını veri kaynağı olarak kullanarak panoyu Grafana'ya aktarın. + +- [@bowenliang123 tarafından Grafana Panosu](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Kubernetes ile Dağıtım + Yüksek kullanılabilirliğe sahip bir kurulum yapılandırmak isterseniz, Dify'ın Kubernetes üzerine dağıtılmasına olanak tanıyan topluluk katkılı [Helm Charts](https://helm.sh/) ve YAML dosyaları mevcuttur.
- [@LeoQuote tarafından Helm Chart](https://github.com/douban/charts/tree/master/charts/dify) diff --git a/docs/vi-VN/README.md b/docs/vi-VN/README.md index 847641da12..51f7c5d994 100644 --- a/docs/vi-VN/README.md +++ b/docs/vi-VN/README.md @@ -121,6 +121,14 @@ Sau khi chạy, bạn có thể truy cập bảng điều khiển Dify trong tr Nếu bạn cần tùy chỉnh cấu hình, vui lòng tham khảo các nhận xét trong tệp [.env.example](../../docker/.env.example) của chúng tôi và cập nhật các giá trị tương ứng trong tệp `.env` của bạn. Ngoài ra, bạn có thể cần điều chỉnh tệp `docker-compose.yaml`, chẳng hạn như thay đổi phiên bản hình ảnh, ánh xạ cổng hoặc gắn kết khối lượng, dựa trên môi trường triển khai cụ thể và yêu cầu của bạn. Sau khi thực hiện bất kỳ thay đổi nào, vui lòng chạy lại `docker-compose up -d`. Bạn có thể tìm thấy danh sách đầy đủ các biến môi trường có sẵn [tại đây](https://docs.dify.ai/getting-started/install-self-hosted/environments). +### Giám sát Số liệu với Grafana + +Nhập bảng điều khiển vào Grafana, sử dụng cơ sở dữ liệu PostgreSQL của Dify làm nguồn dữ liệu, để giám sát số liệu theo mức độ chi tiết của ứng dụng, người thuê, tin nhắn và hơn thế nữa. + +- [Bảng điều khiển Grafana của @bowenliang123](https://github.com/bowenliang123/dify-grafana-dashboard) + +### Triển khai với Kubernetes + Nếu bạn muốn cấu hình một cài đặt có độ sẵn sàng cao, có các [Helm Charts](https://helm.sh/) và tệp YAML do cộng đồng đóng góp cho phép Dify được triển khai trên Kubernetes. - [Helm Chart bởi @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify) diff --git a/docs/weaviate/WEAVIATE_MIGRATION_GUIDE/README.md b/docs/weaviate/WEAVIATE_MIGRATION_GUIDE/README.md new file mode 100644 index 0000000000..b2599e8c2e --- /dev/null +++ b/docs/weaviate/WEAVIATE_MIGRATION_GUIDE/README.md @@ -0,0 +1,187 @@ +# Weaviate Migration Guide: v1.19 → v1.27 + +## Overview + +Dify has upgraded from Weaviate v1.19 to v1.27 with the Python client updated from v3.24 to v4.17. + +## What Changed + +### Breaking Changes + +1. **Weaviate Server**: `1.19.0` → `1.27.0` +1. **Python Client**: `weaviate-client~=3.24.0` → `weaviate-client==4.17.0` +1. **gRPC Required**: Weaviate v1.27 requires gRPC port `50051` (in addition to HTTP port `8080`) +1. **Docker Compose**: Added temporary entrypoint overrides for client installation + +### Key Improvements + +- Faster vector operations via gRPC +- Improved batch processing +- Better error handling + +## Migration Steps + +### For Docker Users + +#### Step 1: Backup Your Data + +```bash +cd docker +docker compose down +sudo cp -r ./volumes/weaviate ./volumes/weaviate_backup_$(date +%Y%m%d) +``` + +#### Step 2: Update Dify + +```bash +git pull origin main +docker compose pull +``` + +#### Step 3: Start Services + +```bash +docker compose up -d +sleep 30 +curl http://localhost:8080/v1/meta +``` + +#### Step 4: Verify Migration + +```bash +# Check both ports are accessible +curl http://localhost:8080/v1/meta +netstat -tulpn | grep 50051 + +# Test in Dify UI: +# 1. Go to Knowledge Base +# 2. Test search functionality +# 3. 
Upload a test document +``` + +### For Source Installation + +#### Step 1: Update Dependencies + +```bash +cd api +uv sync --dev +uv run python -c "import weaviate; print(weaviate.__version__)" +# Should show: 4.17.0 +``` + +#### Step 2: Update Weaviate Server + +```bash +cd docker +docker compose -f docker-compose.middleware.yaml --profile weaviate up -d weaviate +curl http://localhost:8080/v1/meta +netstat -tulpn | grep 50051 +``` + +## Troubleshooting + +### Error: "No module named 'weaviate.classes'" + +**Solution**: + +```bash +cd api +uv sync --reinstall-package weaviate-client +uv run python -c "import weaviate; print(weaviate.__version__)" +# Should show: 4.17.0 +``` + +### Error: "gRPC health check failed" + +**Solution**: + +```bash +# Check Weaviate ports +docker ps | grep weaviate +# Should show: 0.0.0.0:8080->8080/tcp, 0.0.0.0:50051->50051/tcp + +# If missing gRPC port, add to docker-compose: +# ports: +# - "8080:8080" +# - "50051:50051" +``` + +### Error: "Weaviate version 1.19.0 is not supported" + +**Solution**: + +```bash +# Update Weaviate image in docker-compose +# Change: semitechnologies/weaviate:1.19.0 +# To: semitechnologies/weaviate:1.27.0 +docker compose down +docker compose up -d +``` + +### Data Migration Failed + +**Solution**: + +```bash +cd docker +docker compose down +sudo rm -rf ./volumes/weaviate +sudo cp -r ./volumes/weaviate_backup_YYYYMMDD ./volumes/weaviate +docker compose up -d +``` + +## Rollback Instructions + +```bash +# 1. Stop services +docker compose down + +# 2. Restore data backup +sudo rm -rf ./volumes/weaviate +sudo cp -r ./volumes/weaviate_backup_YYYYMMDD ./volumes/weaviate + +# 3. Checkout previous version +git checkout + +# 4. Restart services +docker compose up -d +``` + +## Compatibility + +| Component | Old Version | New Version | Compatible | +|-----------|-------------|-------------|------------| +| Weaviate Server | 1.19.0 | 1.27.0 | ✅ Yes | +| weaviate-client | ~3.24.0 | ==4.17.0 | ✅ Yes | +| Existing Data | v1.19 format | v1.27 format | ✅ Yes | + +## Testing Checklist + +Before deploying to production: + +- [ ] Backup all Weaviate data +- [ ] Test in staging environment +- [ ] Verify existing collections are accessible +- [ ] Test vector search functionality +- [ ] Test document upload and retrieval +- [ ] Monitor gRPC connection stability +- [ ] Check performance metrics + +## Support + +If you encounter issues: + +1. Check GitHub Issues: https://github.com/langgenius/dify/issues +1. 
Create a bug report with: + - Error messages + - Docker logs: `docker compose logs weaviate` + - Dify version + - Migration steps attempted + +## Important Notes + +- **Data Safety**: Existing vector data remains fully compatible +- **No Re-indexing**: No need to rebuild vector indexes +- **Temporary Workaround**: The entrypoint overrides are temporary until next Dify release +- **Performance**: May see improved performance due to gRPC usage diff --git a/docs/zh-CN/README.md b/docs/zh-CN/README.md index 202b99a6b1..888a0d7f12 100644 --- a/docs/zh-CN/README.md +++ b/docs/zh-CN/README.md @@ -127,6 +127,12 @@ docker compose up -d 如果您需要自定义配置,请参考 [.env.example](../../docker/.env.example) 文件中的注释,并更新 `.env` 文件中对应的值。此外,您可能需要根据您的具体部署环境和需求对 `docker-compose.yaml` 文件本身进行调整,例如更改镜像版本、端口映射或卷挂载。完成任何更改后,请重新运行 `docker-compose up -d`。您可以在[此处](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用环境变量的完整列表。 +### 使用 Grafana 进行指标监控 + +将仪表板导入 Grafana,使用 Dify 的 PostgreSQL 数据库作为数据源,以监控应用、租户、消息等粒度的指标。 + +- [由 @bowenliang123 提供的 Grafana 仪表板](https://github.com/bowenliang123/dify-grafana-dashboard) + #### 使用 Helm Chart 或 Kubernetes 资源清单(YAML)部署 使用 [Helm Chart](https://helm.sh/) 版本或者 Kubernetes 资源清单(YAML),可以在 Kubernetes 上部署 Dify。 diff --git a/docs/zh-TW/README.md b/docs/zh-TW/README.md index 526e8d9c8c..d8c484a6d4 100644 --- a/docs/zh-TW/README.md +++ b/docs/zh-TW/README.md @@ -130,6 +130,14 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify 如果您需要自定義配置,請參考我們的 [.env.example](../../docker/.env.example) 文件中的註釋,並在您的 `.env` 文件中更新相應的值。此外,根據您特定的部署環境和需求,您可能需要調整 `docker-compose.yaml` 文件本身,例如更改映像版本、端口映射或卷掛載。進行任何更改後,請重新運行 `docker-compose up -d`。您可以在[這裡](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用環境變數的完整列表。 +### 使用 Grafana 進行指標監控 + +將儀表板匯入 Grafana,使用 Dify 的 PostgreSQL 資料庫作為資料來源,以監控應用程式、租戶、訊息等顆粒度的指標。 + +- [由 @bowenliang123 提供的 Grafana 儀表板](https://github.com/bowenliang123/dify-grafana-dashboard) + +### 使用 Kubernetes 部署 + 如果您想配置高可用性設置,社區貢獻的 [Helm Charts](https://helm.sh/) 和 Kubernetes 資源清單(YAML)允許在 Kubernetes 上部署 Dify。 - [由 @LeoQuote 提供的 Helm Chart](https://github.com/douban/charts/tree/master/charts/dify) diff --git a/web/.storybook/main.ts b/web/.storybook/main.ts index fecf774e98..0605c71346 100644 --- a/web/.storybook/main.ts +++ b/web/.storybook/main.ts @@ -1,19 +1,29 @@ import type { StorybookConfig } from '@storybook/nextjs' const config: StorybookConfig = { - // stories: ['../stories/**/*.mdx', '../stories/**/*.stories.@(js|jsx|mjs|ts|tsx)'], stories: ['../app/components/**/*.stories.@(js|jsx|mjs|ts|tsx)'], addons: [ '@storybook/addon-onboarding', '@storybook/addon-links', - '@storybook/addon-essentials', + '@storybook/addon-docs', '@chromatic-com/storybook', - '@storybook/addon-interactions', ], framework: { name: '@storybook/nextjs', - options: {}, + options: { + builder: { + useSWC: true, + lazyCompilation: false, + }, + nextConfigPath: undefined, + }, }, staticDirs: ['../public'], + core: { + disableWhatsNewNotifications: true, + }, + docs: { + defaultName: 'Documentation', + }, } export default config diff --git a/web/.storybook/preview.tsx b/web/.storybook/preview.tsx index 55328602f9..1f5726de34 100644 --- a/web/.storybook/preview.tsx +++ b/web/.storybook/preview.tsx @@ -1,12 +1,21 @@ -import React from 'react' import type { Preview } from '@storybook/react' import { withThemeByDataAttribute } from '@storybook/addon-themes' -import I18nServer from '../app/components/i18n-server' +import { QueryClient, QueryClientProvider } from '@tanstack/react-query' +import I18N from 
'../app/components/i18n' +import { ToastProvider } from '../app/components/base/toast' import '../app/styles/globals.css' import '../app/styles/markdown.scss' import './storybook.css' +const queryClient = new QueryClient({ + defaultOptions: { + queries: { + refetchOnWindowFocus: false, + }, + }, +}) + export const decorators = [ withThemeByDataAttribute({ themes: { @@ -17,9 +26,15 @@ export const decorators = [ attributeName: 'data-theme', }), (Story) => { - return - - + return ( + + + + + + + + ) }, ] @@ -31,7 +46,11 @@ const preview: Preview = { date: /Date$/i, }, }, + docs: { + toc: true, + }, }, + tags: ['autodocs'], } export default preview diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx index 907c270017..e4c3f60c12 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx @@ -5,7 +5,7 @@ import { useTranslation } from 'react-i18next' import { useBoolean } from 'ahooks' import TracingIcon from './tracing-icon' import ProviderPanel from './provider-panel' -import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type' +import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type' import { TracingProvider } from './type' import ProviderConfigModal from './provider-config-modal' import Indicator from '@/app/components/header/indicator' @@ -30,7 +30,8 @@ export type PopupProps = { opikConfig: OpikConfig | null weaveConfig: WeaveConfig | null aliyunConfig: AliyunConfig | null - onConfigUpdated: (provider: TracingProvider, payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig) => void + tencentConfig: TencentConfig | null + onConfigUpdated: (provider: TracingProvider, payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig) => void onConfigRemoved: (provider: TracingProvider) => void } @@ -48,6 +49,7 @@ const ConfigPopup: FC = ({ opikConfig, weaveConfig, aliyunConfig, + tencentConfig, onConfigUpdated, onConfigRemoved, }) => { @@ -81,8 +83,8 @@ const ConfigPopup: FC = ({ hideConfigModal() }, [currentProvider, hideConfigModal, onConfigRemoved]) - const providerAllConfigured = arizeConfig && phoenixConfig && langSmithConfig && langFuseConfig && opikConfig && weaveConfig && aliyunConfig - const providerAllNotConfigured = !arizeConfig && !phoenixConfig && !langSmithConfig && !langFuseConfig && !opikConfig && !weaveConfig && !aliyunConfig + const providerAllConfigured = arizeConfig && phoenixConfig && langSmithConfig && langFuseConfig && opikConfig && weaveConfig && aliyunConfig && tencentConfig + const providerAllNotConfigured = !arizeConfig && !phoenixConfig && !langSmithConfig && !langFuseConfig && !opikConfig && !weaveConfig && !aliyunConfig && !tencentConfig const switchContent = ( = ({ key="aliyun-provider-panel" /> ) + + const tencentPanel = ( + + ) const configuredProviderPanel = () => { const configuredPanels: JSX.Element[] = [] @@ -206,6 +221,9 @@ const ConfigPopup: FC = ({ if (aliyunConfig) configuredPanels.push(aliyunPanel) + if (tencentConfig) + configuredPanels.push(tencentPanel) + return configuredPanels } @@ -233,6 +251,9 @@ const 
ConfigPopup: FC = ({ if (!aliyunConfig) notConfiguredPanels.push(aliyunPanel) + if (!tencentConfig) + notConfiguredPanels.push(tencentPanel) + return notConfiguredPanels } @@ -249,6 +270,8 @@ const ConfigPopup: FC = ({ return opikConfig if (currentProvider === TracingProvider.aliyun) return aliyunConfig + if (currentProvider === TracingProvider.tencent) + return tencentConfig return weaveConfig } @@ -297,6 +320,7 @@ const ConfigPopup: FC = ({ {arizePanel} {phoenixPanel} {aliyunPanel} + {tencentPanel} ) diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts index 4c81b63ea2..00f6224e9e 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config.ts @@ -8,4 +8,5 @@ export const docURL = { [TracingProvider.opik]: 'https://www.comet.com/docs/opik/tracing/integrations/dify#setup-instructions', [TracingProvider.weave]: 'https://weave-docs.wandb.ai/', [TracingProvider.aliyun]: 'https://help.aliyun.com/zh/arms/tracing-analysis/untitled-document-1750672984680', + [TracingProvider.tencent]: 'https://cloud.tencent.com/document/product/248/116531', } diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx index f79745c4dd..e1fd39fd48 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx @@ -8,12 +8,12 @@ import { import { useTranslation } from 'react-i18next' import { usePathname } from 'next/navigation' import { useBoolean } from 'ahooks' -import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type' +import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type' import { TracingProvider } from './type' import TracingIcon from './tracing-icon' import ConfigButton from './config-button' import cn from '@/utils/classnames' -import { AliyunIcon, ArizeIcon, LangfuseIcon, LangsmithIcon, OpikIcon, PhoenixIcon, WeaveIcon } from '@/app/components/base/icons/src/public/tracing' +import { AliyunIcon, ArizeIcon, LangfuseIcon, LangsmithIcon, OpikIcon, PhoenixIcon, TencentIcon, WeaveIcon } from '@/app/components/base/icons/src/public/tracing' import Indicator from '@/app/components/header/indicator' import { fetchTracingConfig as doFetchTracingConfig, fetchTracingStatus, updateTracingStatus } from '@/service/apps' import type { TracingStatus } from '@/models/app' @@ -71,6 +71,7 @@ const Panel: FC = () => { [TracingProvider.opik]: OpikIcon, [TracingProvider.weave]: WeaveIcon, [TracingProvider.aliyun]: AliyunIcon, + [TracingProvider.tencent]: TencentIcon, } const InUseProviderIcon = inUseTracingProvider ? 
providerIconMap[inUseTracingProvider] : undefined @@ -81,7 +82,8 @@ const Panel: FC = () => { const [opikConfig, setOpikConfig] = useState(null) const [weaveConfig, setWeaveConfig] = useState(null) const [aliyunConfig, setAliyunConfig] = useState(null) - const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig || opikConfig || weaveConfig || arizeConfig || phoenixConfig || aliyunConfig) + const [tencentConfig, setTencentConfig] = useState(null) + const hasConfiguredTracing = !!(langSmithConfig || langFuseConfig || opikConfig || weaveConfig || arizeConfig || phoenixConfig || aliyunConfig || tencentConfig) const fetchTracingConfig = async () => { const getArizeConfig = async () => { @@ -119,6 +121,11 @@ const Panel: FC = () => { if (!aliyunHasNotConfig) setAliyunConfig(aliyunConfig as AliyunConfig) } + const getTencentConfig = async () => { + const { tracing_config: tencentConfig, has_not_configured: tencentHasNotConfig } = await doFetchTracingConfig({ appId, provider: TracingProvider.tencent }) + if (!tencentHasNotConfig) + setTencentConfig(tencentConfig as TencentConfig) + } Promise.all([ getArizeConfig(), getPhoenixConfig(), @@ -127,6 +134,7 @@ const Panel: FC = () => { getOpikConfig(), getWeaveConfig(), getAliyunConfig(), + getTencentConfig(), ]) } @@ -147,6 +155,8 @@ const Panel: FC = () => { setWeaveConfig(tracing_config as WeaveConfig) else if (provider === TracingProvider.aliyun) setAliyunConfig(tracing_config as AliyunConfig) + else if (provider === TracingProvider.tencent) + setTencentConfig(tracing_config as TencentConfig) } const handleTracingConfigRemoved = (provider: TracingProvider) => { @@ -164,6 +174,8 @@ const Panel: FC = () => { setWeaveConfig(null) else if (provider === TracingProvider.aliyun) setAliyunConfig(null) + else if (provider === TracingProvider.tencent) + setTencentConfig(null) if (provider === inUseTracingProvider) { handleTracingStatusChange({ enabled: false, @@ -209,6 +221,7 @@ const Panel: FC = () => { opikConfig={opikConfig} weaveConfig={weaveConfig} aliyunConfig={aliyunConfig} + tencentConfig={tencentConfig} onConfigUpdated={handleTracingConfigUpdated} onConfigRemoved={handleTracingConfigRemoved} > @@ -245,6 +258,7 @@ const Panel: FC = () => { opikConfig={opikConfig} weaveConfig={weaveConfig} aliyunConfig={aliyunConfig} + tencentConfig={tencentConfig} onConfigUpdated={handleTracingConfigUpdated} onConfigRemoved={handleTracingConfigRemoved} > diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx index 318f1f61d6..9682bf6a07 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/provider-config-modal.tsx @@ -4,7 +4,7 @@ import React, { useCallback, useState } from 'react' import { useTranslation } from 'react-i18next' import { useBoolean } from 'ahooks' import Field from './field' -import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, WeaveConfig } from './type' +import type { AliyunConfig, ArizeConfig, LangFuseConfig, LangSmithConfig, OpikConfig, PhoenixConfig, TencentConfig, WeaveConfig } from './type' import { TracingProvider } from './type' import { docURL } from './config' import { @@ -22,10 +22,10 @@ import Divider from '@/app/components/base/divider' type Props = { appId: string type: TracingProvider - payload?: 
ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | null + payload?: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig | null onRemoved: () => void onCancel: () => void - onSaved: (payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig) => void + onSaved: (payload: ArizeConfig | PhoenixConfig | LangSmithConfig | LangFuseConfig | OpikConfig | WeaveConfig | AliyunConfig | TencentConfig) => void onChosen: (provider: TracingProvider) => void } @@ -77,6 +77,12 @@ const aliyunConfigTemplate = { endpoint: '', } +const tencentConfigTemplate = { + token: '', + endpoint: '', + service_name: '', +} + const ProviderConfigModal: FC = ({ appId, type, @@ -90,7 +96,7 @@ const ProviderConfigModal: FC = ({ const isEdit = !!payload const isAdd = !isEdit const [isSaving, setIsSaving] = useState(false) - const [config, setConfig] = useState((() => { + const [config, setConfig] = useState((() => { if (isEdit) return payload @@ -112,6 +118,9 @@ const ProviderConfigModal: FC = ({ else if (type === TracingProvider.aliyun) return aliyunConfigTemplate + else if (type === TracingProvider.tencent) + return tencentConfigTemplate + return weaveConfigTemplate })()) const [isShowRemoveConfirm, { @@ -202,6 +211,16 @@ const ProviderConfigModal: FC = ({ errorMessage = t('common.errorMsg.fieldRequired', { field: 'Endpoint' }) } + if (type === TracingProvider.tencent) { + const postData = config as TencentConfig + if (!errorMessage && !postData.token) + errorMessage = t('common.errorMsg.fieldRequired', { field: 'Token' }) + if (!errorMessage && !postData.endpoint) + errorMessage = t('common.errorMsg.fieldRequired', { field: 'Endpoint' }) + if (!errorMessage && !postData.service_name) + errorMessage = t('common.errorMsg.fieldRequired', { field: 'Service Name' }) + } + return errorMessage }, [config, t, type]) const handleSave = useCallback(async () => { @@ -338,6 +357,34 @@ const ProviderConfigModal: FC = ({ /> )} + {type === TracingProvider.tencent && ( + <> + + + + + )} {type === TracingProvider.weave && ( <> { [TracingProvider.opik]: OpikIconBig, [TracingProvider.weave]: WeaveIconBig, [TracingProvider.aliyun]: AliyunIconBig, + [TracingProvider.tencent]: TencentIconBig, })[type] } diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts index 78bca41ad2..719451f5d0 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/type.ts @@ -6,6 +6,7 @@ export enum TracingProvider { opik = 'opik', weave = 'weave', aliyun = 'aliyun', + tencent = 'tencent', } export type ArizeConfig = { @@ -53,3 +54,9 @@ export type AliyunConfig = { license_key: string endpoint: string } + +export type TencentConfig = { + token: string + endpoint: string + service_name: string +} diff --git a/web/app/(shareLayout)/components/authenticated-layout.tsx b/web/app/(shareLayout)/components/authenticated-layout.tsx index e3cfc8e6a8..2185606a6d 100644 --- a/web/app/(shareLayout)/components/authenticated-layout.tsx +++ b/web/app/(shareLayout)/components/authenticated-layout.tsx @@ -2,16 +2,17 @@ import AppUnavailable from '@/app/components/base/app-unavailable' import Loading from '@/app/components/base/loading' -import { removeAccessToken } from 
'@/app/components/share/utils' import { useWebAppStore } from '@/context/web-app-context' import { useGetUserCanAccessApp } from '@/service/access-control' import { useGetWebAppInfo, useGetWebAppMeta, useGetWebAppParams } from '@/service/use-share' +import { webAppLogout } from '@/service/webapp-auth' import { usePathname, useRouter, useSearchParams } from 'next/navigation' import React, { useCallback, useEffect } from 'react' import { useTranslation } from 'react-i18next' const AuthenticatedLayout = ({ children }: { children: React.ReactNode }) => { const { t } = useTranslation() + const shareCode = useWebAppStore(s => s.shareCode) const updateAppInfo = useWebAppStore(s => s.updateAppInfo) const updateAppParams = useWebAppStore(s => s.updateAppParams) const updateWebAppMeta = useWebAppStore(s => s.updateWebAppMeta) @@ -41,11 +42,11 @@ const AuthenticatedLayout = ({ children }: { children: React.ReactNode }) => { return `/webapp-signin?${params.toString()}` }, [searchParams, pathname]) - const backToHome = useCallback(() => { - removeAccessToken() + const backToHome = useCallback(async () => { + await webAppLogout(shareCode!) const url = getSigninUrl() router.replace(url) - }, [getSigninUrl, router]) + }, [getSigninUrl, router, webAppLogout, shareCode]) if (appInfoError) { return
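The authenticated layout above replaces the localStorage-based `removeAccessToken` with `webAppLogout`, deferring token invalidation to the backend. A minimal sketch of what such a helper could look like, assuming a hypothetical `POST` logout endpoint; the real implementation lives in `web/service/webapp-auth` and may differ:

```ts
// Hypothetical sketch only: the endpoint path and query parameter are assumptions.
export async function webAppLogout(shareCode: string): Promise<void> {
  // Cookies are HttpOnly, so the client cannot delete them itself;
  // it asks the server to clear them instead.
  await fetch(`/api/passport/logout?app_code=${encodeURIComponent(shareCode)}`, {
    method: 'POST',
    credentials: 'include', // send session cookies so the server knows which session to end
  })
  // No localStorage cleanup: tokens now live in cookies cleared server-side.
}
```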
diff --git a/web/app/(shareLayout)/components/splash.tsx b/web/app/(shareLayout)/components/splash.tsx index 4fe9efe4dd..c26ea7e045 100644 --- a/web/app/(shareLayout)/components/splash.tsx +++ b/web/app/(shareLayout)/components/splash.tsx @@ -1,15 +1,16 @@ 'use client' import type { FC, PropsWithChildren } from 'react' -import { useEffect } from 'react' +import { useEffect, useState } from 'react' import { useCallback } from 'react' import { useWebAppStore } from '@/context/web-app-context' import { useRouter, useSearchParams } from 'next/navigation' import AppUnavailable from '@/app/components/base/app-unavailable' -import { checkOrSetAccessToken, removeAccessToken, setAccessToken } from '@/app/components/share/utils' import { useTranslation } from 'react-i18next' +import { AccessMode } from '@/models/access-control' +import { webAppLoginStatus, webAppLogout } from '@/service/webapp-auth' import { fetchAccessToken } from '@/service/share' import Loading from '@/app/components/base/loading' -import { AccessMode } from '@/models/access-control' +import { setWebAppAccessToken, setWebAppPassport } from '@/service/webapp-auth' const Splash: FC = ({ children }) => { const { t } = useTranslation() @@ -18,9 +19,9 @@ const Splash: FC = ({ children }) => { const searchParams = useSearchParams() const router = useRouter() const redirectUrl = searchParams.get('redirect_url') - const tokenFromUrl = searchParams.get('web_sso_token') const message = searchParams.get('message') const code = searchParams.get('code') + const tokenFromUrl = searchParams.get('web_sso_token') const getSigninUrl = useCallback(() => { const params = new URLSearchParams(searchParams) params.delete('message') @@ -28,35 +29,66 @@ const Splash: FC = ({ children }) => { return `/webapp-signin?${params.toString()}` }, [searchParams]) - const backToHome = useCallback(() => { - removeAccessToken() + const backToHome = useCallback(async () => { + await webAppLogout(shareCode!) const url = getSigninUrl() router.replace(url) - }, [getSigninUrl, router]) + }, [getSigninUrl, router, webAppLogout, shareCode]) + const needCheckIsLogin = webAppAccessMode !== AccessMode.PUBLIC + const [isLoading, setIsLoading] = useState(true) useEffect(() => { + if (message) { + setIsLoading(false) + return + } + + if(tokenFromUrl) + setWebAppAccessToken(tokenFromUrl) + + const redirectOrFinish = () => { + if (redirectUrl) + router.replace(decodeURIComponent(redirectUrl)) + else + setIsLoading(false) + } + + const proceedToAuth = () => { + setIsLoading(false) + } + (async () => { - if (message) - return - if (shareCode && tokenFromUrl && redirectUrl) { - localStorage.setItem('webapp_access_token', tokenFromUrl) - const tokenResp = await fetchAccessToken({ appCode: shareCode, webAppAccessToken: tokenFromUrl }) - await setAccessToken(shareCode, tokenResp.access_token) - router.replace(decodeURIComponent(redirectUrl)) - return + const { userLoggedIn, appLoggedIn } = await webAppLoginStatus(needCheckIsLogin, shareCode!) 
+ + if (userLoggedIn && appLoggedIn) { + redirectOrFinish() } - if (shareCode && redirectUrl && localStorage.getItem('webapp_access_token')) { - const tokenResp = await fetchAccessToken({ appCode: shareCode, webAppAccessToken: localStorage.getItem('webapp_access_token') }) - await setAccessToken(shareCode, tokenResp.access_token) - router.replace(decodeURIComponent(redirectUrl)) - return + else if (!userLoggedIn && !appLoggedIn) { + proceedToAuth() } - if (webAppAccessMode === AccessMode.PUBLIC && redirectUrl) { - await checkOrSetAccessToken(shareCode) - router.replace(decodeURIComponent(redirectUrl)) + else if (!userLoggedIn && appLoggedIn) { + redirectOrFinish() + } + else if (userLoggedIn && !appLoggedIn) { + try { + const { access_token } = await fetchAccessToken({ appCode: shareCode! }) + setWebAppPassport(shareCode!, access_token) + redirectOrFinish() + } + catch (error) { + await webAppLogout(shareCode!) + proceedToAuth() + } } })() - }, [shareCode, redirectUrl, router, tokenFromUrl, message, webAppAccessMode]) + }, [ + shareCode, + redirectUrl, + router, + message, + webAppAccessMode, + needCheckIsLogin, + tokenFromUrl]) if (message) { return
@@ -64,12 +96,8 @@ const Splash: FC = ({ children }) => { {code === '403' ? t('common.userProfile.logout') : t('share.login.backToHome')}
} - if (tokenFromUrl) { - return
- -
- } - if (webAppAccessMode === AccessMode.PUBLIC && redirectUrl) { + + if (isLoading) { return
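The rewritten `splash.tsx` effect above boils down to a four-way decision on `(userLoggedIn, appLoggedIn)` as returned by `webAppLoginStatus`. A standalone sketch of that matrix, with the service calls abstracted into callbacks (names follow the diff; the helper itself is illustrative):

```ts
type LoginStatus = { userLoggedIn: boolean; appLoggedIn: boolean }

// Illustrative reduction of the splash-screen decision logic shown above.
async function resolveSplash(
  status: LoginStatus,
  exchangePassport: () => Promise<void>, // fetchAccessToken + setWebAppPassport
  logout: () => Promise<void>,           // webAppLogout fallback on a failed exchange
): Promise<'redirect' | 'auth'> {
  if (status.appLoggedIn)
    return 'redirect'        // valid app passport: continue (or follow redirect_url)
  if (!status.userLoggedIn)
    return 'auth'            // neither token valid: show the sign-in form
  try {
    await exchangePassport() // user session valid: mint an app passport
    return 'redirect'
  }
  catch {
    await logout()           // exchange failed: clear state and re-authenticate
    return 'auth'
  }
}
```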
diff --git a/web/app/(shareLayout)/webapp-signin/check-code/page.tsx b/web/app/(shareLayout)/webapp-signin/check-code/page.tsx index 3fc32fec71..4a1326fedf 100644 --- a/web/app/(shareLayout)/webapp-signin/check-code/page.tsx +++ b/web/app/(shareLayout)/webapp-signin/check-code/page.tsx @@ -10,7 +10,7 @@ import Input from '@/app/components/base/input' import Toast from '@/app/components/base/toast' import { sendWebAppEMailLoginCode, webAppEmailLoginWithCode } from '@/service/common' import I18NContext from '@/context/i18n' -import { setAccessToken } from '@/app/components/share/utils' +import { setWebAppAccessToken, setWebAppPassport } from '@/service/webapp-auth' import { fetchAccessToken } from '@/service/share' export default function CheckCode() { @@ -62,9 +62,9 @@ export default function CheckCode() { setIsLoading(true) const ret = await webAppEmailLoginWithCode({ email, code, token }) if (ret.result === 'success') { - localStorage.setItem('webapp_access_token', ret.data.access_token) - const tokenResp = await fetchAccessToken({ appCode, webAppAccessToken: ret.data.access_token }) - await setAccessToken(appCode, tokenResp.access_token) + setWebAppAccessToken(ret.data.access_token) + const { access_token } = await fetchAccessToken({ appCode: appCode! }) + setWebAppPassport(appCode!, access_token) router.replace(decodeURIComponent(redirectUrl)) } } diff --git a/web/app/(shareLayout)/webapp-signin/components/mail-and-password-auth.tsx b/web/app/(shareLayout)/webapp-signin/components/mail-and-password-auth.tsx index 2201b28a2f..ce220b103e 100644 --- a/web/app/(shareLayout)/webapp-signin/components/mail-and-password-auth.tsx +++ b/web/app/(shareLayout)/webapp-signin/components/mail-and-password-auth.tsx @@ -11,15 +11,13 @@ import { webAppLogin } from '@/service/common' import Input from '@/app/components/base/input' import I18NContext from '@/context/i18n' import { noop } from 'lodash-es' -import { setAccessToken } from '@/app/components/share/utils' import { fetchAccessToken } from '@/service/share' +import { setWebAppAccessToken, setWebAppPassport } from '@/service/webapp-auth' type MailAndPasswordAuthProps = { isEmailSetup: boolean } -const passwordRegex = /^(?=.*[a-zA-Z])(?=.*\d).{8,}$/ - export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAuthProps) { const { t } = useTranslation() const { locale } = useContext(I18NContext) @@ -43,8 +41,8 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut return appCode }, [redirectUrl]) + const appCode = getAppCodeFromRedirectUrl() const handleEmailPasswordLogin = async () => { - const appCode = getAppCodeFromRedirectUrl() if (!email) { Toast.notify({ type: 'error', message: t('login.error.emailEmpty') }) return @@ -60,13 +58,7 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut Toast.notify({ type: 'error', message: t('login.error.passwordEmpty') }) return } - if (!passwordRegex.test(password)) { - Toast.notify({ - type: 'error', - message: t('login.error.passwordInvalid'), - }) - return - } + if (!redirectUrl || !appCode) { Toast.notify({ type: 'error', @@ -88,9 +80,10 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut body: loginData, }) if (res.result === 'success') { - localStorage.setItem('webapp_access_token', res.data.access_token) - const tokenResp = await fetchAccessToken({ appCode, webAppAccessToken: res.data.access_token }) - await setAccessToken(appCode, tokenResp.access_token) + 
setWebAppAccessToken(res.data.access_token) + + const { access_token } = await fetchAccessToken({ appCode: appCode! }) + setWebAppPassport(appCode!, access_token) router.replace(decodeURIComponent(redirectUrl)) } else { @@ -100,7 +93,10 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut }) } } - + catch (e: any) { + if (e.code === 'authentication_failed') + Toast.notify({ type: 'error', message: e.message }) + } finally { setIsLoading(false) } @@ -138,9 +134,9 @@ export default function MailAndPasswordAuth({ isEmailSetup }: MailAndPasswordAut
setPassword(e.target.value)} + id="password" onKeyDown={(e) => { if (e.key === 'Enter') handleEmailPasswordLogin() diff --git a/web/app/(shareLayout)/webapp-signin/page.tsx b/web/app/(shareLayout)/webapp-signin/page.tsx index 1c6209b902..2ffa19c0c9 100644 --- a/web/app/(shareLayout)/webapp-signin/page.tsx +++ b/web/app/(shareLayout)/webapp-signin/page.tsx @@ -3,13 +3,13 @@ import { useRouter, useSearchParams } from 'next/navigation' import type { FC } from 'react' import React, { useCallback } from 'react' import { useTranslation } from 'react-i18next' -import { removeAccessToken } from '@/app/components/share/utils' import { useGlobalPublicStore } from '@/context/global-public-context' import AppUnavailable from '@/app/components/base/app-unavailable' import NormalForm from './normalForm' import { AccessMode } from '@/models/access-control' import ExternalMemberSsoAuth from './components/external-member-sso-auth' import { useWebAppStore } from '@/context/web-app-context' +import { webAppLogout } from '@/service/webapp-auth' const WebSSOForm: FC = () => { const { t } = useTranslation() @@ -26,11 +26,12 @@ const WebSSOForm: FC = () => { return `/webapp-signin?${params.toString()}` }, [redirectUrl]) - const backToHome = useCallback(() => { - removeAccessToken() + const shareCode = useWebAppStore(s => s.shareCode) + const backToHome = useCallback(async () => { + await webAppLogout(shareCode!) const url = getSigninUrl() router.replace(url) - }, [getSigninUrl, router]) + }, [getSigninUrl, router, webAppLogout, shareCode]) if (!redirectUrl) { return
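Both the email-code page and the password form above converge on the same two-step hand-off: persist the user-level access token, then exchange it for an app passport before following `redirect_url`. Sketched as one helper — the imports are the ones the diff itself uses; the wrapper function is illustrative:

```ts
import { fetchAccessToken } from '@/service/share'
import { setWebAppAccessToken, setWebAppPassport } from '@/service/webapp-auth'

// Illustrative consolidation of the shared post-login sequence.
async function completeWebAppLogin(
  appCode: string,
  userAccessToken: string,
  redirectUrl: string,
  replace: (url: string) => void, // e.g. router.replace from next/navigation
) {
  setWebAppAccessToken(userAccessToken)                        // user-level token
  const { access_token } = await fetchAccessToken({ appCode }) // app-level passport
  setWebAppPassport(appCode, access_token)
  replace(decodeURIComponent(redirectUrl))
}
```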
diff --git a/web/app/account/(commonLayout)/account-page/email-change-modal.tsx b/web/app/account/(commonLayout)/account-page/email-change-modal.tsx index bd00f27ac5..d04cd18557 100644 --- a/web/app/account/(commonLayout)/account-page/email-change-modal.tsx +++ b/web/app/account/(commonLayout)/account-page/email-change-modal.tsx @@ -9,7 +9,6 @@ import Button from '@/app/components/base/button' import Input from '@/app/components/base/input' import { checkEmailExisted, - logout, resetEmail, sendVerifyCode, verifyEmail, @@ -17,6 +16,7 @@ import { import { noop } from 'lodash-es' import { asyncRunSafe } from '@/utils' import type { ResponseError } from '@/service/fetch' +import { useLogout } from '@/service/use-common' type Props = { show: boolean @@ -167,15 +167,12 @@ const EmailChangeModal = ({ onClose, email, show }: Props) => { setStep(STEP.verifyNew) } + const { mutateAsync: logout } = useLogout() const handleLogout = async () => { - await logout({ - url: '/logout', - params: {}, - }) + await logout() localStorage.removeItem('setup_status') - localStorage.removeItem('console_token') - localStorage.removeItem('refresh_token') + // Tokens are now stored in cookies and cleared by backend router.push('/signin') } diff --git a/web/app/account/(commonLayout)/avatar.tsx b/web/app/account/(commonLayout)/avatar.tsx index ea897e639f..d8943b7879 100644 --- a/web/app/account/(commonLayout)/avatar.tsx +++ b/web/app/account/(commonLayout)/avatar.tsx @@ -7,11 +7,11 @@ import { } from '@remixicon/react' import { Menu, MenuButton, MenuItem, MenuItems, Transition } from '@headlessui/react' import Avatar from '@/app/components/base/avatar' -import { logout } from '@/service/common' import { useAppContext } from '@/context/app-context' import { useProviderContext } from '@/context/provider-context' import { LogOut01 } from '@/app/components/base/icons/src/vender/line/general' import PremiumBadge from '@/app/components/base/premium-badge' +import { useLogout } from '@/service/use-common' export type IAppSelector = { isMobile: boolean @@ -23,15 +23,12 @@ export default function AppSelector() { const { userProfile } = useAppContext() const { isEducationAccount } = useProviderContext() + const { mutateAsync: logout } = useLogout() const handleLogout = async () => { - await logout({ - url: '/logout', - params: {}, - }) + await logout() localStorage.removeItem('setup_status') - localStorage.removeItem('console_token') - localStorage.removeItem('refresh_token') + // Tokens are now stored in cookies and cleared by backend router.push('/signin') } diff --git a/web/app/account/(commonLayout)/delete-account/components/feed-back.tsx b/web/app/account/(commonLayout)/delete-account/components/feed-back.tsx index 2cd30bc3f2..64a378d2fe 100644 --- a/web/app/account/(commonLayout)/delete-account/components/feed-back.tsx +++ b/web/app/account/(commonLayout)/delete-account/components/feed-back.tsx @@ -8,7 +8,7 @@ import Button from '@/app/components/base/button' import CustomDialog from '@/app/components/base/dialog' import Textarea from '@/app/components/base/textarea' import Toast from '@/app/components/base/toast' -import { logout } from '@/service/common' +import { useLogout } from '@/service/use-common' type DeleteAccountProps = { onCancel: () => void @@ -22,14 +22,11 @@ export default function FeedBack(props: DeleteAccountProps) { const [userFeedback, setUserFeedback] = useState('') const { isPending, mutateAsync: sendFeedback } = useDeleteAccountFeedback() + const { mutateAsync: logout } = useLogout() const 
handleSuccess = useCallback(async () => { try { - await logout({ - url: '/logout', - params: {}, - }) - localStorage.removeItem('refresh_token') - localStorage.removeItem('console_token') + await logout() + // Tokens are now stored in cookies and cleared by backend router.push('/signin') Toast.notify({ type: 'info', message: t('common.account.deleteSuccessTip') }) } diff --git a/web/app/account/oauth/authorize/layout.tsx b/web/app/account/oauth/authorize/layout.tsx index 078d23114a..2ab676d6b6 100644 --- a/web/app/account/oauth/authorize/layout.tsx +++ b/web/app/account/oauth/authorize/layout.tsx @@ -5,17 +5,22 @@ import cn from '@/utils/classnames' import { useGlobalPublicStore } from '@/context/global-public-context' import useDocumentTitle from '@/hooks/use-document-title' import { AppContextProvider } from '@/context/app-context' -import { useMemo } from 'react' +import { useIsLogin } from '@/service/use-common' +import Loading from '@/app/components/base/loading' export default function SignInLayout({ children }: any) { const { systemFeatures } = useGlobalPublicStore() useDocumentTitle('') - const isLoggedIn = useMemo(() => { - try { - return Boolean(localStorage.getItem('console_token') && localStorage.getItem('refresh_token')) - } - catch { return false } - }, []) + const { isLoading, data: loginData } = useIsLogin() + const isLoggedIn = loginData?.logged_in + + if(isLoading) { + return ( +
+ +
+ ) + } return <>
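The OAuth authorize layout above drops the `localStorage` probe in favor of a `useIsLogin()` hook that asks the backend whether the cookie session is valid. A sketch of how such a hook could be built on `@tanstack/react-query`; only the `{ logged_in }` response shape comes from the diff — the endpoint path is an assumption, and the real hook lives in `web/service/use-common`:

```ts
import { useQuery } from '@tanstack/react-query'

// Hypothetical sketch of useIsLogin; endpoint path is assumed.
export function useIsLogin() {
  return useQuery({
    queryKey: ['common', 'is-login'],
    queryFn: async (): Promise<{ logged_in: boolean }> => {
      const res = await fetch('/console/api/login/status', { credentials: 'include' })
      if (!res.ok)
        return { logged_in: false } // a 401 means "not logged in", not an error state
      return res.json()
    },
    retry: false, // an auth check should answer once, not retry through failures
  })
}
```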
diff --git a/web/app/account/oauth/authorize/page.tsx b/web/app/account/oauth/authorize/page.tsx index 6ad63996ae..4aa5fa0b8e 100644 --- a/web/app/account/oauth/authorize/page.tsx +++ b/web/app/account/oauth/authorize/page.tsx @@ -1,6 +1,6 @@ 'use client' -import React, { useEffect, useMemo, useRef } from 'react' +import React, { useEffect, useRef } from 'react' import { useTranslation } from 'react-i18next' import { useRouter, useSearchParams } from 'next/navigation' import Button from '@/app/components/base/button' @@ -18,6 +18,7 @@ import { RiTranslate2, } from '@remixicon/react' import dayjs from 'dayjs' +import { useIsLogin } from '@/service/use-common' export const OAUTH_AUTHORIZE_PENDING_KEY = 'oauth_authorize_pending' export const REDIRECT_URL_KEY = 'oauth_redirect_url' @@ -74,17 +75,13 @@ export default function OAuthAuthorize() { const client_id = decodeURIComponent(searchParams.get('client_id') || '') const redirect_uri = decodeURIComponent(searchParams.get('redirect_uri') || '') const { userProfile } = useAppContext() - const { data: authAppInfo, isLoading, isError } = useOAuthAppInfo(client_id, redirect_uri) + const { data: authAppInfo, isLoading: isOAuthLoading, isError } = useOAuthAppInfo(client_id, redirect_uri) const { mutateAsync: authorize, isPending: authorizing } = useAuthorizeOAuthApp() const hasNotifiedRef = useRef(false) - const isLoggedIn = useMemo(() => { - try { - return Boolean(localStorage.getItem('console_token') && localStorage.getItem('refresh_token')) - } - catch { return false } - }, []) - + const { isLoading: isIsLoginLoading, data: loginData } = useIsLogin() + const isLoggedIn = loginData?.logged_in + const isLoading = isOAuthLoading || isIsLoginLoading const onLoginSwitchClick = () => { try { const returnUrl = buildReturnUrl('/account/oauth/authorize', `?client_id=${encodeURIComponent(client_id)}&redirect_uri=${encodeURIComponent(redirect_uri)}`) diff --git a/web/app/components/app-sidebar/dataset-info/menu.tsx b/web/app/components/app-sidebar/dataset-info/menu.tsx index fd560ce643..6f91c9c513 100644 --- a/web/app/components/app-sidebar/dataset-info/menu.tsx +++ b/web/app/components/app-sidebar/dataset-info/menu.tsx @@ -3,6 +3,7 @@ import { useTranslation } from 'react-i18next' import MenuItem from './menu-item' import { RiDeleteBinLine, RiEditLine, RiFileDownloadLine } from '@remixicon/react' import Divider from '../../base/divider' +import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail' type MenuProps = { showDelete: boolean @@ -18,6 +19,7 @@ const Menu = ({ detectIsUsedByApp, }: MenuProps) => { const { t } = useTranslation() + const runtimeMode = useDatasetDetailContextWithSelector(state => state.dataset?.runtime_mode) return (
@@ -27,11 +29,13 @@ const Menu = ({ name={t('common.operation.edit')} handleClick={openRenameModal} /> - + {runtimeMode === 'rag_pipeline' && ( + + )}
{showDelete && ( <> diff --git a/web/app/components/app/annotation/edit-annotation-modal/edit-item/index.tsx b/web/app/components/app/annotation/edit-annotation-modal/edit-item/index.tsx index 17cb456558..e808d0b48a 100644 --- a/web/app/components/app/annotation/edit-annotation-modal/edit-item/index.tsx +++ b/web/app/components/app/annotation/edit-annotation-modal/edit-item/index.tsx @@ -1,6 +1,6 @@ 'use client' import type { FC } from 'react' -import React, { useState } from 'react' +import React, { useEffect, useState } from 'react' import { useTranslation } from 'react-i18next' import { RiDeleteBinLine, RiEditFill, RiEditLine } from '@remixicon/react' import { Robot, User } from '@/app/components/base/icons/src/public/avatar' @@ -16,7 +16,7 @@ type Props = { type: EditItemType content: string readonly?: boolean - onSave: (content: string) => void + onSave: (content: string) => Promise } export const EditTitle: FC<{ className?: string; title: string }> = ({ className, title }) => ( @@ -46,8 +46,13 @@ const EditItem: FC = ({ const placeholder = type === EditItemType.Query ? t('appAnnotation.editModal.queryPlaceholder') : t('appAnnotation.editModal.answerPlaceholder') const [isEdit, setIsEdit] = useState(false) - const handleSave = () => { - onSave(newContent) + // Reset newContent when content prop changes + useEffect(() => { + setNewContent('') + }, [content]) + + const handleSave = async () => { + await onSave(newContent) setIsEdit(false) } diff --git a/web/app/components/app/annotation/view-annotation-modal/index.tsx b/web/app/components/app/annotation/view-annotation-modal/index.tsx index 08904d23d4..8426ab0005 100644 --- a/web/app/components/app/annotation/view-annotation-modal/index.tsx +++ b/web/app/components/app/annotation/view-annotation-modal/index.tsx @@ -21,7 +21,7 @@ type Props = { isShow: boolean onHide: () => void item: AnnotationItem - onSave: (editedQuery: string, editedAnswer: string) => void + onSave: (editedQuery: string, editedAnswer: string) => Promise onRemove: () => void } @@ -46,6 +46,16 @@ const ViewAnnotationModal: FC = ({ const [currPage, setCurrPage] = React.useState(0) const [total, setTotal] = useState(0) const [hitHistoryList, setHitHistoryList] = useState([]) + + // Update local state when item prop changes (e.g., when modal is reopened with updated data) + useEffect(() => { + setNewQuery(question) + setNewAnswer(answer) + setCurrPage(0) + setTotal(0) + setHitHistoryList([]) + }, [question, answer, id]) + const fetchHitHistory = async (page = 1) => { try { const { data, total }: any = await fetchHitHistoryList(appId, id, { @@ -63,6 +73,12 @@ const ViewAnnotationModal: FC = ({ fetchHitHistory(currPage + 1) }, [currPage]) + // Fetch hit history when item changes + useEffect(() => { + if (isShow && id) + fetchHitHistory(1) + }, [id, isShow]) + const tabs = [ { value: TabType.annotation, text: t('appAnnotation.viewModal.annotatedResponse') }, { @@ -82,14 +98,20 @@ const ViewAnnotationModal: FC = ({ }, ] const [activeTab, setActiveTab] = useState(TabType.annotation) - const handleSave = (type: EditItemType, editedContent: string) => { - if (type === EditItemType.Query) { - setNewQuery(editedContent) - onSave(editedContent, newAnswer) + const handleSave = async (type: EditItemType, editedContent: string) => { + try { + if (type === EditItemType.Query) { + await onSave(editedContent, newAnswer) + setNewQuery(editedContent) + } + else { + await onSave(newQuestion, editedContent) + setNewAnswer(editedContent) + } } - else { - setNewAnswer(editedContent) - 
onSave(newQuestion, editedContent) + catch (error) { + // If save fails, don't update local state + console.error('Failed to save annotation:', error) } } const [showModal, setShowModal] = useState(false) diff --git a/web/app/components/app/app-access-control/access-control-dialog.tsx b/web/app/components/app/app-access-control/access-control-dialog.tsx index 479eedc9cf..ee3fa9650b 100644 --- a/web/app/components/app/app-access-control/access-control-dialog.tsx +++ b/web/app/components/app/app-access-control/access-control-dialog.tsx @@ -22,7 +22,7 @@ const AccessControlDialog = ({ }, [onClose]) return ( - null}> + null}> -
+
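The annotation editors above change `onSave` from a fire-and-forget callback to one returning a `Promise`, so child components can await persistence before committing local state. The pattern, reduced to its essentials (an illustrative helper, not the component code):

```ts
type SaveFn = (content: string) => Promise<void>

// Await-before-commit: local state mirrors the server only after a successful
// save, so a failed request no longer leaves stale "saved" text in the UI.
async function commitEdit(
  draft: string,
  onSave: SaveFn,
  setSaved: (value: string) => void,
  closeEditor: () => void,
) {
  try {
    await onSave(draft) // persist first...
    setSaved(draft)     // ...then update what the view treats as saved
    closeEditor()
  }
  catch (error) {
    console.error('Failed to save annotation:', error) // keep the editor open on failure
  }
}
```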
diff --git a/web/app/components/app/app-access-control/add-member-or-group-pop.tsx b/web/app/components/app/app-access-control/add-member-or-group-pop.tsx index 0fad6cc740..e9519aeedf 100644 --- a/web/app/components/app/app-access-control/add-member-or-group-pop.tsx +++ b/web/app/components/app/app-access-control/add-member-or-group-pop.tsx @@ -52,7 +52,7 @@ export default function AddMemberOrGroupDialog() { {open && } - +
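The long run of `import produce from 'immer'` → `import { produce } from 'immer'` changes that follows tracks immer v10, which removed the default export. Call sites are untouched; only the import form changes:

```ts
import { produce } from 'immer' // immer >= 10 ships named exports only

type VisionConfig = { vision: { enabled: boolean } }

// Usage is identical to the old default-import form.
const next = produce({ vision: { enabled: false } } as VisionConfig, (draft) => {
  draft.vision.enabled = true
})
```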
diff --git a/web/app/components/app/app-publisher/features-wrapper.tsx b/web/app/components/app/app-publisher/features-wrapper.tsx index dadd112135..409c390f4b 100644 --- a/web/app/components/app/app-publisher/features-wrapper.tsx +++ b/web/app/components/app/app-publisher/features-wrapper.tsx @@ -1,6 +1,6 @@ import React, { useCallback, useState } from 'react' import { useTranslation } from 'react-i18next' -import produce from 'immer' +import { produce } from 'immer' import type { AppPublisherProps } from '@/app/components/app/app-publisher' import Confirm from '@/app/components/base/confirm' import AppPublisher from '@/app/components/app/app-publisher' diff --git a/web/app/components/app/configuration/base/icons/citation.tsx b/web/app/components/app/configuration/base/icons/citation.tsx deleted file mode 100644 index 3aa6b0f0e1..0000000000 --- a/web/app/components/app/configuration/base/icons/citation.tsx +++ /dev/null @@ -1,29 +0,0 @@ -import type { SVGProps } from 'react' - -const CitationIcon = (props: SVGProps) => ( - -) - -export default CitationIcon diff --git a/web/app/components/app/configuration/base/icons/more-like-this-icon.tsx b/web/app/components/app/configuration/base/icons/more-like-this-icon.tsx deleted file mode 100644 index 74c808eb39..0000000000 --- a/web/app/components/app/configuration/base/icons/more-like-this-icon.tsx +++ /dev/null @@ -1,14 +0,0 @@ -'use client' -import type { FC } from 'react' -import React from 'react' - -const MoreLikeThisIcon: FC = () => { - return ( - - - - - - ) -} -export default React.memo(MoreLikeThisIcon) diff --git a/web/app/components/app/configuration/base/icons/suggested-questions-after-answer-icon.tsx b/web/app/components/app/configuration/base/icons/suggested-questions-after-answer-icon.tsx deleted file mode 100644 index cabc2e4d73..0000000000 --- a/web/app/components/app/configuration/base/icons/suggested-questions-after-answer-icon.tsx +++ /dev/null @@ -1,12 +0,0 @@ -'use client' -import type { FC } from 'react' -import React from 'react' - -const SuggestedQuestionsAfterAnswerIcon: FC = () => { - return ( - - - - ) -} -export default React.memo(SuggestedQuestionsAfterAnswerIcon) diff --git a/web/app/components/app/configuration/config-prompt/advanced-prompt-input.tsx b/web/app/components/app/configuration/config-prompt/advanced-prompt-input.tsx index e2d37bb9de..70e0334e98 100644 --- a/web/app/components/app/configuration/config-prompt/advanced-prompt-input.tsx +++ b/web/app/components/app/configuration/config-prompt/advanced-prompt-input.tsx @@ -5,7 +5,7 @@ import copy from 'copy-to-clipboard' import { useTranslation } from 'react-i18next' import { useContext } from 'use-context-selector' import { useBoolean } from 'ahooks' -import produce from 'immer' +import { produce } from 'immer' import { RiDeleteBinLine, RiErrorWarningFill, diff --git a/web/app/components/app/configuration/config-prompt/index.tsx b/web/app/components/app/configuration/config-prompt/index.tsx index 1caca47bcb..ec34588e41 100644 --- a/web/app/components/app/configuration/config-prompt/index.tsx +++ b/web/app/components/app/configuration/config-prompt/index.tsx @@ -2,7 +2,7 @@ import type { FC } from 'react' import React from 'react' import { useContext } from 'use-context-selector' -import produce from 'immer' +import { produce } from 'immer' import { RiAddLine, } from '@remixicon/react' diff --git a/web/app/components/app/configuration/config-prompt/simple-prompt-input.tsx b/web/app/components/app/configuration/config-prompt/simple-prompt-input.tsx index 
a7bdc550d1..169e8a14a2 100644 --- a/web/app/components/app/configuration/config-prompt/simple-prompt-input.tsx +++ b/web/app/components/app/configuration/config-prompt/simple-prompt-input.tsx @@ -3,7 +3,7 @@ import type { FC } from 'react' import React, { useState } from 'react' import { useTranslation } from 'react-i18next' import { useBoolean } from 'ahooks' -import produce from 'immer' +import { produce } from 'immer' import { useContext } from 'use-context-selector' import ConfirmAddVar from './confirm-add-var' import PromptEditorHeightResizeWrap from './prompt-editor-height-resize-wrap' diff --git a/web/app/components/app/configuration/config-var/config-modal/index.tsx b/web/app/components/app/configuration/config-var/config-modal/index.tsx index 8a02ca8caa..de7d2c9eac 100644 --- a/web/app/components/app/configuration/config-var/config-modal/index.tsx +++ b/web/app/components/app/configuration/config-var/config-modal/index.tsx @@ -3,7 +3,7 @@ import type { ChangeEvent, FC } from 'react' import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react' import { useTranslation } from 'react-i18next' import { useContext } from 'use-context-selector' -import produce from 'immer' +import { produce } from 'immer' import ModalFoot from '../modal-foot' import ConfigSelect from '../config-select' import ConfigString from '../config-string' diff --git a/web/app/components/app/configuration/config-var/index.tsx b/web/app/components/app/configuration/config-var/index.tsx index 2ac68227e3..6726498294 100644 --- a/web/app/components/app/configuration/config-var/index.tsx +++ b/web/app/components/app/configuration/config-var/index.tsx @@ -1,10 +1,11 @@ 'use client' import type { FC } from 'react' -import React, { useState } from 'react' +import React, { useMemo, useState } from 'react' import { useTranslation } from 'react-i18next' import { useBoolean } from 'ahooks' import { useContext } from 'use-context-selector' -import produce from 'immer' +import { produce } from 'immer' +import { ReactSortable } from 'react-sortablejs' import Panel from '../base/feature-panel' import EditModal from './config-modal' import VarItem from './var-item' @@ -22,6 +23,7 @@ import { useModalContext } from '@/context/modal-context' import { useEventEmitterContextContext } from '@/context/event-emitter' import type { InputVar } from '@/app/components/workflow/types' import { InputVarType } from '@/app/components/workflow/types' +import cn from '@/utils/classnames' export const ADD_EXTERNAL_DATA_TOOL = 'ADD_EXTERNAL_DATA_TOOL' @@ -218,6 +220,16 @@ const ConfigVar: FC = ({ promptVariables, readonly, onPromptVar showEditModal() } + + const promptVariablesWithIds = useMemo(() => promptVariables.map((item) => { + return { + id: item.key, + variable: { ...item }, + } + }), [promptVariables]) + + const canDrag = !readonly && promptVariables.length > 1 + return ( = ({ promptVariables, readonly, onPromptVar )} {hasVar && (
- {promptVariables.map(({ key, name, type, required, config, icon, icon_background }, index) => ( - handleConfig({ type, key, index, name, config, icon, icon_background })} - onRemove={() => handleRemoveVar(index)} - /> - ))} + { onPromptVariablesChange?.(list.map(item => item.variable)) }} + handle='.handle' + ghostClass='opacity-50' + animation={150} + > + {promptVariablesWithIds.map((item, index) => { + const { key, name, type, required, config, icon, icon_background } = item.variable + return ( + handleConfig({ type, key, index, name, config, icon, icon_background })} + onRemove={() => handleRemoveVar(index)} + canDrag={canDrag} + /> + ) + })} +
diff --git a/web/app/components/app/configuration/config-var/var-item.tsx b/web/app/components/app/configuration/config-var/var-item.tsx
index 78ed4b1031..88cd5d7843 100644
--- a/web/app/components/app/configuration/config-var/var-item.tsx
+++ b/web/app/components/app/configuration/config-var/var-item.tsx
@@ -3,6 +3,7 @@ import type { FC } from 'react'
 import React, { useState } from 'react'
 import {
   RiDeleteBinLine,
+  RiDraggable,
   RiEditLine,
 } from '@remixicon/react'
 import type { IInputTypeIconProps } from './input-type-icon'
@@ -12,6 +13,7 @@ import Badge from '@/app/components/base/badge'
 import cn from '@/utils/classnames'
 
 type ItemProps = {
+  className?: string
   readonly?: boolean
   name: string
   label: string
@@ -19,9 +21,11 @@ type ItemProps = {
   type: string
   onEdit: () => void
   onRemove: () => void
+  canDrag?: boolean
 }
 
 const VarItem: FC<ItemProps> = ({
+  className,
   readonly,
   name,
   label,
@@ -29,12 +33,16 @@ const VarItem: FC<ItemProps> = ({
   type,
   onEdit,
   onRemove,
+  canDrag,
 }) => {
   const [isDeleting, setIsDeleting] = useState(false)
 
   return (
-    <div className={cn('...')}>
+    <div className={cn(className, '...')}>
+      {canDrag && (
+        <RiDraggable className='handle ...' />
+      )}
       <div className='...'>
         {name}
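The `handle='.handle'` option passed to `ReactSortable` above restricts drag-starts to descendants matching that CSS selector, so the new `RiDraggable` icon opts a row into dragging by carrying the `handle` class only when `canDrag` is true. A hedged sketch of the pattern (class names are illustrative; the real `VarItem` renders more chrome):

    // Illustrative sortable row: only the handle icon can start a drag,
    // because ReactSortable was configured with handle='.handle'.
    import type { FC } from 'react'
    import { RiDraggable } from '@remixicon/react'

    type RowProps = { name: string; canDrag?: boolean }

    const SortableRow: FC<RowProps> = ({ name, canDrag }) => (
      <div className='flex items-center gap-1'>
        {/* The 'handle' class is what matches the drag-handle selector;
            the rest of the row stays freely clickable. */}
        {canDrag && <RiDraggable className='handle h-4 w-4 cursor-grab' />}
        <span>{name}</span>
      </div>
    )

    export default SortableRow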
diff --git a/web/app/components/app/configuration/config-vision/index.tsx b/web/app/components/app/configuration/config-vision/index.tsx
index f0904b3fd8..bbe322ee7e 100644
--- a/web/app/components/app/configuration/config-vision/index.tsx
+++ b/web/app/components/app/configuration/config-vision/index.tsx
@@ -2,7 +2,7 @@ import type { FC } from 'react'
 import React, { useCallback } from 'react'
 import { useTranslation } from 'react-i18next'
-import produce from 'immer'
+import { produce } from 'immer'
 import { useContext } from 'use-context-selector'
 import ParamConfig from './param-config'
 import { Vision } from '@/app/components/base/icons/src/vender/features'
diff --git a/web/app/components/app/configuration/config-vision/param-config-content.tsx b/web/app/components/app/configuration/config-vision/param-config-content.tsx
index f0d8122102..359f79dd57 100644
--- a/web/app/components/app/configuration/config-vision/param-config-content.tsx
+++ b/web/app/components/app/configuration/config-vision/param-config-content.tsx
@@ -2,7 +2,7 @@ import type { FC } from 'react'
 import React, { useCallback } from 'react'
 import { useTranslation } from 'react-i18next'
-import produce from 'immer'
+import { produce } from 'immer'
 import OptionCard from '@/app/components/workflow/nodes/_base/components/option-card'
 import { Resolution, TransferMethod } from '@/types/app'
 import ParamItem from '@/app/components/base/param-item'
diff --git a/web/app/components/app/configuration/config/agent/agent-tools/index.tsx b/web/app/components/app/configuration/config/agent/agent-tools/index.tsx
index b4711ea39a..f2b9c105fc 100644
--- a/web/app/components/app/configuration/config/agent/agent-tools/index.tsx
+++ b/web/app/components/app/configuration/config/agent/agent-tools/index.tsx
@@ -4,7 +4,7 @@ import React, { useCallback, useMemo, useState } from 'react'
 import { useTranslation } from 'react-i18next'
 import { useContext } from 'use-context-selector'
 import copy from 'copy-to-clipboard'
-import produce from 'immer'
+import { produce } from 'immer'
 import {
   RiDeleteBinLine,
   RiEqualizer2Line,
diff --git a/web/app/components/app/configuration/config/config-audio.tsx b/web/app/components/app/configuration/config/config-audio.tsx
index 5600f8cbb6..5253b7c902 100644
--- a/web/app/components/app/configuration/config/config-audio.tsx
+++ b/web/app/components/app/configuration/config/config-audio.tsx
@@ -2,7 +2,7 @@ import type { FC } from 'react'
 import React, { useCallback } from 'react'
 import { useTranslation } from 'react-i18next'
-import produce from 'immer'
+import { produce } from 'immer'
 import { useContext } from 'use-context-selector'
 
 import { Microphone01 } from '@/app/components/base/icons/src/vender/features'
diff --git a/web/app/components/app/configuration/config/config-document.tsx b/web/app/components/app/configuration/config/config-document.tsx
index 9300bbc712..c0e8cc3a2d 100644
--- a/web/app/components/app/configuration/config/config-document.tsx
+++ b/web/app/components/app/configuration/config/config-document.tsx
@@ -2,7 +2,7 @@ import type { FC } from 'react'
 import React, { useCallback } from 'react'
 import { useTranslation } from 'react-i18next'
-import produce from 'immer'
+import { produce } from 'immer'
 import { useContext } from 'use-context-selector'
 
 import { Document } from '@/app/components/base/icons/src/vender/features'
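All of the `-import produce from 'immer'` / `+import { produce } from 'immer'` hunks in this batch are the same mechanical migration: immer v10 removed the default export in favor of named exports, so the old form fails under module resolution that honors the package's declared exports. The call sites are untouched, as a quick sketch shows (the config shape here is illustrative):

    // immer >= 10: named export only (the default export was removed).
    import { produce } from 'immer'

    type VisionConfig = { enabled: boolean; detail: 'low' | 'high' }

    const base: VisionConfig = { enabled: false, detail: 'low' }

    // The produce(state, recipe) call site is identical to the old default-import form.
    const next = produce(base, (draft) => {
      draft.enabled = true
    })

    console.log(base.enabled, next.enabled) // false true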
diff --git a/web/app/components/app/configuration/config/feature/use-feature.tsx b/web/app/components/app/configuration/config/feature/use-feature.tsx
deleted file mode 100644
index acc08dd4a4..0000000000
--- a/web/app/components/app/configuration/config/feature/use-feature.tsx
+++ /dev/null
@@ -1,96 +0,0 @@
-import React, { useEffect } from 'react'
-
-function useFeature({
-  introduction,
-  setIntroduction,
-  moreLikeThis,
-  setMoreLikeThis,
-  suggestedQuestionsAfterAnswer,
-  setSuggestedQuestionsAfterAnswer,
-  speechToText,
-  setSpeechToText,
-  textToSpeech,
-  setTextToSpeech,
-  citation,
-  setCitation,
-  annotation,
-  setAnnotation,
-  moderation,
-  setModeration,
-}: {
-  introduction: string
-  setIntroduction: (introduction: string) => void
-  moreLikeThis: boolean
-  setMoreLikeThis: (moreLikeThis: boolean) => void
-  suggestedQuestionsAfterAnswer: boolean
-  setSuggestedQuestionsAfterAnswer: (suggestedQuestionsAfterAnswer: boolean) => void
-  speechToText: boolean
-  setSpeechToText: (speechToText: boolean) => void
-  textToSpeech: boolean
-  setTextToSpeech: (textToSpeech: boolean) => void
-  citation: boolean
-  setCitation: (citation: boolean) => void
-  annotation: boolean
-  setAnnotation: (annotation: boolean) => void
-  moderation: boolean
-  setModeration: (moderation: boolean) => void
-}) {
-  const [tempShowOpeningStatement, setTempShowOpeningStatement] = React.useState(!!introduction)
-  useEffect(() => {
-    // wait to api data back
-    if (introduction)
-      setTempShowOpeningStatement(true)
-  }, [introduction])
-
-  // const [tempMoreLikeThis, setTempMoreLikeThis] = React.useState(moreLikeThis)
-  // useEffect(() => {
-  //   setTempMoreLikeThis(moreLikeThis)
-  // }, [moreLikeThis])
-
-  const featureConfig = {
-    openingStatement: tempShowOpeningStatement,
-    moreLikeThis,
-    suggestedQuestionsAfterAnswer,
-    speechToText,
-    textToSpeech,
-    citation,
-    annotation,
-    moderation,
-  }
-  const handleFeatureChange = (key: string, value: boolean) => {
-    switch (key) {
-      case 'openingStatement':
-        if (!value)
-          setIntroduction('')
-
-        setTempShowOpeningStatement(value)
-        break
-      case 'moreLikeThis':
-        setMoreLikeThis(value)
-        break
-      case 'suggestedQuestionsAfterAnswer':
-        setSuggestedQuestionsAfterAnswer(value)
-        break
-      case 'speechToText':
-        setSpeechToText(value)
-        break
-      case 'textToSpeech':
-        setTextToSpeech(value)
-        break
-      case 'citation':
-        setCitation(value)
-        break
-      case 'annotation':
-        setAnnotation(value)
-        break
-      case 'moderation':
-        setModeration(value)
-    }
-  }
-  return {
-    featureConfig,
-    handleFeatureChange,
-  }
-}
-
-export default useFeature
diff --git a/web/app/components/app/configuration/config/index.tsx b/web/app/components/app/configuration/config/index.tsx
index d0375c6de9..7e130a4e95 100644
--- a/web/app/components/app/configuration/config/index.tsx
+++ b/web/app/components/app/configuration/config/index.tsx
@@ -2,7 +2,7 @@ import type { FC } from 'react'
 import React from 'react'
 import { useContext } from 'use-context-selector'
-import produce from 'immer'
+import { produce } from 'immer'
 import { useFormattingChangedDispatcher } from '../debug/hooks'
 import DatasetConfig from '../dataset-config'
 import HistoryPanel from '../config-prompt/conversation-history/history-panel'
diff --git a/web/app/components/app/configuration/dataset-config/index.tsx b/web/app/components/app/configuration/dataset-config/index.tsx
index 65ef74bc27..0c1b9349ae 100644
--- a/web/app/components/app/configuration/dataset-config/index.tsx
+++ b/web/app/components/app/configuration/dataset-config/index.tsx
@@ -4,7 +4,7 @@
 import React, { useCallback, useMemo } from 'react'
 import { useTranslation } from 'react-i18next'
 import { intersectionBy } from 'lodash-es'
 import { useContext } from 'use-context-selector'
-import produce from 'immer'
+import { produce } from 'immer'
 import { v4 as uuid4 } from 'uuid'
 import { useFormattingChangedDispatcher } from '../debug/hooks'
 import FeaturePanel from '../base/feature-panel'
diff --git a/web/app/components/app/configuration/debug/index.tsx b/web/app/components/app/configuration/debug/index.tsx
index 9a50d1b872..ac26b82525 100644
--- a/web/app/components/app/configuration/debug/index.tsx
+++ b/web/app/components/app/configuration/debug/index.tsx
@@ -2,7 +2,7 @@ import type { FC } from 'react'
 import { useTranslation } from 'react-i18next'
 import React, { useCallback, useEffect, useRef, useState } from 'react'
-import produce, { setAutoFreeze } from 'immer'
+import { produce, setAutoFreeze } from 'immer'
 import { useBoolean } from 'ahooks'
 import {
   RiAddLine,
diff --git a/web/app/components/app/configuration/hooks/use-advanced-prompt-config.ts b/web/app/components/app/configuration/hooks/use-advanced-prompt-config.ts
index 92958cc96d..0a6ac4bb2a 100644
--- a/web/app/components/app/configuration/hooks/use-advanced-prompt-config.ts
+++ b/web/app/components/app/configuration/hooks/use-advanced-prompt-config.ts
@@ -1,6 +1,6 @@
 import { useState } from 'react'
 import { clone } from 'lodash-es'
-import produce from 'immer'
+import { produce } from 'immer'
 import type { ChatPromptConfig, CompletionPromptConfig, ConversationHistoriesRole, PromptItem } from '@/models/debug'
 import { PromptMode } from '@/models/debug'
 import { ModelModeType } from '@/types/app'
diff --git a/web/app/components/app/configuration/index.tsx b/web/app/components/app/configuration/index.tsx
index 20229c9717..a1710c8f39 100644
--- a/web/app/components/app/configuration/index.tsx
+++ b/web/app/components/app/configuration/index.tsx
@@ -6,7 +6,7 @@ import { basePath } from '@/utils/var'
 import { useTranslation } from 'react-i18next'
 import { useContext } from 'use-context-selector'
 import { usePathname } from 'next/navigation'
-import produce from 'immer'
+import { produce } from 'immer'
 import { useBoolean, useGetState } from 'ahooks'
 import { clone, isEqual } from 'lodash-es'
 import { CodeBracketIcon } from '@heroicons/react/20/solid'
diff --git a/web/app/components/app/configuration/prompt-mode/advanced-mode-waring.tsx b/web/app/components/app/configuration/prompt-mode/advanced-mode-waring.tsx
deleted file mode 100644
index f207cddd16..0000000000
--- a/web/app/components/app/configuration/prompt-mode/advanced-mode-waring.tsx
+++ /dev/null
@@ -1,50 +0,0 @@
-'use client'
-import type { FC } from 'react'
-import React from 'react'
-import { useTranslation } from 'react-i18next'
-import { useDocLink } from '@/context/i18n'
-type Props = {
-  onReturnToSimpleMode: () => void
-}
-
-const AdvancedModeWarning: FC<Props> = ({
-  onReturnToSimpleMode,
-}) => {
-  const { t } = useTranslation()
-  const docLink = useDocLink()
-  const [show, setShow] = React.useState(true)
-  if (!show)
-    return null
-  return (
-    <div className='...'>
-      <div className='...'>{t('appDebug.promptMode.advancedWarning.title')}</div>
-      <div className='...'>
-        <span className='...'>
-          {t('appDebug.promptMode.advancedWarning.description')}
-        </span>
-        <a className='...' href={docLink('...')} target='_blank' rel='noopener noreferrer'>
-          {t('appDebug.promptMode.advancedWarning.learnMore')}
-        </a>
-      </div>
-      <div className='...'>
-        <div
-          className='...'
-          onClick={onReturnToSimpleMode}
-        >
-          <div className='...'>{t('appDebug.promptMode.switchBack')}</div>
-        </div>
-        <div
-          className='...'
-          onClick={() => setShow(false)}
-        >{t('appDebug.promptMode.advancedWarning.ok')}</div>
-      </div>
-    </div>
-  )
-}
-export default React.memo(AdvancedModeWarning)
diff --git a/web/app/components/app/workflow-log/detail.tsx b/web/app/components/app/workflow-log/detail.tsx
index 812438c0ed..7ce701dd68 100644
--- a/web/app/components/app/workflow-log/detail.tsx
+++ b/web/app/components/app/workflow-log/detail.tsx
@@ -1,9 +1,11 @@
 'use client'
 import type { FC } from 'react'
 import { useTranslation } from 'react-i18next'
-import { RiCloseLine } from '@remixicon/react'
+import { RiCloseLine, RiPlayLargeLine } from '@remixicon/react'
 import Run from '@/app/components/workflow/run'
 import { useStore } from '@/app/components/app/store'
+import TooltipPlus from '@/app/components/base/tooltip'
+import { useRouter } from 'next/navigation'
 
 type ILogDetail = {
   runID: string
@@ -13,13 +15,34 @@ const DetailPanel: FC<ILogDetail> = ({ runID, onClose }) => {
   const { t } = useTranslation()
   const appDetail = useStore(state => state.appDetail)
+  const router = useRouter()
+
+  const handleReplay = () => {
+    if (!appDetail?.id) return
+    router.push(`/app/${appDetail.id}/workflow?replayRunId=${runID}`)
+  }
 
   return (
-    <h1 className='...'>
-      {t('appLog.runDetail.workflowTitle')}
-    </h1>
+    <div className='...'>
+      <h1 className='...'>
+        {t('appLog.runDetail.workflowTitle')}
+      </h1>
+      <TooltipPlus popupContent={t('...')}>
+        <div className='...' onClick={handleReplay}>
+          <RiPlayLargeLine className='...' />
+        </div>
+      </TooltipPlus>
+    </div>
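The replay button deep-links from the log detail panel into the workflow editor by encoding the run id in a `replayRunId` query parameter. Only the sending side (`handleReplay`) appears in this diff; a hedged sketch of how a consuming component might read the parameter back (the component and its `onReplay` callback are illustrative, not part of this change):

    'use client'
    // Illustrative consumer of the replayRunId query parameter written by handleReplay;
    // the real workflow page is not part of this diff.
    import { useSearchParams } from 'next/navigation'
    import { useEffect } from 'react'

    const WorkflowReplayLoader = ({ onReplay }: { onReplay: (runId: string) => void }) => {
      const searchParams = useSearchParams()

      useEffect(() => {
        const runId = searchParams.get('replayRunId')
        if (runId)
          onReplay(runId) // e.g. fetch that run and restore its inputs into the editor
      }, [searchParams, onReplay])

      return null
    }

    export default WorkflowReplayLoader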
diff --git a/web/app/components/base/auto-height-textarea/index.tsx b/web/app/components/base/auto-height-textarea/index.tsx
index ...
--- a/web/app/components/base/auto-height-textarea/index.tsx
+++ b/web/app/components/base/auto-height-textarea/index.tsx
@@ ... @@
-type AutoHeightTextareaProps =
-  & React.DetailedHTMLProps<React.TextareaHTMLAttributes<HTMLTextAreaElement>, HTMLTextAreaElement>
-  & { outerClassName?: string }
-
-const AutoHeightTextarea = (
-  {
-    ref: outRef,
-    outerClassName,
-    value,
-    className,
-    placeholder,
-    autoFocus,
-    disabled,
-    ...rest
-  }: AutoHeightTextareaProps & {
-    ref: React.RefObject<HTMLTextAreaElement>;
-  },
-) => {
-  const innerRef = useRef<HTMLTextAreaElement>(null)
-  const ref = outRef || innerRef
-
-  useEffect(() => {
-    if (autoFocus && !disabled && value) {
-      if (typeof ref !== 'function') {
-        ref.current?.setSelectionRange(`${value}`.length, `${value}`.length)
-        ref.current?.focus()
-      }
-    }
-  }, [autoFocus, disabled, ref])
-
-  return (
-    (<div className='...'>
-      <div className='...'>
-        {!value ? placeholder : `${value}`.replace(/\n$/, '\n ')}
-      </div>
-