Fix: replace stdout prints with debug logging (#25931)

Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
This commit is contained in:
-LAN- 2025-09-18 21:03:20 +08:00 committed by GitHub
parent aa69d90489
commit a173546c8d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 10 additions and 4 deletions

View File

@@ -7,7 +7,7 @@ _logger = logging.getLogger(__name__)
 def _log(message: str):
-    print(message, flush=True)
+    _logger.debug(message)
 # grpc gevent

View File

@@ -1544,7 +1544,7 @@ def transform_datasource_credentials():
                 if jina_plugin_id not in installed_plugins_ids:
                     if jina_plugin_unique_identifier:
                         # install jina plugin
-                        print(jina_plugin_unique_identifier)
+                        logger.debug("Installing Jina plugin %s", jina_plugin_unique_identifier)
                         PluginService.install_from_marketplace_pkg(tenant_id, [jina_plugin_unique_identifier])
         auth_count = 0

View File

@@ -1,4 +1,5 @@
 import json
+import logging
 import threading
 from collections.abc import Mapping, MutableMapping
 from pathlib import Path
@@ -8,6 +9,8 @@ from typing import Any, ClassVar, Optional
 class SchemaRegistry:
     """Schema registry manages JSON schemas with version support"""

+    logger: ClassVar[logging.Logger] = logging.getLogger(__name__)
+
     _default_instance: ClassVar[Optional["SchemaRegistry"]] = None
     _lock: ClassVar[threading.Lock] = threading.Lock()
@@ -83,7 +86,7 @@ class SchemaRegistry:
                 self.metadata[uri] = metadata
             except (OSError, json.JSONDecodeError) as e:
-                print(f"Warning: failed to load schema {version}/{schema_name}: {e}")
+                self.logger.warning("Failed to load schema %s/%s: %s", version, schema_name, e)

     def get_schema(self, uri: str) -> Any | None:
         """Retrieves a schema by URI with version support"""

View File

@@ -1,4 +1,5 @@
 import json
+import logging
 from datetime import UTC, datetime
 from pathlib import Path
 from uuid import uuid4
@@ -17,6 +18,8 @@ from services.entities.knowledge_entities.rag_pipeline_entities import Knowledge
 from services.plugin.plugin_migration import PluginMigration
 from services.plugin.plugin_service import PluginService

+logger = logging.getLogger(__name__)
+
 class RagPipelineTransformService:
     def transform_dataset(self, dataset_id: str):
@@ -257,7 +260,7 @@ class RagPipelineTransformService:
                     if plugin_unique_identifier:
                         need_install_plugin_unique_identifiers.append(plugin_unique_identifier)
                     if need_install_plugin_unique_identifiers:
-                        print(need_install_plugin_unique_identifiers)
+                        logger.debug("Installing missing pipeline plugins %s", need_install_plugin_unique_identifiers)
                         PluginService.install_from_marketplace_pkg(tenant_id, need_install_plugin_unique_identifiers)
def _transfrom_to_empty_pipeline(self, dataset: Dataset): def _transfrom_to_empty_pipeline(self, dataset: Dataset):