refactor: replace print statements with proper logging (#25773)

Authored by -LAN- on 2025-09-18 20:35:47 +08:00, committed by GitHub
parent bb01c31f30
commit 4ba1292455
GPG Key ID: B5690EEEBB952194
8 changed files with 30 additions and 12 deletions
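Every changed file follows the same pattern: a module-level logger obtained from logging.getLogger(__name__) and %-style placeholder arguments instead of print with f-strings. A minimal sketch of that pattern, with an illustrative function name not taken from the commit:

import logging

# One logger per module, named after the module path so records can be filtered by package.
logger = logging.getLogger(__name__)

def fix_app(app_id: str) -> None:
    # The message is only interpolated if the record is actually emitted.
    logger.info("App %s not found", app_id)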


@@ -30,6 +30,7 @@ select = [
"RUF022", # unsorted-dunder-all
"S506", # unsafe-yaml-load
"SIM", # flake8-simplify rules
"T201", # print-found
"TRY400", # error-instead-of-exception
"TRY401", # verbose-log-message
"UP", # pyupgrade rules
@@ -91,11 +92,18 @@ ignore = [
"configs/*" = [
"N802", # invalid-function-name
]
"core/model_runtime/callbacks/base_callback.py" = [
"T201",
]
"core/workflow/callbacks/workflow_logging_callback.py" = [
"T201",
]
"libs/gmpy2_pkcs10aep_cipher.py" = [
"N803", # invalid-argument-name
]
"tests/*" = [
"F811", # redefined-while-unused
"T201", # allow print in tests
]
[lint.pyflakes]
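Enabling T201 makes ruff report every bare print call, while the per-file ignores keep print allowed where it is intentional (console-style callbacks and tests). A hedged sketch of what the rule flags and of the accepted replacement; the function below is made up for illustration:

import logging

logger = logging.getLogger(__name__)

def report_progress(step: int) -> None:
    # print(f"finished step {step}")  # would be flagged by T201 (`print` found)
    logger.info("finished step %d", step)  # passes the rule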


@@ -739,18 +739,18 @@ where sites.id is null limit 1000"""
try:
app = db.session.query(App).where(App.id == app_id).first()
if not app:
print(f"App {app_id} not found")
logger.info("App %s not found", app_id)
continue
tenant = app.tenant
if tenant:
accounts = tenant.get_accounts()
if not accounts:
print(f"Fix failed for app {app.id}")
logger.info("Fix failed for app %s", app.id)
continue
account = accounts[0]
print(f"Fixing missing site for app {app.id}")
logger.info("Fixing missing site for app %s", app.id)
app_was_created.send(app, account=account)
except Exception:
failed_app_ids.append(app_id)
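The replacements keep %s placeholders rather than f-strings: an f-string builds the message eagerly even when the level is filtered out, whereas placeholder arguments are interpolated by the logging framework only when the record is handled. A small, self-contained contrast (the app id is a placeholder value, not from the commit):

import logging

logger = logging.getLogger(__name__)
app_id = "example-app-id"  # illustrative value

# Eager: the f-string is rendered before logging decides whether to keep the record.
logger.info(f"Fixing missing site for app {app_id}")

# Lazy: interpolation is deferred to the framework and skipped below the INFO threshold.
logger.info("Fixing missing site for app %s", app_id)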


@@ -417,7 +417,7 @@ class WeaveDataTrace(BaseTraceInstance):
if not login_status:
raise ValueError("Weave login failed")
else:
print("Weave login successful")
logger.info("Weave login successful")
return True
except Exception as e:
logger.debug("Weave API check failed: %s", str(e))


@@ -229,7 +229,7 @@ class OceanBaseVector(BaseVector):
try:
metadata = json.loads(metadata_str)
except json.JSONDecodeError:
print(f"Invalid JSON metadata: {metadata_str}")
logger.warning("Invalid JSON metadata: %s", metadata_str)
metadata = {}
metadata["score"] = score
docs.append(Document(page_content=_text, metadata=metadata))


@@ -1,5 +1,6 @@
import array
import json
import logging
import re
import uuid
from typing import Any
@@ -19,6 +20,8 @@ from core.rag.models.document import Document
from extensions.ext_redis import redis_client
from models.dataset import Dataset
logger = logging.getLogger(__name__)
oracledb.defaults.fetch_lobs = False
@@ -180,8 +183,8 @@ class OracleVector(BaseVector):
value,
)
conn.commit()
except Exception as e:
print(e)
except Exception:
logger.exception("Failed to insert record %s into %s", value[0], self.table_name)
conn.close()
return pks
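Inside the except blocks, logger.exception replaces print(e): it records the message at ERROR level and appends the traceback of the active exception, which a plain print of the exception object discards. A standalone sketch of that behaviour, unrelated to the vector store code:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

try:
    1 / 0
except Exception:
    # Emits the message at ERROR level plus the current traceback;
    # only meaningful inside an except block.
    logger.exception("Failed to insert record")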


@@ -1,4 +1,5 @@
import json
import logging
import uuid
from typing import Any
@@ -23,6 +24,8 @@ from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.models.document import Document
from extensions.ext_redis import redis_client
logger = logging.getLogger(__name__)
Base = declarative_base() # type: Any
@@ -187,8 +190,8 @@ class RelytVector(BaseVector):
delete_condition = chunks_table.c.id.in_(ids)
conn.execute(chunks_table.delete().where(delete_condition))
return True
except Exception as e:
print("Delete operation failed:", str(e))
except Exception:
logger.exception("Delete operation failed for collection %s", self._collection_name)
return False
def delete_by_metadata_field(self, key: str, value: str):


@@ -164,8 +164,8 @@ class TiDBVector(BaseVector):
delete_condition = table.c.id.in_(ids)
conn.execute(table.delete().where(delete_condition))
return True
except Exception as e:
print("Delete operation failed:", str(e))
except Exception:
logger.exception("Delete operation failed for collection %s", self._collection_name)
return False
def get_ids_by_metadata_field(self, key: str, value: str):


@@ -46,7 +46,11 @@ limit 1000"""
record_id = str(i.id)
provider_name = str(i.provider_name)
retrieval_model = i.retrieval_model
print(type(retrieval_model))
logger.debug(
"Processing dataset %s with retrieval model of type %s",
record_id,
type(retrieval_model),
)
if record_id in failed_ids:
continue
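The type() diagnostic is demoted to DEBUG, so it only appears when the logging configuration enables that level. A short sketch of surfacing such a record; the format string and values are illustrative, not part of the commit:

import logging

# The default WARNING threshold hides DEBUG records; enable them explicitly.
logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s %(levelname)s %(name)s: %(message)s",
)

logging.getLogger(__name__).debug(
    "Processing dataset %s with retrieval model of type %s", "record-id", dict
)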