[Feature] Log Format Normalization and Trace Log Optimization (#6370)

* log refactor

* log refactor 2

* log refactor 3
This commit is contained in:
qwes5s5
2026-03-03 11:31:45 +08:00
committed by GitHub
parent 1cae7a0d53
commit 375b5b7b21
15 changed files with 870 additions and 80 deletions
+63 -4
View File
@@ -14,6 +14,58 @@
# limitations under the License.
"""
# Configure the root logger before any module that logs is imported, so
# every logger created in this process shares a single output format.
import logging
from contextlib import contextmanager

# Plain-text (colorless) format applied to every handler we install.
_root_formatter = logging.Formatter(
    "%(levelname)-8s %(asctime)s %(process)-5s %(filename)s[line:%(lineno)d] %(message)s"
)

# Keep a reference to the untouched getLogger so the patching done later
# in this module stays reversible and recursion-free.
_original_getLogger = logging.getLogger
@contextmanager
def _intercept_paddle_loggers():
    """Temporarily patch ``logging.getLogger`` while paddle is imported.

    Inside the ``with`` body, any logger whose name starts with
    ``"paddle"`` is routed through ``_configure_logger`` so it picks up
    the unified format the moment it is first requested; every other
    name falls through to the original ``getLogger``. The patch is
    always undone on exit, even if the guarded import raises.
    """

    def _routed_get_logger(name=None):
        # Only non-empty names beginning with "paddle" get the unified config.
        if name and str(name).startswith("paddle"):
            return _configure_logger(name)
        return _original_getLogger(name)

    logging.getLogger = _routed_get_logger
    try:
        yield
    finally:
        # Restore the pristine getLogger no matter what happened above.
        logging.getLogger = _original_getLogger
def _configure_logger(name=None):
    """(Re)configure a logger to use the unified process-wide format.

    Any pre-existing handlers are detached and closed — closing matters
    because e.g. a ``FileHandler`` holds an open file descriptor that
    would otherwise leak — and replaced with a single ``StreamHandler``
    using ``_root_formatter``. Propagation is disabled so records are
    not emitted a second time through ancestor loggers.

    Args:
        name: Logger name. If None, configures the root logger.

    Returns:
        logging.Logger: the configured logger.
    """
    # Use the saved original getLogger to avoid infinite recursion while
    # _intercept_paddle_loggers has logging.getLogger patched.
    logger = _original_getLogger(name)
    logger.setLevel(logging.INFO)
    for handler in logger.handlers[:]:
        logger.removeHandler(handler)
        # Release underlying resources (open log files, etc.) held by the
        # handler we just detached.
        handler.close()
    handler = logging.StreamHandler()
    handler.setFormatter(_root_formatter)
    logger.addHandler(handler)
    logger.propagate = False
    return logger
# Apply the unified format to the root logger at import time, before any
# other module gets a chance to install its own handlers.
_configure_logger()
import os
import uuid
@@ -31,7 +83,8 @@ if os.getenv("PROMETHEUS_MULTIPROC_DIR", "") == "":
import typing
import paddle
with _intercept_paddle_loggers():
import paddle
# first import prometheus setup to set PROMETHEUS_MULTIPROC_DIR
# otherwise, the Prometheus package will be imported first,
@@ -45,6 +98,13 @@ setup_multiprocess_prometheus()
from paddleformers.utils.log import logger as pf_logger
# Give the "paddleformers" logger hierarchy the unified format.
_configure_logger("paddleformers")

# pf_logger may wrap a real logging.Logger; when it does, configure that
# inner logger by name as well.
_pf_inner = getattr(pf_logger, "logger", None)
if isinstance(_pf_inner, logging.Logger):
    _configure_logger(_pf_inner.name)
from fastdeploy.engine.sampling_params import SamplingParams
from fastdeploy.entrypoints.llm import LLM
from fastdeploy.utils import (
@@ -66,9 +126,8 @@ paddle.compat.enable_torch_proxy(scope={"triton"})
if envs.FD_DEBUG != 1:
import logging
pf_logger.logger.setLevel(logging.INFO)
# Log level has been configured above
pass
try:
import use_triton_in_paddle