[Log] Add trace log and add loggingInstrumentor tool (#4692)

* Add trace logger and trace print

* Trigger CI

* Fix unit tests

* Translate notes to English and add copyright headers

---------

Co-authored-by: Jiang-Jia-Jun <163579578+Jiang-Jia-Jun@users.noreply.github.com>
Co-authored-by: YuBaoku <49938469+EmmonsCurse@users.noreply.github.com>
This commit is contained in:
qwes5s5
2025-11-17 11:08:57 +08:00
committed by GitHub
parent 5444af6ff6
commit 36216e62f0
21 changed files with 941 additions and 43 deletions
@@ -40,6 +40,8 @@ from fastdeploy.entrypoints.openai.protocol import (
)
from fastdeploy.entrypoints.openai.response_processors import ChatResponseProcessor
from fastdeploy.metrics.work_metrics import work_process_metrics
from fastdeploy.trace.constants import LoggingEventName
from fastdeploy.trace.trace_logger import print as trace_print
from fastdeploy.utils import (
ErrorCode,
ErrorType,
@@ -448,6 +450,7 @@ class OpenAIServingChat:
finally:
await self.engine_client.connection_manager.cleanup_request(request_id)
self.engine_client.semaphore.release()
trace_print(LoggingEventName.POSTPROCESSING_END, request_id, getattr(request, "user", ""))
api_server_logger.info(f"release {request_id} {self.engine_client.semaphore.status()}")
yield "data: [DONE]\n\n"
@@ -599,6 +602,7 @@ class OpenAIServingChat:
choices=choices,
usage=usage,
)
trace_print(LoggingEventName.POSTPROCESSING_END, request_id, getattr(request, "user", ""))
api_server_logger.info(f"Chat response: {res.model_dump_json()}")
return res