mirror of
https://github.com/PaddlePaddle/FastDeploy.git
synced 2026-04-23 00:17:25 +08:00
[Feature] implement log channel separation and request log level system (#7190)
* feat: implement log channel separation and request log level system * fix: log system improvements based on review * add request_id to error logs, use RequestLogLevel enum, and unify logger implementation from utils to logger module
This commit is contained in:
@@ -46,8 +46,12 @@ class LoggerTests(unittest.TestCase):
|
||||
shutil.rmtree(self.tmp_dir, ignore_errors=True)
|
||||
|
||||
def test_unified_logger(self):
|
||||
"""Test _get_unified_logger through instance"""
|
||||
test_cases = [(None, "fastdeploy"), ("module", "fastdeploy.module"), ("fastdeploy.utils", "fastdeploy.utils")]
|
||||
"""Test _get_unified_logger through instance (uses main channel)"""
|
||||
test_cases = [
|
||||
(None, "fastdeploy.main"),
|
||||
("module", "fastdeploy.main.module"),
|
||||
("fastdeploy.utils", "fastdeploy.utils"), # 已有 fastdeploy. 前缀的保持不变
|
||||
]
|
||||
|
||||
for name, expected in test_cases:
|
||||
with self.subTest(name=name):
|
||||
|
||||
@@ -0,0 +1,91 @@
|
||||
# Copyright (c) 2025 PaddlePaddle Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
|
||||
from fastdeploy.logger.config import resolve_log_level, resolve_request_logging_defaults
|
||||
|
||||
|
||||
class TestResolveLogLevel(unittest.TestCase):
    """Tests for the resolve_log_level helper."""

    def test_explicit_info_level(self):
        """An explicitly requested INFO level is returned as-is."""
        self.assertEqual(resolve_log_level(raw_level="INFO"), "INFO")

    def test_explicit_debug_level(self):
        """An explicitly requested DEBUG level is returned as-is."""
        self.assertEqual(resolve_log_level(raw_level="DEBUG"), "DEBUG")

    def test_case_insensitive(self):
        """Level names are matched case-insensitively."""
        for raw, normalized in (("info", "INFO"), ("debug", "DEBUG")):
            with self.subTest(raw=raw):
                self.assertEqual(resolve_log_level(raw_level=raw), normalized)

    def test_invalid_level_raises(self):
        """An unknown level name raises ValueError with a helpful message."""
        with self.assertRaises(ValueError) as caught:
            resolve_log_level(raw_level="INVALID")
        self.assertIn("Unsupported FD_LOG_LEVEL", str(caught.exception))

    def test_debug_enabled_fallback(self):
        """With no explicit level, debug_enabled=1 falls back to DEBUG."""
        self.assertEqual(resolve_log_level(raw_level=None, debug_enabled=1), "DEBUG")

    def test_debug_disabled_fallback(self):
        """With no explicit level, debug_enabled=0 falls back to INFO."""
        self.assertEqual(resolve_log_level(raw_level=None, debug_enabled=0), "INFO")

    def test_env_fd_log_level_priority(self):
        """FD_LOG_LEVEL takes precedence over FD_DEBUG."""
        env = {"FD_LOG_LEVEL": "INFO", "FD_DEBUG": "1"}
        with patch.dict("os.environ", env):
            self.assertEqual(resolve_log_level(), "INFO")

    def test_env_fd_debug_fallback(self):
        """Without FD_LOG_LEVEL, FD_DEBUG drives the resolved level."""
        with patch.dict("os.environ", {"FD_DEBUG": "1"}, clear=True):
            self.assertEqual(resolve_log_level(), "DEBUG")
|
||||
|
||||
|
||||
class TestResolveRequestLoggingDefaults(unittest.TestCase):
    """Tests for the resolve_request_logging_defaults helper."""

    def test_default_values(self):
        """With an empty environment, the documented defaults apply."""
        with patch.dict("os.environ", {}, clear=True):
            defaults = resolve_request_logging_defaults()
        self.assertEqual(defaults["enabled"], 1)
        self.assertEqual(defaults["level"], 2)
        self.assertEqual(defaults["max_len"], 2048)

    def test_custom_values(self):
        """Environment overrides are picked up for every field."""
        overrides = {"FD_LOG_REQUESTS": "0", "FD_LOG_REQUESTS_LEVEL": "2", "FD_LOG_MAX_LEN": "1024"}
        with patch.dict("os.environ", overrides):
            resolved = resolve_request_logging_defaults()
        self.assertEqual(resolved["enabled"], 0)
        self.assertEqual(resolved["level"], 2)
        self.assertEqual(resolved["max_len"], 1024)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly as a script.
    unittest.main()
|
||||
@@ -0,0 +1,176 @@
|
||||
# Copyright (c) 2025 PaddlePaddle Authors. All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
|
||||
from fastdeploy.logger.request_logger import (
|
||||
RequestLogLevel,
|
||||
_should_log,
|
||||
_truncate,
|
||||
log_request,
|
||||
log_request_error,
|
||||
)
|
||||
|
||||
|
||||
class TestRequestLogLevel(unittest.TestCase):
    """Test RequestLogLevel enum"""

    def test_level_values(self):
        """Each enum member maps to its expected integer value."""
        expected = {
            RequestLogLevel.LIFECYCLE: 0,
            RequestLogLevel.STAGES: 1,
            RequestLogLevel.CONTENT: 2,
            RequestLogLevel.FULL: 3,
        }
        for member, value in expected.items():
            with self.subTest(member=member):
                self.assertEqual(int(member), value)
|
||||
|
||||
|
||||
class TestShouldLog(unittest.TestCase):
    """Test _should_log function"""

    def test_disabled_returns_false(self):
        """With FD_LOG_REQUESTS=0, nothing is logged regardless of level."""
        with patch("fastdeploy.logger.request_logger.envs") as fake_envs:
            fake_envs.FD_LOG_REQUESTS = 0
            fake_envs.FD_LOG_REQUESTS_LEVEL = 3
            self.assertFalse(_should_log(RequestLogLevel.LIFECYCLE))

    def test_level_within_threshold(self):
        """Levels at or below the configured threshold are logged."""
        with patch("fastdeploy.logger.request_logger.envs") as fake_envs:
            fake_envs.FD_LOG_REQUESTS = 1
            fake_envs.FD_LOG_REQUESTS_LEVEL = 2
            for level in (RequestLogLevel.LIFECYCLE, RequestLogLevel.STAGES, RequestLogLevel.CONTENT):
                self.assertTrue(_should_log(level))

    def test_level_above_threshold(self):
        """Levels above the configured threshold are suppressed."""
        with patch("fastdeploy.logger.request_logger.envs") as fake_envs:
            fake_envs.FD_LOG_REQUESTS = 1
            fake_envs.FD_LOG_REQUESTS_LEVEL = 1
            for level in (RequestLogLevel.CONTENT, RequestLogLevel.FULL):
                self.assertFalse(_should_log(level))
|
||||
|
||||
|
||||
class TestTruncate(unittest.TestCase):
    """Test _truncate function"""

    def test_short_text_unchanged(self):
        """Text shorter than the limit passes through untouched."""
        with patch("fastdeploy.logger.request_logger.envs") as fake_envs:
            fake_envs.FD_LOG_MAX_LEN = 100
            self.assertEqual(_truncate("short text"), "short text")

    def test_long_text_truncated(self):
        """Text longer than the limit is cut to exactly FD_LOG_MAX_LEN chars."""
        with patch("fastdeploy.logger.request_logger.envs") as fake_envs:
            fake_envs.FD_LOG_MAX_LEN = 10
            truncated = _truncate("this is a very long text")
            self.assertEqual(truncated, "this is a ")
            self.assertEqual(len(truncated), 10)

    def test_non_string_converted(self):
        """Non-string input is stringified before the length check."""
        with patch("fastdeploy.logger.request_logger.envs") as fake_envs:
            fake_envs.FD_LOG_MAX_LEN = 100
            self.assertEqual(_truncate(12345), "12345")
|
||||
|
||||
|
||||
class TestLogRequest(unittest.TestCase):
    """Test log_request function"""

    @patch("fastdeploy.logger._request_logger")
    def test_log_when_enabled(self, mock_logger):
        """When logging is on and the level qualifies, the message is emitted."""
        with patch("fastdeploy.logger.request_logger.envs") as fake_envs:
            fake_envs.FD_LOG_REQUESTS = 1
            fake_envs.FD_LOG_REQUESTS_LEVEL = 0
            fake_envs.FD_LOG_MAX_LEN = 2048

            log_request(RequestLogLevel.LIFECYCLE, message="test {value}", value="hello")
            mock_logger.info.assert_called_once()
            self.assertEqual(mock_logger.info.call_args[0][0], "test hello")

    @patch("fastdeploy.logger._request_logger")
    def test_no_log_when_disabled(self, mock_logger):
        """With FD_LOG_REQUESTS=0 nothing reaches the logger."""
        with patch("fastdeploy.logger.request_logger.envs") as fake_envs:
            fake_envs.FD_LOG_REQUESTS = 0
            fake_envs.FD_LOG_REQUESTS_LEVEL = 3

            log_request(RequestLogLevel.LIFECYCLE, message="test {value}", value="hello")
            mock_logger.info.assert_not_called()

    @patch("fastdeploy.logger._request_logger")
    def test_no_log_when_level_too_high(self, mock_logger):
        """A message above the configured level threshold is dropped."""
        with patch("fastdeploy.logger.request_logger.envs") as fake_envs:
            fake_envs.FD_LOG_REQUESTS = 1
            fake_envs.FD_LOG_REQUESTS_LEVEL = 0

            log_request(RequestLogLevel.CONTENT, message="test {value}", value="hello")
            mock_logger.info.assert_not_called()

    @patch("fastdeploy.logger._request_logger")
    def test_content_level_truncates_content(self, mock_logger):
        """CONTENT-level field values are truncated to FD_LOG_MAX_LEN chars."""
        with patch("fastdeploy.logger.request_logger.envs") as fake_envs:
            fake_envs.FD_LOG_REQUESTS = 1
            fake_envs.FD_LOG_REQUESTS_LEVEL = 3
            fake_envs.FD_LOG_MAX_LEN = 5

            log_request(RequestLogLevel.CONTENT, message="content: {data}", data="very long data")
            mock_logger.info.assert_called_once()
            self.assertEqual(mock_logger.info.call_args[0][0], "content: very ")

    @patch("fastdeploy.logger._request_logger")
    def test_lifecycle_level_no_truncation(self, mock_logger):
        """LIFECYCLE-level field values are logged in full, ignoring FD_LOG_MAX_LEN."""
        with patch("fastdeploy.logger.request_logger.envs") as fake_envs:
            fake_envs.FD_LOG_REQUESTS = 1
            fake_envs.FD_LOG_REQUESTS_LEVEL = 3
            fake_envs.FD_LOG_MAX_LEN = 5

            log_request(RequestLogLevel.LIFECYCLE, message="content: {data}", data="very long data")
            mock_logger.info.assert_called_once()
            self.assertEqual(mock_logger.info.call_args[0][0], "content: very long data")
|
||||
|
||||
|
||||
class TestLogRequestError(unittest.TestCase):
    """Test log_request_error function"""

    @patch("fastdeploy.logger._request_logger")
    def test_error_with_fields(self, mock_logger):
        """Keyword fields are interpolated into the error message."""
        log_request_error(message="request {request_id} failed: {error}", request_id="req-123", error="timeout")
        mock_logger.error.assert_called_once()
        self.assertEqual(
            mock_logger.error.call_args[0][0],
            "request req-123 failed: timeout",
        )

    @patch("fastdeploy.logger._request_logger")
    def test_error_without_fields(self, mock_logger):
        """A plain message with no fields is emitted verbatim."""
        log_request_error(message="simple error message")
        mock_logger.error.assert_called_once()
        self.assertEqual(mock_logger.error.call_args[0][0], "simple error message")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly as a script.
    unittest.main()
|
||||
@@ -43,58 +43,18 @@ class TestSetupLogging(unittest.TestCase):
|
||||
delattr(setup_logging, "_configured")
|
||||
|
||||
def test_log_dir_created(self):
|
||||
"""Log directory should be created"""
|
||||
nested = os.path.join(self.temp_dir, "a", "b", "c")
|
||||
setup_logging(log_dir=nested)
|
||||
self.assertTrue(Path(nested).is_dir())
|
||||
|
||||
def test_default_config_fallback(self):
|
||||
"""Pass a non-existent config_file to trigger default_config"""
|
||||
fake_cfg = os.path.join(self.temp_dir, "no_such_cfg.json")
|
||||
setup_logging(config_file=fake_cfg)
|
||||
logger = logging.getLogger("fastdeploy")
|
||||
self.assertTrue(logger.handlers)
|
||||
handler_classes = [h.__class__.__name__ for h in logger.handlers]
|
||||
self.assertIn("TimedRotatingFileHandler", handler_classes)
|
||||
|
||||
def test_debug_level_affects_handlers(self):
|
||||
"""FD_DEBUG=1 should force DEBUG level"""
|
||||
with patch("fastdeploy.envs.FD_DEBUG", 1):
|
||||
with patch("logging.config.dictConfig") as mock_cfg:
|
||||
setup_logging()
|
||||
called_config = mock_cfg.call_args[0][0]
|
||||
for handler in called_config["handlers"].values():
|
||||
self.assertIn("formatter", handler)
|
||||
self.assertEqual(called_config["handlers"]["console_stdout"]["level"], "DEBUG")
|
||||
|
||||
@patch("logging.config.dictConfig")
|
||||
def test_custom_config_with_dailyrotating_and_debug(self, mock_dict):
|
||||
custom_cfg = {
|
||||
"version": 1,
|
||||
"handlers": {
|
||||
"daily": {
|
||||
"class": "logging.handlers.DailyRotatingFileHandler",
|
||||
"level": "INFO",
|
||||
"formatter": "plain",
|
||||
}
|
||||
},
|
||||
"loggers": {"fastdeploy": {"handlers": ["daily"], "level": "INFO"}},
|
||||
}
|
||||
cfg_path = Path(self.temp_dir) / "cfg.json"
|
||||
cfg_path.write_text(json.dumps(custom_cfg))
|
||||
|
||||
with patch("fastdeploy.envs.FD_DEBUG", 1):
|
||||
setup_logging(config_file=str(cfg_path))
|
||||
|
||||
config_used = mock_dict.call_args[0][0]
|
||||
self.assertIn("daily", config_used["handlers"])
|
||||
self.assertEqual(config_used["handlers"]["daily"]["level"], "DEBUG")
|
||||
self.assertIn("backupCount", config_used["handlers"]["daily"])
|
||||
|
||||
def test_configure_once(self):
|
||||
"""Ensure idempotent setup"""
|
||||
l1 = setup_logging()
|
||||
l2 = setup_logging()
|
||||
self.assertIs(l1, l2)
|
||||
"""Ensure idempotent setup - only configures once"""
|
||||
setup_logging()
|
||||
self.assertTrue(setup_logging._configured)
|
||||
# Second call should not raise
|
||||
setup_logging()
|
||||
self.assertTrue(setup_logging._configured)
|
||||
|
||||
def test_envs_priority_used_for_log_dir(self):
|
||||
"""When log_dir=None, should use envs.FD_LOG_DIR"""
|
||||
@@ -102,15 +62,45 @@ class TestSetupLogging(unittest.TestCase):
|
||||
setup_logging()
|
||||
self.assertTrue(os.path.exists(self.temp_dir))
|
||||
|
||||
@patch("logging.StreamHandler.emit")
|
||||
def test_console_colored(self, mock_emit):
|
||||
setup_logging()
|
||||
logger = logging.getLogger("fastdeploy")
|
||||
logger.error("color test")
|
||||
self.assertTrue(mock_emit.called)
|
||||
def test_log_dir_stored(self):
|
||||
"""setup_logging should store log_dir for later use"""
|
||||
setup_logging(log_dir=self.temp_dir)
|
||||
self.assertEqual(setup_logging._log_dir, self.temp_dir)
|
||||
|
||||
@patch("logging.config.dictConfig")
|
||||
def test_backup_count_merging(self, mock_dict):
|
||||
def test_no_config_file_no_dictconfig(self):
|
||||
"""When config_file is not provided, dictConfig should not be called"""
|
||||
with patch("logging.config.dictConfig") as mock_dict:
|
||||
setup_logging()
|
||||
self.assertFalse(mock_dict.called)
|
||||
|
||||
def test_config_file_with_dictconfig(self):
|
||||
"""When config_file is provided, dictConfig should be called"""
|
||||
custom_cfg = {
|
||||
"version": 1,
|
||||
"handlers": {},
|
||||
"loggers": {},
|
||||
}
|
||||
cfg_path = Path(self.temp_dir) / "cfg.json"
|
||||
cfg_path.write_text(json.dumps(custom_cfg))
|
||||
|
||||
with patch("logging.config.dictConfig") as mock_dict:
|
||||
setup_logging(config_file=str(cfg_path))
|
||||
self.assertTrue(mock_dict.called)
|
||||
|
||||
def test_config_file_not_exists_uses_default(self):
|
||||
"""When config_file doesn't exist, use default config"""
|
||||
fake_cfg = os.path.join(self.temp_dir, "no_such_cfg.json")
|
||||
|
||||
with patch("logging.config.dictConfig") as mock_dict:
|
||||
setup_logging(config_file=fake_cfg)
|
||||
self.assertTrue(mock_dict.called)
|
||||
# Should use default config
|
||||
config_used = mock_dict.call_args[0][0]
|
||||
self.assertIn("handlers", config_used)
|
||||
self.assertIn("loggers", config_used)
|
||||
|
||||
def test_backup_count_merging(self):
|
||||
"""backupCount should be merged into handler config"""
|
||||
custom_cfg = {
|
||||
"version": 1,
|
||||
"handlers": {"daily": {"class": "logging.handlers.DailyRotatingFileHandler", "formatter": "plain"}},
|
||||
@@ -119,33 +109,77 @@ class TestSetupLogging(unittest.TestCase):
|
||||
cfg_path = Path(self.temp_dir) / "cfg.json"
|
||||
cfg_path.write_text(json.dumps(custom_cfg))
|
||||
|
||||
setup_logging(config_file=str(cfg_path))
|
||||
with patch("logging.config.dictConfig") as mock_dict:
|
||||
setup_logging(config_file=str(cfg_path))
|
||||
config_used = mock_dict.call_args[0][0]
|
||||
self.assertEqual(config_used["handlers"]["daily"]["backupCount"], 3)
|
||||
|
||||
config_used = mock_dict.call_args[0][0]
|
||||
self.assertEqual(config_used["handlers"]["daily"]["backupCount"], 3)
|
||||
def test_debug_level_affects_handlers(self):
|
||||
"""FD_DEBUG=1 should force DEBUG level in handlers"""
|
||||
custom_cfg = {
|
||||
"version": 1,
|
||||
"handlers": {"test": {"class": "logging.StreamHandler", "level": "INFO"}},
|
||||
"loggers": {},
|
||||
}
|
||||
cfg_path = Path(self.temp_dir) / "cfg.json"
|
||||
cfg_path.write_text(json.dumps(custom_cfg))
|
||||
|
||||
@patch("logging.config.dictConfig")
|
||||
def test_error_logs_use_stderr_handler(self, mock_dict):
|
||||
"""ERROR级别日志应该使用stderr输出"""
|
||||
setup_logging()
|
||||
config_used = mock_dict.call_args[0][0]
|
||||
self.assertIn("console_stderr", config_used["handlers"])
|
||||
self.assertEqual(config_used["handlers"]["console_stderr"]["stream"], "ext://sys.stderr")
|
||||
self.assertEqual(config_used["handlers"]["console_stderr"]["level"], "ERROR")
|
||||
with patch("fastdeploy.envs.FD_DEBUG", 1):
|
||||
with patch("logging.config.dictConfig") as mock_dict:
|
||||
setup_logging(config_file=str(cfg_path))
|
||||
config_used = mock_dict.call_args[0][0]
|
||||
self.assertEqual(config_used["handlers"]["test"]["level"], "DEBUG")
|
||||
|
||||
@patch("logging.config.dictConfig")
|
||||
def test_console_stdout_filters_below_error(self, mock_dict):
|
||||
"""console_stdout应该只输出低于ERROR级别的日志"""
|
||||
setup_logging()
|
||||
config_used = mock_dict.call_args[0][0]
|
||||
self.assertIn("console_stdout", config_used["handlers"])
|
||||
self.assertIn("below_error", config_used["handlers"]["console_stdout"]["filters"])
|
||||
self.assertEqual(config_used["handlers"]["console_stdout"]["stream"], "ext://sys.stdout")
|
||||
def test_default_config_has_channels(self):
|
||||
"""Default config should have channel loggers configured"""
|
||||
fake_cfg = os.path.join(self.temp_dir, "no_such_cfg.json")
|
||||
|
||||
with patch("logging.config.dictConfig") as mock_dict:
|
||||
setup_logging(config_file=fake_cfg)
|
||||
config_used = mock_dict.call_args[0][0]
|
||||
# Check channel loggers exist
|
||||
self.assertIn("fastdeploy.main", config_used["loggers"])
|
||||
self.assertIn("fastdeploy.request", config_used["loggers"])
|
||||
self.assertIn("fastdeploy.console", config_used["loggers"])
|
||||
|
||||
def test_default_config_has_handlers(self):
|
||||
"""Default config should have file handlers configured"""
|
||||
fake_cfg = os.path.join(self.temp_dir, "no_such_cfg.json")
|
||||
|
||||
with patch("logging.config.dictConfig") as mock_dict:
|
||||
setup_logging(config_file=fake_cfg)
|
||||
config_used = mock_dict.call_args[0][0]
|
||||
# Check handlers exist
|
||||
self.assertIn("main_file", config_used["handlers"])
|
||||
self.assertIn("request_file", config_used["handlers"])
|
||||
self.assertIn("error_file", config_used["handlers"])
|
||||
self.assertIn("console_stderr", config_used["handlers"])
|
||||
|
||||
def test_default_config_stderr_handler(self):
|
||||
"""Default config console_stderr should output to stderr"""
|
||||
fake_cfg = os.path.join(self.temp_dir, "no_such_cfg.json")
|
||||
|
||||
with patch("logging.config.dictConfig") as mock_dict:
|
||||
setup_logging(config_file=fake_cfg)
|
||||
config_used = mock_dict.call_args[0][0]
|
||||
self.assertEqual(config_used["handlers"]["console_stderr"]["stream"], "ext://sys.stderr")
|
||||
self.assertEqual(config_used["handlers"]["console_stderr"]["level"], "ERROR")
|
||||
|
||||
def test_default_config_stdout_filters_below_error(self):
|
||||
"""Default config console_stdout should filter below ERROR level"""
|
||||
fake_cfg = os.path.join(self.temp_dir, "no_such_cfg.json")
|
||||
|
||||
with patch("logging.config.dictConfig") as mock_dict:
|
||||
setup_logging(config_file=fake_cfg)
|
||||
config_used = mock_dict.call_args[0][0]
|
||||
self.assertIn("console_stdout", config_used["handlers"])
|
||||
self.assertIn("below_error", config_used["handlers"]["console_stdout"]["filters"])
|
||||
self.assertEqual(config_used["handlers"]["console_stdout"]["stream"], "ext://sys.stdout")
|
||||
|
||||
|
||||
class TestMaxLevelFilter(unittest.TestCase):
|
||||
def test_filter_allows_below_level(self):
|
||||
"""MaxLevelFilter应该允许低于指定级别的日志通过"""
|
||||
"""MaxLevelFilter should allow logs below specified level"""
|
||||
filter = MaxLevelFilter("ERROR")
|
||||
record = logging.LogRecord(
|
||||
name="test", level=logging.INFO, pathname="", lineno=0, msg="test", args=(), exc_info=None
|
||||
@@ -153,7 +187,7 @@ class TestMaxLevelFilter(unittest.TestCase):
|
||||
self.assertTrue(filter.filter(record))
|
||||
|
||||
def test_filter_blocks_at_level(self):
|
||||
"""MaxLevelFilter应该阻止等于指定级别的日志"""
|
||||
"""MaxLevelFilter should block logs at specified level"""
|
||||
filter = MaxLevelFilter("ERROR")
|
||||
record = logging.LogRecord(
|
||||
name="test", level=logging.ERROR, pathname="", lineno=0, msg="test", args=(), exc_info=None
|
||||
@@ -161,7 +195,7 @@ class TestMaxLevelFilter(unittest.TestCase):
|
||||
self.assertFalse(filter.filter(record))
|
||||
|
||||
def test_filter_blocks_above_level(self):
|
||||
"""MaxLevelFilter应该阻止高于指定级别的日志"""
|
||||
"""MaxLevelFilter should block logs above specified level"""
|
||||
filter = MaxLevelFilter("ERROR")
|
||||
record = logging.LogRecord(
|
||||
name="test", level=logging.CRITICAL, pathname="", lineno=0, msg="test", args=(), exc_info=None
|
||||
@@ -169,7 +203,7 @@ class TestMaxLevelFilter(unittest.TestCase):
|
||||
self.assertFalse(filter.filter(record))
|
||||
|
||||
def test_filter_with_numeric_level(self):
|
||||
"""MaxLevelFilter应该支持数字级别"""
|
||||
"""MaxLevelFilter should support numeric level"""
|
||||
filter = MaxLevelFilter(logging.WARNING)
|
||||
info_record = logging.LogRecord(
|
||||
name="test", level=logging.INFO, pathname="", lineno=0, msg="test", args=(), exc_info=None
|
||||
@@ -181,5 +215,95 @@ class TestMaxLevelFilter(unittest.TestCase):
|
||||
self.assertFalse(filter.filter(warning_record))
|
||||
|
||||
|
||||
class TestChannelLoggers(unittest.TestCase):
|
||||
"""Test channel logger configuration via get_logger"""
|
||||
|
||||
def setUp(self):
|
||||
self.temp_dir = tempfile.mkdtemp(prefix="logger_channel_test_")
|
||||
if hasattr(setup_logging, "_configured"):
|
||||
delattr(setup_logging, "_configured")
|
||||
# Clear channel configuration cache
|
||||
from fastdeploy.logger.logger import FastDeployLogger
|
||||
|
||||
FastDeployLogger._configured_channels = set()
|
||||
|
||||
self.patches = [
|
||||
patch("fastdeploy.envs.FD_LOG_DIR", self.temp_dir),
|
||||
patch("fastdeploy.envs.FD_DEBUG", 0),
|
||||
patch("fastdeploy.envs.FD_LOG_BACKUP_COUNT", "3"),
|
||||
patch("fastdeploy.envs.FD_LOG_LEVEL", None),
|
||||
]
|
||||
[p.start() for p in self.patches]
|
||||
|
||||
def tearDown(self):
|
||||
[p.stop() for p in self.patches]
|
||||
shutil.rmtree(self.temp_dir, ignore_errors=True)
|
||||
if hasattr(setup_logging, "_configured"):
|
||||
delattr(setup_logging, "_configured")
|
||||
# Clear channel configuration cache
|
||||
from fastdeploy.logger.logger import FastDeployLogger
|
||||
|
||||
FastDeployLogger._configured_channels = set()
|
||||
|
||||
def test_main_channel_has_handlers(self):
|
||||
"""main channel root logger should have handlers"""
|
||||
from fastdeploy.logger import get_logger
|
||||
|
||||
get_logger("test", channel="main")
|
||||
main_channel = logging.getLogger("fastdeploy.main")
|
||||
self.assertTrue(len(main_channel.handlers) > 0)
|
||||
|
||||
def test_request_channel_has_handlers(self):
|
||||
"""request channel root logger should have handlers"""
|
||||
from fastdeploy.logger import get_logger
|
||||
|
||||
get_logger("test", channel="request")
|
||||
request_channel = logging.getLogger("fastdeploy.request")
|
||||
self.assertTrue(len(request_channel.handlers) > 0)
|
||||
|
||||
def test_console_channel_has_stdout_handler(self):
|
||||
"""console channel should have stdout handler"""
|
||||
from fastdeploy.logger import get_logger
|
||||
|
||||
get_logger("test", channel="console")
|
||||
console_channel = logging.getLogger("fastdeploy.console")
|
||||
handler_types = [type(h).__name__ for h in console_channel.handlers]
|
||||
self.assertIn("StreamHandler", handler_types)
|
||||
|
||||
def test_child_logger_propagates_to_channel(self):
|
||||
"""Child loggers should propagate to channel root logger"""
|
||||
from fastdeploy.logger import get_logger
|
||||
|
||||
logger = get_logger("child_test", channel="main")
|
||||
# Child logger should have no direct handlers (propagates to parent)
|
||||
self.assertEqual(len(logger.handlers), 0)
|
||||
self.assertEqual(logger.name, "fastdeploy.main.child_test")
|
||||
|
||||
def test_channel_file_mapping(self):
|
||||
"""Each channel should write to correct log file"""
|
||||
from fastdeploy.logger.logger import FastDeployLogger
|
||||
|
||||
expected_files = {
|
||||
"main": "fastdeploy.log",
|
||||
"request": "request.log",
|
||||
"console": "console.log",
|
||||
}
|
||||
self.assertEqual(FastDeployLogger._channel_files, expected_files)
|
||||
|
||||
def test_multiple_loggers_same_channel(self):
|
||||
"""Multiple loggers on same channel should share channel root handlers"""
|
||||
from fastdeploy.logger import get_logger
|
||||
|
||||
logger1 = get_logger("test1", channel="main")
|
||||
logger2 = get_logger("test2", channel="main")
|
||||
|
||||
main_channel = logging.getLogger("fastdeploy.main")
|
||||
# Both child loggers should have no handlers
|
||||
self.assertEqual(len(logger1.handlers), 0)
|
||||
self.assertEqual(len(logger2.handlers), 0)
|
||||
# Channel root should have handlers
|
||||
self.assertTrue(len(main_channel.handlers) > 0)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
|
||||
Reference in New Issue
Block a user