[BugFix] When the value of "temperature" is 0, adjust it to 1e-06 (#4919)

* [DataProcessor] add option thinking_mode (#4735)

* add thinking_mode

* add thinking_mode

* add thinking_mode

* add thinking_mode

* add thinking_mode

* add thinking_mode

* add unit test

* [BugFix] When the value of "temperature" is 0, adjust it to 1e-06 (#4900)

* add default temperature value

* add unit test

* update

* update

* add unit test

* update

* fix unit test
This commit is contained in:
luukunn
2025-11-10 19:34:20 +08:00
committed by GitHub
parent c6e9717f33
commit f7159e31ba
3 changed files with 17 additions and 1 deletions
+2
View File
@@ -251,6 +251,8 @@ class LLMEngine:
request = Request.from_dict(task)
llm_logger.info(f"Receive request {request}")
if sampling_params is not None:
if sampling_params.temperature is not None and abs(sampling_params.temperature) < 1e-06:
sampling_params.temperature = 1e-06
request.sampling_params = sampling_params
request.preprocess_start_time = time.time()
chat_template_kwargs = kwargs.get("chat_template_kwargs") or {}
+2 -1
View File
@@ -304,7 +304,8 @@ class EngineClient:
api_server_logger.warning(
f"req_id: {data['request_id']}, reasoning_max_tokens exceeds max_tokens, the value of reasoning_max_tokens will be adjusted to match that of max_tokens"
)
if data.get("temperature") is not None and abs(data["temperature"]) < 1e-6:
data["temperature"] = 1e-6
# logprobs
logprobs = data.get("logprobs")
top_logprobs = None