[BugFix] fix offline LLM chat where "enable_thinking" is always False (#4686)

* fix enable_thinking

* recover ernie4_5_vl_processor
This commit is contained in:
kxz2002
2025-10-30 19:45:41 +08:00
committed by GitHub
parent 0089287534
commit 7dc9d9885e
3 changed files with 3 additions and 3 deletions
+1 -1
View File
@@ -245,7 +245,7 @@ class DataProcessor(BaseDataProcessor):
         if chat_template_kwargs:
             if isinstance(chat_template_kwargs, dict):
                 for k, v in chat_template_kwargs.items():
-                    if k not in task:
+                    if k not in task or task[k] is None:
                         task[k] = v
             else:
                 raise ValueError("Invalid input: chat_template_kwargs must be a dict")