【Fix】 remove text_after_process & raw_prediction (#4421)

* remove text_after_process & raw_prediction

* remove text_after_process & raw_prediction
This commit is contained in:
LiqinruiG
2025-10-16 19:00:18 +08:00
committed by GitHub
parent 8f77adc381
commit 4251ac5e95
20 changed files with 169 additions and 105 deletions
+4 -4
View File
@@ -197,7 +197,7 @@ class Ernie4_5Processor(BaseDataProcessor):
if isinstance(prompt, list): # if prompt is a token id list
request["prompt_token_ids"] = prompt
else:
request["text_after_process"] = prompt
request["prompt_tokens"] = prompt
tokens = self.tokenizer.tokenize(prompt)
token_ids = self.tokenizer.convert_tokens_to_ids(tokens)
request["prompt_token_ids"] = token_ids
@@ -318,7 +318,7 @@ class Ernie4_5Processor(BaseDataProcessor):
if tool_call_info.tools_called:
response_dict["outputs"]["tool_call"] = tool_call_info.tool_calls
response_dict["outputs"]["text"] = tool_call_info.content
response_dict["outputs"]["raw_prediction"] = full_text
response_dict["outputs"]["completion_tokens"] = full_text
data_processor_logger.info(f"req_id:{req_id}, decode_status: {self.decode_status[req_id]}")
del self.decode_status[req_id]
return response_dict
@@ -342,7 +342,7 @@ class Ernie4_5Processor(BaseDataProcessor):
if token_ids[-1] == self.tokenizer.eos_token_id:
token_ids = token_ids[:-1]
delta_text, previous_token_ids, previous_texts = self.ids2tokens(token_ids, req_id)
response_dict["outputs"]["raw_prediction"] = delta_text
response_dict["outputs"]["completion_tokens"] = delta_text
if self.reasoning_parser and (
enable_thinking or self.reasoning_parser.__class__.__name__ == "ErnieX1ReasoningParser"
):
@@ -398,7 +398,7 @@ class Ernie4_5Processor(BaseDataProcessor):
add_special_tokens=False,
**kwargs,
)
request_or_messages["text_after_process"] = spliced_message
request_or_messages["prompt_tokens"] = spliced_message
req_id = None
if isinstance(request_or_messages, dict):
req_id = request_or_messages.get("request_id", None)