
Commit 4216ec4

remove input_text from generated_text
1 parent e893428 commit 4216ec4

File tree

1 file changed: +6 −6 lines


tools/llm_bench/task/text_generation.py

Lines changed: 6 additions & 6 deletions
@@ -199,9 +199,9 @@ def run_text_generation(input_text, num, model, tokenizer, args, iter_data_list,
         if result_md5_list != prev_md5:
             log.warning(f"[{num}] Prompt[{prompt_index}]'s md5 {result_md5_list} "
                         f"is different from md5 of the {num - 1} iteration {prev_md5}")
-            metrics_print.print_generated(num, warm_up=(num == 0), generated=generated_text[0], prompt_idx=prompt_index)
+            metrics_print.print_generated(num, warm_up=(num == 0), generated=generated_text[0].replace(input_text, '[[[INPUT-HERE]]]'), prompt_idx=prompt_index)
     else:
-        metrics_print.print_generated(num, warm_up=(num == 0), generated=generated_text[0], prompt_idx=prompt_index)
+        metrics_print.print_generated(num, warm_up=(num == 0), generated=generated_text[0].replace(input_text, '[[[INPUT-HERE]]]'), prompt_idx=prompt_index)
     if bench_hook is not None:
         bench_hook.clear_time_list()
         bench_hook.clear_time_infer_list()
@@ -429,9 +429,9 @@ def run_text_generation_genai(input_text, num, model, tokenizer, args, iter_data
         if result_md5_list != prev_md5:
             log.warning(f"[{num}] Prompt[{prompt_index}]'s md5 {result_md5_list} "
                         f"is different from md5 of the {num - 1} iteration {prev_md5}")
-            metrics_print.print_generated(num, warm_up=(num == 0), generated=generated_text[0], prompt_idx=prompt_index)
+            metrics_print.print_generated(num, warm_up=(num == 0), generated=generated_text[0].replace(input_text, '[[[INPUT-HERE]]]'), prompt_idx=prompt_index)
     else:
-        metrics_print.print_generated(num, warm_up=(num == 0), generated=generated_text[0], prompt_idx=prompt_index)
+        metrics_print.print_generated(num, warm_up=(num == 0), generated=generated_text[0].replace(input_text, '[[[INPUT-HERE]]]'), prompt_idx=prompt_index)


 def run_text_generation_genai_with_stream(input_text, num, model, tokenizer, args, iter_data_list, md5_list,
@@ -578,9 +578,9 @@ def run_text_generation_genai_with_stream(input_text, num, model, tokenizer, arg
         if result_md5_list != prev_md5:
             log.warning(f"[{num}] Prompt[{prompt_index}]'s md5 {result_md5_list} "
                         f"is different from md5 of the {num - 1} iteration {prev_md5}")
-            metrics_print.print_generated(num, warm_up=(num == 0), generated=generated_text[0], prompt_idx=prompt_index)
+            metrics_print.print_generated(num, warm_up=(num == 0), generated=generated_text[0].replace(input_text, '[[[INPUT-HERE]]]'), prompt_idx=prompt_index)
     else:
-        metrics_print.print_generated(num, warm_up=(num == 0), generated=generated_text[0], prompt_idx=prompt_index)
+        metrics_print.print_generated(num, warm_up=(num == 0), generated=generated_text[0].replace(input_text, '[[[INPUT-HERE]]]'), prompt_idx=prompt_index)
     streamer.reset()

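For context, the added .replace(input_text, '[[[INPUT-HERE]]]') call only changes the string passed to metrics_print.print_generated: the echoed prompt inside the generated text is swapped for a placeholder in the benchmark log, while generated_text itself is untouched. A minimal standalone sketch of that effect, using made-up prompt and output values (not taken from the benchmark):

# Hypothetical values for illustration only; in llm_bench these come from the actual run.
input_text = "What is OpenVINO?"
generated_text = ["What is OpenVINO? OpenVINO is an open-source toolkit for optimizing AI inference."]

# Same masking as in the commit: the echoed prompt is replaced with a placeholder
# before the text is logged, so only the newly generated continuation is visible.
printable = generated_text[0].replace(input_text, '[[[INPUT-HERE]]]')
print(printable)
# -> [[[INPUT-HERE]]] OpenVINO is an open-source toolkit for optimizing AI inference.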