
Minor bug fix

oobabooga 2 years ago
parent
commit
33fb6aed74
1 changed file with 1 addition and 1 deletion

+ 1 - 1
modules/text_generation.py

@@ -115,7 +115,7 @@ def generate_reply(question, max_new_tokens, do_sample, temperature, top_p, typi
         print(f"\n\n{question}\n--------------------\n")
 
     input_ids = encode(question, max_new_tokens)
-    original_input_ids = input_ids
+    original_input_ids = output = input_ids
     cuda = "" if (shared.args.cpu or shared.args.deepspeed or shared.args.flexgen) else ".cuda()"
     n = shared.tokenizer.eos_token_id if eos_token is None else int(encode(eos_token)[0][-1])
     if stopping_string is not None:
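
The one-line change pre-assigns `output` alongside `original_input_ids`, so that any later reference to `output` is safe even if generation is interrupted or raises before producing tokens. A minimal sketch of the failure mode this avoids; the function and the Hugging Face-style `model.generate` call here are illustrative, not the actual module code:

```python
import torch

def generate_reply_sketch(model, input_ids: torch.Tensor) -> torch.Tensor:
    # The fix: bind `output` up front so every later use has a value,
    # even when the try block never reaches its own assignment.
    original_input_ids = output = input_ids
    try:
        # Any generate-style call; the details are not the point here.
        output = model.generate(input_ids)
    except KeyboardInterrupt:
        # Generation interrupted: fall through with the default `output`.
        pass
    # Without the pre-assignment this line raises NameError whenever
    # generation was cut short; with it, an empty reply is returned.
    new_tokens = output[0][original_input_ids.shape[1]:]
    return new_tokens
```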