oobabooga 2 года назад
Родитель
Commit
33fb6aed74
1 измененный файл с 1 добавлением и 1 удалением
  1. 1 1
      modules/text_generation.py

+ 1 - 1
modules/text_generation.py

@@ -115,7 +115,7 @@ def generate_reply(question, max_new_tokens, do_sample, temperature, top_p, typi
         print(f"\n\n{question}\n--------------------\n")
         print(f"\n\n{question}\n--------------------\n")
 
 
     input_ids = encode(question, max_new_tokens)
     input_ids = encode(question, max_new_tokens)
-    original_input_ids = input_ids
+    original_input_ids = output = input_ids
     cuda = "" if (shared.args.cpu or shared.args.deepspeed or shared.args.flexgen) else ".cuda()"
     cuda = "" if (shared.args.cpu or shared.args.deepspeed or shared.args.flexgen) else ".cuda()"
     n = shared.tokenizer.eos_token_id if eos_token is None else int(encode(eos_token)[0][-1])
     n = shared.tokenizer.eos_token_id if eos_token is None else int(encode(eos_token)[0][-1])
     if stopping_string is not None:
     if stopping_string is not None: