chat.py 18 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396
  1. import base64
  2. import copy
  3. import io
  4. import json
  5. import re
  6. from datetime import datetime
  7. from pathlib import Path
  8. from PIL import Image
  9. import modules.extensions as extensions_module
  10. import modules.shared as shared
  11. from modules.extensions import apply_extensions
  12. from modules.html_generator import fix_newlines, generate_chat_html
  13. from modules.text_generation import (encode, generate_reply,
  14. get_max_prompt_length)
  15. def generate_chat_output(history, name1, name2, character):
  16. if shared.args.cai_chat:
  17. return generate_chat_html(history, name1, name2, character)
  18. else:
  19. return history
def generate_chat_prompt(user_input, max_new_tokens, name1, name2, context, chat_prompt_size, impersonate=False, also_return_rows=False):
    """Assemble the prompt sent to the model from context + as much chat history as fits.

    Rows are built newest-first (inserted at index 1, just after the context row)
    until the encoded prompt would exceed the token budget, then trimmed oldest-first.

    Parameters:
        user_input: the new user message (ignored when impersonating with empty input).
        max_new_tokens: generation budget, subtracted from the model context length.
        name1/name2: user and bot display names used as "Name:" line prefixes.
        context: character persona/scenario text placed at the top.
        chat_prompt_size: user-configured cap on the prompt length, in tokens.
        impersonate: build a prompt that ends with "name1:" so the model speaks as the user.
        also_return_rows: additionally return the list of individual prompt rows.

    Returns: the prompt string, or (prompt, rows) when also_return_rows is True.
    """
    user_input = fix_newlines(user_input)
    rows = [f"{context.strip()}\n"]
    if shared.soft_prompt:
        # A soft prompt occupies part of the context window; shrink the budget accordingly.
        chat_prompt_size -= shared.soft_prompt_tensor.shape[1]
    max_length = min(get_max_prompt_length(max_new_tokens), chat_prompt_size)
    # Walk the history backwards, prepending exchanges while the prompt still fits.
    i = len(shared.history['internal'])-1
    while i >= 0 and len(encode(''.join(rows), max_new_tokens)[0]) < max_length:
        rows.insert(1, f"{name2}: {shared.history['internal'][i][1].strip()}\n")
        prev_user_input = shared.history['internal'][i][0]
        # The greeting placeholder has no real user message; skip its user side.
        if len(prev_user_input) > 0 and prev_user_input != '<|BEGIN-VISIBLE-CHAT|>':
            rows.insert(1, f"{name1}: {prev_user_input.strip()}\n")
        i -= 1
    if not impersonate:
        if len(user_input) > 0:
            rows.append(f"{name1}: {user_input}\n")
        # Extensions may rewrite the "Bot:" prefix line (e.g. to inject a persona cue).
        rows.append(apply_extensions(f"{name2}:", "bot_prefix"))
        limit = 3
    else:
        rows.append(f"{name1}:")
        limit = 2
    # Drop the oldest history rows (index 1; index 0 is the context) until we fit,
    # but never drop the trailing user-input/prefix rows protected by `limit`.
    while len(rows) > limit and len(encode(''.join(rows), max_new_tokens)[0]) >= max_length:
        rows.pop(1)
    prompt = ''.join(rows)
    if also_return_rows:
        return prompt, rows
    else:
        return prompt
  48. def extract_message_from_reply(reply, name1, name2, check):
  49. next_character_found = False
  50. if check:
  51. lines = reply.split('\n')
  52. reply = lines[0].strip()
  53. if len(lines) > 1:
  54. next_character_found = True
  55. else:
  56. for string in [f"\n{name1}:", f"\n{name2}:"]:
  57. idx = reply.find(string)
  58. if idx != -1:
  59. reply = reply[:idx]
  60. next_character_found = True
  61. # If something like "\nYo" is generated just before "\nYou:"
  62. # is completed, trim it
  63. if not next_character_found:
  64. for string in [f"\n{name1}:", f"\n{name2}:"]:
  65. for j in range(len(string)-1, 0, -1):
  66. if reply[-j:] == string[:j]:
  67. reply = reply[:-j]
  68. break
  69. reply = fix_newlines(reply)
  70. return reply, next_character_found
def chatbot_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, check, chat_prompt_size, chat_generation_attempts=1, regenerate=False):
    """Generate a bot reply to *text*, streaming updated chat history as a generator.

    Yields shared.history['visible'] snapshots (lists of [user, bot] pairs) so the
    UI can render the reply as it is produced. Mutates shared.history in place.
    Most parameters are sampling settings forwarded verbatim to generate_reply().
    """
    just_started = True
    # Stop generation at the first newline when `check` (stop-at-newline) is on.
    eos_token = '\n' if check else None
    name1_original = name1
    # Pygmalion models are trained with the user always called "You".
    if 'pygmalion' in shared.model_name.lower():
        name1 = "You"

    # Check if any extension wants to hijack this function call
    visible_text = None
    custom_generate_chat_prompt = None
    for extension, _ in extensions_module.iterator():
        if hasattr(extension, 'input_hijack') and extension.input_hijack['state'] == True:
            # One-shot hijack: the extension supplies both the model input and
            # the text shown in the UI, and the flag is cleared immediately.
            extension.input_hijack['state'] = False
            text, visible_text = extension.input_hijack['value']
        if custom_generate_chat_prompt is None and hasattr(extension, 'custom_generate_chat_prompt'):
            custom_generate_chat_prompt = extension.custom_generate_chat_prompt

    if visible_text is None:
        visible_text = text
    if shared.args.chat:
        visible_text = visible_text.replace('\n', '<br>')
    text = apply_extensions(text, "input")
    if custom_generate_chat_prompt is None:
        prompt = generate_chat_prompt(text, max_new_tokens, name1, name2, context, chat_prompt_size)
    else:
        prompt = custom_generate_chat_prompt(text, max_new_tokens, name1, name2, context, chat_prompt_size)

    # Yield *Is typing...*
    if not regenerate:
        yield shared.history['visible']+[[visible_text, shared.processing_message]]

    # Generate. Each attempt continues from the cumulative reply so far,
    # until the next speaker's turn is detected.
    cumulative_reply = ''
    for i in range(chat_generation_attempts):
        reply = None
        for reply in generate_reply(f"{prompt}{' ' if len(cumulative_reply) > 0 else ''}{cumulative_reply}", max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, eos_token=eos_token, stopping_strings=[f"\n{name1}:", f"\n{name2}:"]):
            reply = cumulative_reply + reply

            # Extracting the reply
            reply, next_character_found = extract_message_from_reply(reply, name1, name2, check)
            # Show the user's real name in the UI even when Pygmalion forced "You".
            visible_reply = re.sub("(<USER>|<user>|{{user}})", name1_original, reply)
            visible_reply = apply_extensions(visible_reply, "output")
            if shared.args.chat:
                visible_reply = visible_reply.replace('\n', '<br>')

            # We need this global variable to handle the Stop event,
            # otherwise gradio gets confused
            if shared.stop_everything:
                return shared.history['visible']
            if just_started:
                just_started = False
                # Reserve the history slot that the streaming updates overwrite.
                shared.history['internal'].append(['', ''])
                shared.history['visible'].append(['', ''])

            shared.history['internal'][-1] = [text, reply]
            shared.history['visible'][-1] = [visible_text, visible_reply]
            if not shared.args.no_stream:
                yield shared.history['visible']
            if next_character_found:
                break

        if reply is not None:
            cumulative_reply = reply

    yield shared.history['visible']
def impersonate_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, check, chat_prompt_size, chat_generation_attempts=1):
    """Have the model write a message AS THE USER (name1), streaming partial text.

    Unlike chatbot_wrapper, this does not touch shared.history: it only yields
    the in-progress impersonated message so the UI can place it in the input box.
    """
    # Stop generation at the first newline when stop-at-newline is on.
    eos_token = '\n' if check else None

    # Pygmalion models are trained with the user always called "You".
    if 'pygmalion' in shared.model_name.lower():
        name1 = "You"

    prompt = generate_chat_prompt(text, max_new_tokens, name1, name2, context, chat_prompt_size, impersonate=True)

    # Yield *Is typing...*
    yield shared.processing_message

    # Each attempt continues from the cumulative reply produced so far.
    cumulative_reply = ''
    for i in range(chat_generation_attempts):
        reply = None
        for reply in generate_reply(f"{prompt}{' ' if len(cumulative_reply) > 0 else ''}{cumulative_reply}", max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, eos_token=eos_token, stopping_strings=[f"\n{name1}:", f"\n{name2}:"]):
            reply = cumulative_reply + reply
            reply, next_character_found = extract_message_from_reply(reply, name1, name2, check)
            yield reply
            if next_character_found:
                break

        if reply is not None:
            cumulative_reply = reply

    yield reply
  146. def cai_chatbot_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, check, chat_prompt_size, chat_generation_attempts=1):
  147. for _history in chatbot_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, check, chat_prompt_size, chat_generation_attempts):
  148. yield generate_chat_html(_history, name1, name2, shared.character)
def regenerate_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, check, chat_prompt_size, chat_generation_attempts=1):
    """Regenerate the last bot reply: pop the final exchange and re-run generation
    on the same user message, streaming updated chat output snapshots.

    If there is nothing to regenerate (only the greeting, or an empty log),
    the current history is yielded unchanged.
    """
    if (shared.character != 'None' and len(shared.history['visible']) == 1) or len(shared.history['internal']) == 0:
        yield generate_chat_output(shared.history['visible'], name1, name2, shared.character)
    else:
        # Remove the last exchange; chatbot_wrapper will append a fresh one.
        last_visible = shared.history['visible'].pop()
        last_internal = shared.history['internal'].pop()

        # Yield '*Is typing...*'
        yield generate_chat_output(shared.history['visible']+[[last_visible[0], shared.processing_message]], name1, name2, shared.character)
        for _history in chatbot_wrapper(last_internal[0], max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, check, chat_prompt_size, chat_generation_attempts, regenerate=True):
            # Keep the original visible user text (extensions may have altered
            # the internal text) while taking the newly generated bot reply.
            if shared.args.cai_chat:
                shared.history['visible'][-1] = [last_visible[0], _history[-1][1]]
            else:
                shared.history['visible'][-1] = (last_visible[0], _history[-1][1])
            yield generate_chat_output(shared.history['visible'], name1, name2, shared.character)
  163. def remove_last_message(name1, name2):
  164. if len(shared.history['visible']) > 0 and not shared.history['internal'][-1][0] == '<|BEGIN-VISIBLE-CHAT|>':
  165. last = shared.history['visible'].pop()
  166. shared.history['internal'].pop()
  167. else:
  168. last = ['', '']
  169. if shared.args.cai_chat:
  170. return generate_chat_html(shared.history['visible'], name1, name2, shared.character), last[0]
  171. else:
  172. return shared.history['visible'], last[0]
  173. def send_last_reply_to_input():
  174. if len(shared.history['internal']) > 0:
  175. return shared.history['internal'][-1][1]
  176. else:
  177. return ''
  178. def replace_last_reply(text, name1, name2):
  179. if len(shared.history['visible']) > 0:
  180. if shared.args.cai_chat:
  181. shared.history['visible'][-1][1] = text
  182. else:
  183. shared.history['visible'][-1] = (shared.history['visible'][-1][0], text)
  184. shared.history['internal'][-1][1] = apply_extensions(text, "input")
  185. return generate_chat_output(shared.history['visible'], name1, name2, shared.character)
  186. def clear_html():
  187. return generate_chat_html([], "", "", shared.character)
  188. def clear_chat_log(name1, name2):
  189. if shared.character != 'None':
  190. found = False
  191. for i in range(len(shared.history['internal'])):
  192. if '<|BEGIN-VISIBLE-CHAT|>' in shared.history['internal'][i][0]:
  193. shared.history['visible'] = [['', apply_extensions(shared.history['internal'][i][1], "output")]]
  194. shared.history['internal'] = [shared.history['internal'][i]]
  195. found = True
  196. break
  197. if not found:
  198. shared.history['visible'] = []
  199. shared.history['internal'] = []
  200. else:
  201. shared.history['internal'] = []
  202. shared.history['visible'] = []
  203. return generate_chat_output(shared.history['visible'], name1, name2, shared.character)
  204. def redraw_html(name1, name2):
  205. return generate_chat_html(shared.history['visible'], name1, name2, shared.character)
  206. def tokenize_dialogue(dialogue, name1, name2):
  207. _history = []
  208. dialogue = re.sub('<START>', '', dialogue)
  209. dialogue = re.sub('<start>', '', dialogue)
  210. dialogue = re.sub('(\n|^)[Aa]non:', '\\1You:', dialogue)
  211. dialogue = re.sub('(\n|^)\[CHARACTER\]:', f'\\g<1>{name2}:', dialogue)
  212. idx = [m.start() for m in re.finditer(f"(^|\n)({re.escape(name1)}|{re.escape(name2)}):", dialogue)]
  213. if len(idx) == 0:
  214. return _history
  215. messages = []
  216. for i in range(len(idx)-1):
  217. messages.append(dialogue[idx[i]:idx[i+1]].strip())
  218. messages.append(dialogue[idx[-1]:].strip())
  219. entry = ['', '']
  220. for i in messages:
  221. if i.startswith(f'{name1}:'):
  222. entry[0] = i[len(f'{name1}:'):].strip()
  223. elif i.startswith(f'{name2}:'):
  224. entry[1] = i[len(f'{name2}:'):].strip()
  225. if not (len(entry[0]) == 0 and len(entry[1]) == 0):
  226. _history.append(entry)
  227. entry = ['', '']
  228. print("\033[1;32;1m\nDialogue tokenized to:\033[0;37;0m\n", end='')
  229. for row in _history:
  230. for column in row:
  231. print("\n")
  232. for line in column.strip().split('\n'):
  233. print("| "+line+"\n")
  234. print("|\n")
  235. print("------------------------------")
  236. return _history
  237. def save_history(timestamp=True):
  238. prefix = '' if shared.character == 'None' else f"{shared.character}_"
  239. if timestamp:
  240. fname = f"{prefix}{datetime.now().strftime('%Y%m%d-%H%M%S')}.json"
  241. else:
  242. fname = f"{prefix}persistent.json"
  243. if not Path('logs').exists():
  244. Path('logs').mkdir()
  245. with open(Path(f'logs/{fname}'), 'w', encoding='utf-8') as f:
  246. f.write(json.dumps({'data': shared.history['internal'], 'data_visible': shared.history['visible']}, indent=2))
  247. return Path(f'logs/{fname}')
def load_history(file, name1, name2):
    """Load a chat history from uploaded bytes into shared.history.

    Accepts three formats:
      1. This UI's own export: {'data': ..., optional 'data_visible': ...}.
      2. Pygmalion's official web UI export: {'chat': ["Name: msg", ...]}.
      3. Anything that fails to parse as JSON is treated as raw dialogue text
         and run through tokenize_dialogue().

    Parameters:
        file: raw UTF-8 encoded bytes of the uploaded file.
        name1/name2: speaker names, used for format-3 tokenization and to
            detect whether a Pygmalion chat starts with a bot greeting.
    """
    file = file.decode('utf-8')
    try:
        j = json.loads(file)
        if 'data' in j:
            shared.history['internal'] = j['data']
            if 'data_visible' in j:
                shared.history['visible'] = j['data_visible']
            else:
                shared.history['visible'] = copy.deepcopy(shared.history['internal'])
        # Compatibility with Pygmalion AI's official web UI
        elif 'chat' in j:
            # Strip the leading "Name:" label from each message.
            shared.history['internal'] = [':'.join(x.split(':')[1:]).strip() for x in j['chat']]
            if len(j['chat']) > 0 and j['chat'][0].startswith(f'{name2}:'):
                # First message is a bot greeting: mark it with the greeting
                # placeholder, then pair the remaining messages as [user, bot].
                shared.history['internal'] = [['<|BEGIN-VISIBLE-CHAT|>', shared.history['internal'][0]]] + [[shared.history['internal'][i], shared.history['internal'][i+1]] for i in range(1, len(shared.history['internal'])-1, 2)]
                shared.history['visible'] = copy.deepcopy(shared.history['internal'])
                shared.history['visible'][0][0] = ''
            else:
                shared.history['internal'] = [[shared.history['internal'][i], shared.history['internal'][i+1]] for i in range(0, len(shared.history['internal'])-1, 2)]
                shared.history['visible'] = copy.deepcopy(shared.history['internal'])
    # NOTE(review): bare except deliberately catches any parse failure and falls
    # back to plain-text tokenization — a narrower `except Exception` would be safer.
    except:
        shared.history['internal'] = tokenize_dialogue(file, name1, name2)
        shared.history['visible'] = copy.deepcopy(shared.history['internal'])
  271. def load_default_history(name1, name2):
  272. if Path('logs/persistent.json').exists():
  273. load_history(open(Path('logs/persistent.json'), 'rb').read(), name1, name2)
  274. else:
  275. shared.history['internal'] = []
  276. shared.history['visible'] = []
  277. def load_character(_character, name1, name2):
  278. context = ""
  279. shared.history['internal'] = []
  280. shared.history['visible'] = []
  281. if _character != 'None':
  282. shared.character = _character
  283. data = json.loads(open(Path(f'characters/{_character}.json'), 'r', encoding='utf-8').read())
  284. name2 = data['char_name']
  285. if 'char_persona' in data and data['char_persona'] != '':
  286. context += f"{data['char_name']}'s Persona: {data['char_persona']}\n"
  287. if 'world_scenario' in data and data['world_scenario'] != '':
  288. context += f"Scenario: {data['world_scenario']}\n"
  289. context = f"{context.strip()}\n<START>\n"
  290. if 'example_dialogue' in data and data['example_dialogue'] != '':
  291. data['example_dialogue'] = data['example_dialogue'].replace('{{user}}', name1).replace('{{char}}', name2)
  292. data['example_dialogue'] = data['example_dialogue'].replace('<USER>', name1).replace('<BOT>', name2)
  293. context += f"{data['example_dialogue'].strip()}\n"
  294. if 'char_greeting' in data and len(data['char_greeting'].strip()) > 0:
  295. shared.history['internal'] += [['<|BEGIN-VISIBLE-CHAT|>', data['char_greeting']]]
  296. shared.history['visible'] += [['', apply_extensions(data['char_greeting'], "output")]]
  297. else:
  298. shared.history['internal'] += [['<|BEGIN-VISIBLE-CHAT|>', "Hello there!"]]
  299. shared.history['visible'] += [['', "Hello there!"]]
  300. else:
  301. shared.character = None
  302. context = shared.settings['context_pygmalion']
  303. name2 = shared.settings['name2_pygmalion']
  304. if Path(f'logs/{shared.character}_persistent.json').exists():
  305. load_history(open(Path(f'logs/{shared.character}_persistent.json'), 'rb').read(), name1, name2)
  306. if shared.args.cai_chat:
  307. return name2, context, generate_chat_html(shared.history['visible'], name1, name2, shared.character)
  308. else:
  309. return name2, context, shared.history['visible']
  310. def upload_character(json_file, img, tavern=False):
  311. json_file = json_file if type(json_file) == str else json_file.decode('utf-8')
  312. data = json.loads(json_file)
  313. outfile_name = data["char_name"]
  314. i = 1
  315. while Path(f'characters/{outfile_name}.json').exists():
  316. outfile_name = f'{data["char_name"]}_{i:03d}'
  317. i += 1
  318. if tavern:
  319. outfile_name = f'TavernAI-{outfile_name}'
  320. with open(Path(f'characters/{outfile_name}.json'), 'w', encoding='utf-8') as f:
  321. f.write(json_file)
  322. if img is not None:
  323. img = Image.open(io.BytesIO(img))
  324. img.save(Path(f'characters/{outfile_name}.png'))
  325. print(f'New character saved to "characters/{outfile_name}.json".')
  326. return outfile_name
  327. def upload_tavern_character(img, name1, name2):
  328. _img = Image.open(io.BytesIO(img))
  329. _img.getexif()
  330. decoded_string = base64.b64decode(_img.info['chara'])
  331. _json = json.loads(decoded_string)
  332. _json = {"char_name": _json['name'], "char_persona": _json['description'], "char_greeting": _json["first_mes"], "example_dialogue": _json['mes_example'], "world_scenario": _json['scenario']}
  333. return upload_character(json.dumps(_json), img, tavern=True)
  334. def upload_your_profile_picture(img):
  335. img = Image.open(io.BytesIO(img))
  336. img.save(Path('img_me.png'))
  337. print('Profile picture saved to "img_me.png"')