chat.py 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376
  1. import base64
  2. import copy
  3. import io
  4. import json
  5. import re
  6. from datetime import datetime
  7. from io import BytesIO
  8. from pathlib import Path
  9. import modules.shared as shared
  10. from modules.extensions import apply_extensions
  11. from modules.html_generator import generate_chat_html
  12. from modules.text_generation import encode
  13. from modules.text_generation import generate_reply
  14. from modules.text_generation import get_max_prompt_length
  15. from PIL import Image
  16. if shared.args.picture and (shared.args.cai_chat or shared.args.chat):
  17. import modules.bot_picture as bot_picture
  18. history = {'internal': [], 'visible': []}
  19. character = None
  20. # This gets the new line characters right.
  21. def clean_chat_message(text):
  22. text = text.replace('\n', '\n\n')
  23. text = re.sub(r"\n{3,}", "\n\n", text)
  24. text = text.strip()
  25. return text
  26. def generate_chat_prompt(text, tokens, name1, name2, context, chat_prompt_size, impersonate=False):
  27. text = clean_chat_message(text)
  28. rows = [f"{context.strip()}\n"]
  29. i = len(history['internal'])-1
  30. count = 0
  31. if shared.soft_prompt:
  32. chat_prompt_size -= shared.soft_prompt_tensor.shape[1]
  33. max_length = min(get_max_prompt_length(tokens), chat_prompt_size)
  34. while i >= 0 and len(encode(''.join(rows), tokens)[0]) < max_length:
  35. rows.insert(1, f"{name2}: {history['internal'][i][1].strip()}\n")
  36. count += 1
  37. if not (history['internal'][i][0] == '<|BEGIN-VISIBLE-CHAT|>'):
  38. rows.insert(1, f"{name1}: {history['internal'][i][0].strip()}\n")
  39. count += 1
  40. i -= 1
  41. if not impersonate:
  42. rows.append(f"{name1}: {text}\n")
  43. rows.append(apply_extensions(f"{name2}:", "bot_prefix"))
  44. limit = 3
  45. else:
  46. rows.append(f"{name1}:")
  47. limit = 2
  48. while len(rows) > limit and len(encode(''.join(rows), tokens)[0]) >= max_length:
  49. rows.pop(1)
  50. rows.pop(1)
  51. question = ''.join(rows)
  52. return question
  53. def extract_message_from_reply(question, reply, current, other, check, extensions=False):
  54. next_character_found = False
  55. substring_found = False
  56. previous_idx = [m.start() for m in re.finditer(f"(^|\n){re.escape(current)}:", question)]
  57. idx = [m.start() for m in re.finditer(f"(^|\n){re.escape(current)}:", reply)]
  58. idx = idx[len(previous_idx)-1]
  59. if extensions:
  60. reply = reply[idx + 1 + len(apply_extensions(f"{current}:", "bot_prefix")):]
  61. else:
  62. reply = reply[idx + 1 + len(f"{current}:"):]
  63. if check:
  64. reply = reply.split('\n')[0].strip()
  65. else:
  66. idx = reply.find(f"\n{other}:")
  67. if idx != -1:
  68. reply = reply[:idx]
  69. next_character_found = True
  70. reply = clean_chat_message(reply)
  71. # Detect if something like "\nYo" is generated just before
  72. # "\nYou:" is completed
  73. tmp = f"\n{other}:"
  74. for j in range(1, len(tmp)):
  75. if reply[-j:] == tmp[:j]:
  76. substring_found = True
  77. return reply, next_character_found, substring_found
  78. def generate_chat_picture(picture, name1, name2):
  79. text = f'*{name1} sends {name2} a picture that contains the following: "{bot_picture.caption_image(picture)}"*'
  80. buffer = BytesIO()
  81. picture.save(buffer, format="JPEG")
  82. img_str = base64.b64encode(buffer.getvalue()).decode('utf-8')
  83. visible_text = f'<img src="data:image/jpeg;base64,{img_str}">'
  84. return text, visible_text
  85. def stop_everything_event():
  86. global stop_everything
  87. stop_everything = True
def chatbot_wrapper(text, tokens, do_sample, max_new_tokens, temperature, top_p, typical_p, repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, name1, name2, context, check, chat_prompt_size, picture=None):
    """Generate the bot's next reply and stream the updated visible history.

    Yields history['visible'] after every partial reply so the UI can refresh
    incrementally; both histories are mutated in place.
    """
    global stop_everything
    stop_everything = False
    # Pygmalion models expect the user to be called "You".
    if 'pygmalion' in shared.model_name.lower():
        name1 = "You"
    if shared.args.picture and picture is not None:
        # An image was attached: the internal text is its caption, the
        # visible text is an inline <img> tag.
        text, visible_text = generate_chat_picture(picture, name1, name2)
    else:
        visible_text = text
        if shared.args.chat:
            visible_text = visible_text.replace('\n', '<br>')
    text = apply_extensions(text, "input")
    question = generate_chat_prompt(text, tokens, name1, name2, context, chat_prompt_size)
    # In single-line mode, stop generation at the first newline.
    eos_token = '\n' if check else None
    first = True
    for reply in generate_reply(question, tokens, do_sample, max_new_tokens, temperature, top_p, typical_p, repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, eos_token=eos_token, stopping_string=f"\n{name1}:"):
        reply, next_character_found, substring_found = extract_message_from_reply(question, reply, name2, name1, check, extensions=True)
        visible_reply = apply_extensions(reply, "output")
        if shared.args.chat:
            visible_reply = visible_reply.replace('\n', '<br>')
        # We need this global variable to handle the Stop event,
        # otherwise gradio gets confused
        if stop_everything:
            return history['visible']
        if first:
            # Allocate the new exchange slot once, on the first chunk.
            first = False
            history['internal'].append(['', ''])
            history['visible'].append(['', ''])
        history['internal'][-1] = [text, reply]
        history['visible'][-1] = [visible_text, visible_reply]
        # Hold back updates that end in a partial "\n{name1}:" marker.
        if not substring_found:
            yield history['visible']
        if next_character_found:
            break
    yield history['visible']
  123. def impersonate_wrapper(text, tokens, do_sample, max_new_tokens, temperature, top_p, typical_p, repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, name1, name2, context, check, chat_prompt_size, picture=None):
  124. if 'pygmalion' in shared.model_name.lower():
  125. name1 = "You"
  126. question = generate_chat_prompt(text, tokens, name1, name2, context, chat_prompt_size, impersonate=True)
  127. eos_token = '\n' if check else None
  128. for reply in generate_reply(question, tokens, do_sample, max_new_tokens, temperature, top_p, typical_p, repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, eos_token=eos_token, stopping_string=f"\n{name2}:"):
  129. reply, next_character_found, substring_found = extract_message_from_reply(question, reply, name1, name2, check, extensions=False)
  130. if not substring_found:
  131. yield reply
  132. if next_character_found:
  133. break
  134. yield reply
  135. def cai_chatbot_wrapper(text, tokens, do_sample, max_new_tokens, temperature, top_p, typical_p, repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, name1, name2, context, check, chat_prompt_size, picture=None):
  136. for _history in chatbot_wrapper(text, tokens, do_sample, max_new_tokens, temperature, top_p, typical_p, repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, name1, name2, context, check, chat_prompt_size, picture):
  137. yield generate_chat_html(_history, name1, name2, character)
  138. def regenerate_wrapper(text, tokens, do_sample, max_new_tokens, temperature, top_p, typical_p, repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, name1, name2, context, check, chat_prompt_size, picture=None):
  139. if character is not None and len(history['visible']) == 1:
  140. if shared.args.cai_chat:
  141. yield generate_chat_html(history['visible'], name1, name2, character)
  142. else:
  143. yield history['visible']
  144. else:
  145. last_visible = history['visible'].pop()
  146. last_internal = history['internal'].pop()
  147. for _history in chatbot_wrapper(last_internal[0], tokens, do_sample, max_new_tokens, temperature, top_p, typical_p, repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, name1, name2, context, check, chat_prompt_size, picture):
  148. if shared.args.cai_chat:
  149. history['visible'][-1] = [last_visible[0], _history[-1][1]]
  150. yield generate_chat_html(history['visible'], name1, name2, character)
  151. else:
  152. history['visible'][-1] = (last_visible[0], _history[-1][1])
  153. yield history['visible']
  154. def remove_last_message(name1, name2):
  155. if not history['internal'][-1][0] == '<|BEGIN-VISIBLE-CHAT|>':
  156. last = history['visible'].pop()
  157. history['internal'].pop()
  158. else:
  159. last = ['', '']
  160. if shared.args.cai_chat:
  161. return generate_chat_html(history['visible'], name1, name2, character), last[0]
  162. else:
  163. return history['visible'], last[0]
  164. def send_last_reply_to_input():
  165. if len(history['internal']) > 0:
  166. return history['internal'][-1][1]
  167. else:
  168. return ''
  169. def replace_last_reply(text, name1, name2):
  170. if len(history['visible']) > 0:
  171. if shared.args.cai_chat:
  172. history['visible'][-1][1] = text
  173. else:
  174. history['visible'][-1] = (history['visible'][-1][0], text)
  175. history['internal'][-1][1] = apply_extensions(text, "input")
  176. if shared.args.cai_chat:
  177. return generate_chat_html(history['visible'], name1, name2, character)
  178. else:
  179. return history['visible']
  180. def clear_html():
  181. return generate_chat_html([], "", "", character)
  182. def clear_chat_log(_character, name1, name2):
  183. global history
  184. if _character != 'None':
  185. for i in range(len(history['internal'])):
  186. if '<|BEGIN-VISIBLE-CHAT|>' in history['internal'][i][0]:
  187. history['visible'] = [['', history['internal'][i][1]]]
  188. history['internal'] = history['internal'][:i+1]
  189. break
  190. else:
  191. history['internal'] = []
  192. history['visible'] = []
  193. if shared.args.cai_chat:
  194. return generate_chat_html(history['visible'], name1, name2, character)
  195. else:
  196. return history['visible']
  197. def redraw_html(name1, name2):
  198. global history
  199. return generate_chat_html(history['visible'], name1, name2, character)
  200. def tokenize_dialogue(dialogue, name1, name2):
  201. _history = []
  202. dialogue = re.sub('<START>', '', dialogue)
  203. dialogue = re.sub('<start>', '', dialogue)
  204. dialogue = re.sub('(\n|^)[Aa]non:', '\\1You:', dialogue)
  205. dialogue = re.sub('(\n|^)\[CHARACTER\]:', f'\\g<1>{name2}:', dialogue)
  206. idx = [m.start() for m in re.finditer(f"(^|\n)({re.escape(name1)}|{re.escape(name2)}):", dialogue)]
  207. if len(idx) == 0:
  208. return _history
  209. messages = []
  210. for i in range(len(idx)-1):
  211. messages.append(dialogue[idx[i]:idx[i+1]].strip())
  212. messages.append(dialogue[idx[-1]:].strip())
  213. entry = ['', '']
  214. for i in messages:
  215. if i.startswith(f'{name1}:'):
  216. entry[0] = i[len(f'{name1}:'):].strip()
  217. elif i.startswith(f'{name2}:'):
  218. entry[1] = i[len(f'{name2}:'):].strip()
  219. if not (len(entry[0]) == 0 and len(entry[1]) == 0):
  220. _history.append(entry)
  221. entry = ['', '']
  222. print(f"\033[1;32;1m\nDialogue tokenized to:\033[0;37;0m\n", end='')
  223. for row in _history:
  224. for column in row:
  225. print("\n")
  226. for line in column.strip().split('\n'):
  227. print("| "+line+"\n")
  228. print("|\n")
  229. print("------------------------------")
  230. return _history
  231. def save_history(timestamp=True):
  232. if timestamp:
  233. fname = f"{character or ''}{'_' if character else ''}{datetime.now().strftime('%Y%m%d-%H%M%S')}.json"
  234. else:
  235. fname = f"{character or ''}{'_' if character else ''}persistent.json"
  236. if not Path('logs').exists():
  237. Path('logs').mkdir()
  238. with open(Path(f'logs/{fname}'), 'w') as f:
  239. f.write(json.dumps({'data': history['internal'], 'data_visible': history['visible']}, indent=2))
  240. return Path(f'logs/{fname}')
  241. def load_history(file, name1, name2):
  242. global history
  243. file = file.decode('utf-8')
  244. try:
  245. j = json.loads(file)
  246. if 'data' in j:
  247. history['internal'] = j['data']
  248. if 'data_visible' in j:
  249. history['visible'] = j['data_visible']
  250. else:
  251. history['visible'] = copy.deepcopy(history['internal'])
  252. # Compatibility with Pygmalion AI's official web UI
  253. elif 'chat' in j:
  254. history['internal'] = [':'.join(x.split(':')[1:]).strip() for x in j['chat']]
  255. if len(j['chat']) > 0 and j['chat'][0].startswith(f'{name2}:'):
  256. history['internal'] = [['<|BEGIN-VISIBLE-CHAT|>', history['internal'][0]]] + [[history['internal'][i], history['internal'][i+1]] for i in range(1, len(history['internal'])-1, 2)]
  257. history['visible'] = copy.deepcopy(history['internal'])
  258. history['visible'][0][0] = ''
  259. else:
  260. history['internal'] = [[history['internal'][i], history['internal'][i+1]] for i in range(0, len(history['internal'])-1, 2)]
  261. history['visible'] = copy.deepcopy(history['internal'])
  262. except:
  263. history['internal'] = tokenize_dialogue(file, name1, name2)
  264. history['visible'] = copy.deepcopy(history['internal'])
def load_character(_character, name1, name2):
    """Load a character definition (or reset to the Pygmalion defaults).

    Reads characters/<name>.json, builds the context string from the persona
    and scenario, seeds the history with the example dialogue and greeting,
    then restores the character's persistent log if one exists.
    Returns (name2, context, display) where display is HTML in CAI mode.
    """
    global history, character
    context = ""
    history['internal'] = []
    history['visible'] = []
    if _character != 'None':
        character = _character
        data = json.loads(open(Path(f'characters/{_character}.json'), 'r').read())
        name2 = data['char_name']
        if 'char_persona' in data and data['char_persona'] != '':
            context += f"{data['char_name']}'s Persona: {data['char_persona']}\n"
        if 'world_scenario' in data and data['world_scenario'] != '':
            context += f"Scenario: {data['world_scenario']}\n"
        context = f"{context.strip()}\n<START>\n"
        if 'example_dialogue' in data and data['example_dialogue'] != '':
            history['internal'] = tokenize_dialogue(data['example_dialogue'], name1, name2)
        if 'char_greeting' in data and len(data['char_greeting'].strip()) > 0:
            # The greeting is stored behind a placeholder user turn so the
            # prompt builder knows there is no real user message before it.
            history['internal'] += [['<|BEGIN-VISIBLE-CHAT|>', data['char_greeting']]]
            history['visible'] += [['', apply_extensions(data['char_greeting'], "output")]]
        else:
            history['internal'] += [['<|BEGIN-VISIBLE-CHAT|>', "Hello there!"]]
            history['visible'] += [['', "Hello there!"]]
    else:
        character = None
        context = shared.settings['context_pygmalion']
        name2 = shared.settings['name2_pygmalion']

    # Restore this character's persistent log, if one was saved earlier.
    # NOTE(review): when character is None this probes logs/None_persistent.json,
    # which presumably never exists — confirm that is intentional.
    if Path(f'logs/{character}_persistent.json').exists():
        load_history(open(Path(f'logs/{character}_persistent.json'), 'rb').read(), name1, name2)

    if shared.args.cai_chat:
        return name2, context, generate_chat_html(history['visible'], name1, name2, character)
    else:
        return name2, context, history['visible']
  297. def upload_character(json_file, img, tavern=False):
  298. json_file = json_file if type(json_file) == str else json_file.decode('utf-8')
  299. data = json.loads(json_file)
  300. outfile_name = data["char_name"]
  301. i = 1
  302. while Path(f'characters/{outfile_name}.json').exists():
  303. outfile_name = f'{data["char_name"]}_{i:03d}'
  304. i += 1
  305. if tavern:
  306. outfile_name = f'TavernAI-{outfile_name}'
  307. with open(Path(f'characters/{outfile_name}.json'), 'w') as f:
  308. f.write(json_file)
  309. if img is not None:
  310. img = Image.open(io.BytesIO(img))
  311. img.save(Path(f'characters/{outfile_name}.png'))
  312. print(f'New character saved to "characters/{outfile_name}.json".')
  313. return outfile_name
  314. def upload_tavern_character(img, name1, name2):
  315. _img = Image.open(io.BytesIO(img))
  316. _img.getexif()
  317. decoded_string = base64.b64decode(_img.info['chara'])
  318. _json = json.loads(decoded_string)
  319. _json = {"char_name": _json['name'], "char_persona": _json['description'], "char_greeting": _json["first_mes"], "example_dialogue": _json['mes_example'], "world_scenario": _json['scenario']}
  320. _json['example_dialogue'] = _json['example_dialogue'].replace('{{user}}', name1).replace('{{char}}', _json['char_name'])
  321. return upload_character(json.dumps(_json), img, tavern=True)
  322. def upload_your_profile_picture(img):
  323. img = Image.open(io.BytesIO(img))
  324. img.save(Path(f'img_me.png'))
  325. print(f'Profile picture saved to "img_me.png"')