chat.py 20 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433
  1. import base64
  2. import copy
  3. import io
  4. import json
  5. import re
  6. from datetime import datetime
  7. from pathlib import Path
  8. import yaml
  9. from PIL import Image
  10. import modules.extensions as extensions_module
  11. import modules.shared as shared
  12. from modules.extensions import apply_extensions
  13. from modules.html_generator import (fix_newlines, chat_html_wrapper,
  14. make_thumbnail)
  15. from modules.text_generation import (encode, generate_reply,
  16. get_max_prompt_length)
def generate_chat_prompt(user_input, max_new_tokens, name1, name2, context, chat_prompt_size, is_instruct, end_of_turn="", impersonate=False, also_return_rows=False):
    """Assemble the model prompt from the persona context, as much chat
    history as fits the token budget, and the new user input.

    Rows are built newest-first by inserting at index 1 (index 0 is always
    the context row), then trimmed from the oldest end until the encoded
    prompt fits. When `impersonate` is True the prompt ends with the user's
    prefix so the model continues as the user instead of the bot.

    Returns the prompt string, plus the list of rows when
    `also_return_rows` is True.
    """
    user_input = fix_newlines(user_input)
    rows = [f"{context.strip()}\n"]

    # Finding the maximum prompt size
    if shared.soft_prompt:
        # Soft-prompt tokens are prepended elsewhere, so they reduce the
        # budget available for chat history here.
        chat_prompt_size -= shared.soft_prompt_tensor.shape[1]
    max_length = min(get_max_prompt_length(max_new_tokens), chat_prompt_size)

    # Instruct mode separates the speaker label from the message with a
    # newline instead of ": ".
    if is_instruct:
        prefix1 = f"{name1}\n"
        prefix2 = f"{name2}\n"
    else:
        prefix1 = f"{name1}: "
        prefix2 = f"{name2}: "

    # Walk the history backwards, inserting whole exchanges until the
    # token budget is exhausted.
    i = len(shared.history['internal'])-1
    while i >= 0 and len(encode(''.join(rows), max_new_tokens)[0]) < max_length:
        rows.insert(1, f"{prefix2}{shared.history['internal'][i][1].strip()}{end_of_turn}\n")
        string = shared.history['internal'][i][0]
        # '<|BEGIN-VISIBLE-CHAT|>' marks the greeting entry, which has no
        # real user-side message to include.
        if string not in ['', '<|BEGIN-VISIBLE-CHAT|>']:
            rows.insert(1, f"{prefix1}{string.strip()}{end_of_turn}\n")
        i -= 1

    if impersonate:
        rows.append(f"{prefix1.strip() if not is_instruct else prefix1}")
        limit = 2  # always keep the context row and the trailing user prefix
    else:
        # Adding the user message
        if len(user_input) > 0:
            rows.append(f"{prefix1}{user_input}{end_of_turn}\n")

        # Adding the Character prefix
        rows.append(apply_extensions(f"{prefix2.strip() if not is_instruct else prefix2}", "bot_prefix"))
        limit = 3  # context + user message + bot prefix

    # The insertion loop above can overshoot by one exchange; drop the
    # oldest rows until the encoded prompt actually fits.
    while len(rows) > limit and len(encode(''.join(rows), max_new_tokens)[0]) >= max_length:
        rows.pop(1)
    prompt = ''.join(rows)

    if also_return_rows:
        return prompt, rows
    else:
        return prompt
  54. def extract_message_from_reply(reply, name1, name2, stop_at_newline):
  55. next_character_found = False
  56. if stop_at_newline:
  57. lines = reply.split('\n')
  58. reply = lines[0].strip()
  59. if len(lines) > 1:
  60. next_character_found = True
  61. else:
  62. for string in [f"\n{name1}:", f"\n{name2}:"]:
  63. idx = reply.find(string)
  64. if idx != -1:
  65. reply = reply[:idx]
  66. next_character_found = True
  67. # If something like "\nYo" is generated just before "\nYou:"
  68. # is completed, trim it
  69. if not next_character_found:
  70. for string in [f"\n{name1}:", f"\n{name2}:"]:
  71. for j in range(len(string)-1, 0, -1):
  72. if reply[-j:] == string[:j]:
  73. reply = reply[:-j]
  74. break
  75. reply = fix_newlines(reply)
  76. return reply, next_character_found
def chatbot_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, stop_at_newline, chat_prompt_size, chat_generation_attempts=1, regenerate=False, mode="cai-chat", end_of_turn=""):
    """Generate a bot reply to `text`, streaming history snapshots.

    This is a generator: it yields `shared.history['visible']` (a list of
    [user, bot] pairs) after each streamed token batch so the UI can update
    live. It also appends the final exchange to both the internal and
    visible histories as a side effect.
    """
    just_started = True
    eos_token = '\n' if stop_at_newline else None
    name1_original = name1
    if 'pygmalion' in shared.model_name.lower():
        # NOTE(review): presumably Pygmalion models expect the literal user
        # name "You" in prompts — confirm against the model card.
        name1 = "You"

    # Check if any extension wants to hijack this function call
    visible_text = None
    custom_generate_chat_prompt = None
    for extension, _ in extensions_module.iterator():
        if hasattr(extension, 'input_hijack') and extension.input_hijack['state'] == True:
            # One-shot hijack: the flag is cleared as soon as it is consumed.
            extension.input_hijack['state'] = False
            text, visible_text = extension.input_hijack['value']
        if custom_generate_chat_prompt is None and hasattr(extension, 'custom_generate_chat_prompt'):
            custom_generate_chat_prompt = extension.custom_generate_chat_prompt

    if visible_text is None:
        visible_text = text
    text = apply_extensions(text, "input")

    is_instruct = mode == 'instruct'
    # An extension-supplied prompt builder takes precedence over the default.
    if custom_generate_chat_prompt is None:
        prompt = generate_chat_prompt(text, max_new_tokens, name1, name2, context, chat_prompt_size, is_instruct, end_of_turn=end_of_turn)
    else:
        prompt = custom_generate_chat_prompt(text, max_new_tokens, name1, name2, context, chat_prompt_size, is_instruct, end_of_turn=end_of_turn)

    # Yield *Is typing...*
    if not regenerate:
        yield shared.history['visible']+[[visible_text, shared.processing_message]]

    # Generate
    cumulative_reply = ''
    for i in range(chat_generation_attempts):
        reply = None
        # Each attempt resumes from the accumulated reply so far (with a
        # joining space when it is non-empty).
        for reply in generate_reply(f"{prompt}{' ' if len(cumulative_reply) > 0 else ''}{cumulative_reply}", max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, eos_token=eos_token, stopping_strings=[f"\n{name1}:", f"\n{name2}:"]):
            reply = cumulative_reply + reply

            # Extracting the reply
            reply, next_character_found = extract_message_from_reply(reply, name1, name2, stop_at_newline)
            # The visible copy shows the real user name even when the model
            # was prompted with "You" / placeholder tokens.
            visible_reply = re.sub("(<USER>|<user>|{{user}})", name1_original, reply)
            visible_reply = apply_extensions(visible_reply, "output")

            # We need this global variable to handle the Stop event,
            # otherwise gradio gets confused
            if shared.stop_everything:
                return shared.history['visible']

            if just_started:
                just_started = False
                # Reserve a slot in both histories; it is overwritten below
                # on every streamed update.
                shared.history['internal'].append(['', ''])
                shared.history['visible'].append(['', ''])

            shared.history['internal'][-1] = [text, reply]
            shared.history['visible'][-1] = [visible_text, visible_reply]
            if not shared.args.no_stream:
                yield shared.history['visible']
            if next_character_found:
                break

        if reply is not None:
            cumulative_reply = reply

    yield shared.history['visible']
  130. def impersonate_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, stop_at_newline, chat_prompt_size, chat_generation_attempts=1, mode="cai-chat", end_of_turn=""):
  131. eos_token = '\n' if stop_at_newline else None
  132. if 'pygmalion' in shared.model_name.lower():
  133. name1 = "You"
  134. prompt = generate_chat_prompt(text, max_new_tokens, name1, name2, context, chat_prompt_size, impersonate=True, end_of_turn=end_of_turn)
  135. # Yield *Is typing...*
  136. yield shared.processing_message
  137. cumulative_reply = ''
  138. for i in range(chat_generation_attempts):
  139. reply = None
  140. for reply in generate_reply(f"{prompt}{' ' if len(cumulative_reply) > 0 else ''}{cumulative_reply}", max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, eos_token=eos_token, stopping_strings=[f"\n{name1}:", f"\n{name2}:"]):
  141. reply = cumulative_reply + reply
  142. reply, next_character_found = extract_message_from_reply(reply, name1, name2, stop_at_newline)
  143. yield reply
  144. if next_character_found:
  145. break
  146. if reply is not None:
  147. cumulative_reply = reply
  148. yield reply
  149. def cai_chatbot_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, stop_at_newline, chat_prompt_size, chat_generation_attempts=1, mode="cai-chat", end_of_turn=""):
  150. for history in chatbot_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, stop_at_newline, chat_prompt_size, chat_generation_attempts, regenerate=False, mode=mode, end_of_turn=end_of_turn):
  151. yield chat_html_wrapper(history, name1, name2, mode)
def regenerate_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, stop_at_newline, chat_prompt_size, chat_generation_attempts=1, mode="cai-chat", end_of_turn=""):
    """Discard the last bot reply and generate a new one for the same user
    message, yielding updated chat HTML as it streams.

    When there is nothing to regenerate (only the greeting, or an empty
    history), the current chat is re-rendered unchanged.
    """
    if (shared.character != 'None' and len(shared.history['visible']) == 1) or len(shared.history['internal']) == 0:
        yield chat_html_wrapper(shared.history['visible'], name1, name2, mode)
    else:
        # Pop the exchange being redone; chatbot_wrapper() will append a
        # fresh slot for the regenerated reply.
        last_visible = shared.history['visible'].pop()
        last_internal = shared.history['internal'].pop()

        # Yield '*Is typing...*'
        yield chat_html_wrapper(shared.history['visible']+[[last_visible[0], shared.processing_message]], name1, name2, mode)
        for history in chatbot_wrapper(last_internal[0], max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, stop_at_newline, chat_prompt_size, chat_generation_attempts, regenerate=True, mode=mode, end_of_turn=end_of_turn):
            # Keep the original visible user message; only the reply changes.
            shared.history['visible'][-1] = [last_visible[0], history[-1][1]]
            yield chat_html_wrapper(shared.history['visible'], name1, name2, mode)
  163. def remove_last_message(name1, name2, mode):
  164. if len(shared.history['visible']) > 0 and shared.history['internal'][-1][0] != '<|BEGIN-VISIBLE-CHAT|>':
  165. last = shared.history['visible'].pop()
  166. shared.history['internal'].pop()
  167. else:
  168. last = ['', '']
  169. return chat_html_wrapper(shared.history['visible'], name1, name2, mode), last[0]
  170. def send_last_reply_to_input():
  171. if len(shared.history['internal']) > 0:
  172. return shared.history['internal'][-1][1]
  173. else:
  174. return ''
  175. def replace_last_reply(text, name1, name2, mode):
  176. if len(shared.history['visible']) > 0:
  177. shared.history['visible'][-1][1] = text
  178. shared.history['internal'][-1][1] = apply_extensions(text, "input")
  179. return chat_html_wrapper(shared.history['visible'], name1, name2, mode)
def clear_html():
    """Return the HTML for an empty chat window."""
    # NOTE(review): other call sites pass a `mode` argument to
    # chat_html_wrapper — confirm the 3-argument form is intentional here.
    return chat_html_wrapper([], "", "")
  182. def clear_chat_log(name1, name2, greeting, mode):
  183. shared.history['visible'] = []
  184. shared.history['internal'] = []
  185. if greeting != '':
  186. shared.history['internal'] += [['<|BEGIN-VISIBLE-CHAT|>', greeting]]
  187. shared.history['visible'] += [['', apply_extensions(greeting, "output")]]
  188. return chat_html_wrapper(shared.history['visible'], name1, name2, mode)
def redraw_html(name1, name2, mode):
    """Re-render the current visible history without modifying it."""
    return chat_html_wrapper(shared.history['visible'], name1, name2, mode)
  191. def tokenize_dialogue(dialogue, name1, name2, mode):
  192. history = []
  193. dialogue = re.sub('<START>', '', dialogue)
  194. dialogue = re.sub('<start>', '', dialogue)
  195. dialogue = re.sub('(\n|^)[Aa]non:', '\\1You:', dialogue)
  196. dialogue = re.sub('(\n|^)\[CHARACTER\]:', f'\\g<1>{name2}:', dialogue)
  197. idx = [m.start() for m in re.finditer(f"(^|\n)({re.escape(name1)}|{re.escape(name2)}):", dialogue)]
  198. if len(idx) == 0:
  199. return history
  200. messages = []
  201. for i in range(len(idx)-1):
  202. messages.append(dialogue[idx[i]:idx[i+1]].strip())
  203. messages.append(dialogue[idx[-1]:].strip())
  204. entry = ['', '']
  205. for i in messages:
  206. if i.startswith(f'{name1}:'):
  207. entry[0] = i[len(f'{name1}:'):].strip()
  208. elif i.startswith(f'{name2}:'):
  209. entry[1] = i[len(f'{name2}:'):].strip()
  210. if not (len(entry[0]) == 0 and len(entry[1]) == 0):
  211. history.append(entry)
  212. entry = ['', '']
  213. print("\033[1;32;1m\nDialogue tokenized to:\033[0;37;0m\n", end='')
  214. for row in history:
  215. for column in row:
  216. print("\n")
  217. for line in column.strip().split('\n'):
  218. print("| "+line+"\n")
  219. print("|\n")
  220. print("------------------------------")
  221. return history
  222. def save_history(timestamp=True):
  223. if timestamp:
  224. fname = f"{shared.character}_{datetime.now().strftime('%Y%m%d-%H%M%S')}.json"
  225. else:
  226. fname = f"{shared.character}_persistent.json"
  227. if not Path('logs').exists():
  228. Path('logs').mkdir()
  229. with open(Path(f'logs/{fname}'), 'w', encoding='utf-8') as f:
  230. f.write(json.dumps({'data': shared.history['internal'], 'data_visible': shared.history['visible']}, indent=2))
  231. return Path(f'logs/{fname}')
  232. def load_history(file, name1, name2):
  233. file = file.decode('utf-8')
  234. try:
  235. j = json.loads(file)
  236. if 'data' in j:
  237. shared.history['internal'] = j['data']
  238. if 'data_visible' in j:
  239. shared.history['visible'] = j['data_visible']
  240. else:
  241. shared.history['visible'] = copy.deepcopy(shared.history['internal'])
  242. # Compatibility with Pygmalion AI's official web UI
  243. elif 'chat' in j:
  244. shared.history['internal'] = [':'.join(x.split(':')[1:]).strip() for x in j['chat']]
  245. if len(j['chat']) > 0 and j['chat'][0].startswith(f'{name2}:'):
  246. shared.history['internal'] = [['<|BEGIN-VISIBLE-CHAT|>', shared.history['internal'][0]]] + [[shared.history['internal'][i], shared.history['internal'][i+1]] for i in range(1, len(shared.history['internal'])-1, 2)]
  247. shared.history['visible'] = copy.deepcopy(shared.history['internal'])
  248. shared.history['visible'][0][0] = ''
  249. else:
  250. shared.history['internal'] = [[shared.history['internal'][i], shared.history['internal'][i+1]] for i in range(0, len(shared.history['internal'])-1, 2)]
  251. shared.history['visible'] = copy.deepcopy(shared.history['internal'])
  252. except:
  253. shared.history['internal'] = tokenize_dialogue(file, name1, name2)
  254. shared.history['visible'] = copy.deepcopy(shared.history['internal'])
  255. def replace_character_names(text, name1, name2):
  256. text = text.replace('{{user}}', name1).replace('{{char}}', name2)
  257. return text.replace('<USER>', name1).replace('<BOT>', name2)
  258. def build_pygmalion_style_context(data):
  259. context = ""
  260. if 'char_persona' in data and data['char_persona'] != '':
  261. context += f"{data['char_name']}'s Persona: {data['char_persona']}\n"
  262. if 'world_scenario' in data and data['world_scenario'] != '':
  263. context += f"Scenario: {data['world_scenario']}\n"
  264. context = f"{context.strip()}\n<START>\n"
  265. return context
  266. def generate_pfp_cache(character):
  267. cache_folder = Path("cache")
  268. if not cache_folder.exists():
  269. cache_folder.mkdir()
  270. for path in [Path(f"characters/{character}.{extension}") for extension in ['png', 'jpg', 'jpeg']]:
  271. if path.exists():
  272. img = make_thumbnail(Image.open(path))
  273. img.save(Path('cache/pfp_character.png'), format='PNG')
  274. return img
  275. return None
  276. def load_character(character, name1, name2, mode):
  277. shared.character = character
  278. shared.history['internal'] = []
  279. shared.history['visible'] = []
  280. context = greeting = end_of_turn = ""
  281. greeting_field = 'greeting'
  282. picture = None
  283. # Deleting the profile picture cache, if any
  284. if Path("cache/pfp_character.png").exists():
  285. Path("cache/pfp_character.png").unlink()
  286. if character != 'None':
  287. folder = 'characters' if not mode == 'instruct' else 'characters/instruction-following'
  288. picture = generate_pfp_cache(character)
  289. for extension in ["yml", "yaml", "json"]:
  290. filepath = Path(f'{folder}/{character}.{extension}')
  291. if filepath.exists():
  292. break
  293. file_contents = open(filepath, 'r', encoding='utf-8').read()
  294. data = json.loads(file_contents) if extension == "json" else yaml.safe_load(file_contents)
  295. if 'your_name' in data and data['your_name'] != '':
  296. name1 = data['your_name']
  297. name2 = data['name'] if 'name' in data else data['char_name']
  298. for field in ['context', 'greeting', 'example_dialogue', 'char_persona', 'char_greeting', 'world_scenario']:
  299. if field in data:
  300. data[field] = replace_character_names(data[field], name1, name2)
  301. if 'context' in data:
  302. context = f"{data['context'].strip()}\n\n"
  303. elif "char_persona" in data:
  304. context = build_pygmalion_style_context(data)
  305. greeting_field = 'char_greeting'
  306. if 'example_dialogue' in data:
  307. context += f"{data['example_dialogue'].strip()}\n"
  308. if greeting_field in data:
  309. greeting = data[greeting_field]
  310. if 'end_of_turn' in data:
  311. end_of_turn = data['end_of_turn']
  312. else:
  313. context = shared.settings['context']
  314. name2 = shared.settings['name2']
  315. greeting = shared.settings['greeting']
  316. end_of_turn = shared.settings['end_of_turn']
  317. if Path(f'logs/{shared.character}_persistent.json').exists():
  318. load_history(open(Path(f'logs/{shared.character}_persistent.json'), 'rb').read(), name1, name2)
  319. elif greeting != "":
  320. shared.history['internal'] += [['<|BEGIN-VISIBLE-CHAT|>', greeting]]
  321. shared.history['visible'] += [['', apply_extensions(greeting, "output")]]
  322. return name1, name2, picture, greeting, context, end_of_turn, chat_html_wrapper(shared.history['visible'], name1, name2, mode, reset_cache=True)
def load_default_history(name1, name2):
    """Reset the chat to the no-character default state.

    Delegates to load_character() with "None"; the return value is
    discarded — only the side effects on shared state matter here.
    """
    load_character("None", name1, name2, "chat")
  325. def upload_character(json_file, img, tavern=False):
  326. json_file = json_file if type(json_file) == str else json_file.decode('utf-8')
  327. data = json.loads(json_file)
  328. outfile_name = data["char_name"]
  329. i = 1
  330. while Path(f'characters/{outfile_name}.json').exists():
  331. outfile_name = f'{data["char_name"]}_{i:03d}'
  332. i += 1
  333. if tavern:
  334. outfile_name = f'TavernAI-{outfile_name}'
  335. with open(Path(f'characters/{outfile_name}.json'), 'w', encoding='utf-8') as f:
  336. f.write(json_file)
  337. if img is not None:
  338. img = Image.open(io.BytesIO(img))
  339. img.save(Path(f'characters/{outfile_name}.png'))
  340. print(f'New character saved to "characters/{outfile_name}.json".')
  341. return outfile_name
  342. def upload_tavern_character(img, name1, name2):
  343. _img = Image.open(io.BytesIO(img))
  344. _img.getexif()
  345. decoded_string = base64.b64decode(_img.info['chara'])
  346. _json = json.loads(decoded_string)
  347. _json = {"char_name": _json['name'], "char_persona": _json['description'], "char_greeting": _json["first_mes"], "example_dialogue": _json['mes_example'], "world_scenario": _json['scenario']}
  348. return upload_character(json.dumps(_json), img, tavern=True)
  349. def upload_your_profile_picture(img, name1, name2, mode):
  350. cache_folder = Path("cache")
  351. if not cache_folder.exists():
  352. cache_folder.mkdir()
  353. if img == None:
  354. if Path("cache/pfp_me.png").exists():
  355. Path("cache/pfp_me.png").unlink()
  356. else:
  357. img = make_thumbnail(img)
  358. img.save(Path('cache/pfp_me.png'))
  359. print('Profile picture saved to "cache/pfp_me.png"')
  360. return chat_html_wrapper(shared.history['visible'], name1, name2, mode, reset_cache=True)