chat.py 19 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442
  1. import base64
  2. import copy
  3. import io
  4. import json
  5. import re
  6. from datetime import datetime
  7. from pathlib import Path
  8. import yaml
  9. from PIL import Image
  10. import modules.extensions as extensions_module
  11. import modules.shared as shared
  12. from modules.extensions import apply_extensions
  13. from modules.html_generator import (fix_newlines, generate_chat_html,
  14. make_thumbnail)
  15. from modules.text_generation import (encode, generate_reply,
  16. get_max_prompt_length)
  17. def generate_chat_output(history, name1, name2):
  18. if shared.args.cai_chat:
  19. return generate_chat_html(history, name1, name2)
  20. else:
  21. return history
def generate_chat_prompt(user_input, max_new_tokens, name1, name2, context, chat_prompt_size, impersonate=False, also_return_rows=False):
    """Build the chat prompt: context + as much recent history as fits + the new turn.

    History rows are added newest-first (inserted at index 1, just after the
    context row) until the encoded prompt would exceed the token budget, then
    the oldest rows are popped again if the final prompt is still too long.
    With impersonate=True the prompt ends with "name1:" so the model speaks
    as the user. Returns the prompt string, or (prompt, rows) when
    also_return_rows is set.
    """
    user_input = fix_newlines(user_input)
    rows = [f"{context.strip()}\n"]

    # The soft prompt occupies part of the context window, so shrink the budget.
    if shared.soft_prompt:
        chat_prompt_size -= shared.soft_prompt_tensor.shape[1]
    max_length = min(get_max_prompt_length(max_new_tokens), chat_prompt_size)

    # Walk history backwards, inserting each exchange right after the context
    # row, until the encoded prompt reaches the budget.
    i = len(shared.history['internal'])-1
    while i >= 0 and len(encode(''.join(rows), max_new_tokens)[0]) < max_length:
        rows.insert(1, f"{name2}: {shared.history['internal'][i][1].strip()}\n")
        prev_user_input = shared.history['internal'][i][0]
        # The greeting placeholder has no real user message to show.
        if prev_user_input not in ['', '<|BEGIN-VISIBLE-CHAT|>']:
            rows.insert(1, f"{name1}: {prev_user_input.strip()}\n")
        i -= 1

    if not impersonate:
        if len(user_input) > 0:
            rows.append(f"{name1}: {user_input}\n")
        # Extensions may rewrite the bot prefix (e.g. to inject a persona tag).
        rows.append(apply_extensions(f"{name2}:", "bot_prefix"))
        limit = 3  # keep at least context + user input + bot prefix
    else:
        rows.append(f"{name1}:")
        limit = 2  # keep at least context + user prefix

    # Trim the oldest history rows (index 1) if we overshot the budget.
    while len(rows) > limit and len(encode(''.join(rows), max_new_tokens)[0]) >= max_length:
        rows.pop(1)

    prompt = ''.join(rows)
    if also_return_rows:
        return prompt, rows
    else:
        return prompt
  50. def extract_message_from_reply(reply, name1, name2, stop_at_newline):
  51. next_character_found = False
  52. if stop_at_newline:
  53. lines = reply.split('\n')
  54. reply = lines[0].strip()
  55. if len(lines) > 1:
  56. next_character_found = True
  57. else:
  58. for string in [f"\n{name1}:", f"\n{name2}:"]:
  59. idx = reply.find(string)
  60. if idx != -1:
  61. reply = reply[:idx]
  62. next_character_found = True
  63. # If something like "\nYo" is generated just before "\nYou:"
  64. # is completed, trim it
  65. if not next_character_found:
  66. for string in [f"\n{name1}:", f"\n{name2}:"]:
  67. for j in range(len(string)-1, 0, -1):
  68. if reply[-j:] == string[:j]:
  69. reply = reply[:-j]
  70. break
  71. reply = fix_newlines(reply)
  72. return reply, next_character_found
def chatbot_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, stop_at_newline, chat_prompt_size, chat_generation_attempts=1, regenerate=False):
    """Generate a chat reply for *text*, yielding shared.history['visible'] as it streams.

    Mutates shared.history (both 'internal' and 'visible') in place. Each of
    the chat_generation_attempts continues from the text accumulated so far,
    stopping early once the model starts the next speaker's turn.
    """
    just_started = True
    eos_token = '\n' if stop_at_newline else None
    name1_original = name1
    # Pygmalion models were trained with the user hard-coded as "You".
    if 'pygmalion' in shared.model_name.lower():
        name1 = "You"

    # Check if any extension wants to hijack this function call
    visible_text = None
    custom_generate_chat_prompt = None
    for extension, _ in extensions_module.iterator():
        if hasattr(extension, 'input_hijack') and extension.input_hijack['state'] == True:
            # One-shot hijack: consume and reset the flag.
            extension.input_hijack['state'] = False
            text, visible_text = extension.input_hijack['value']
        if custom_generate_chat_prompt is None and hasattr(extension, 'custom_generate_chat_prompt'):
            custom_generate_chat_prompt = extension.custom_generate_chat_prompt

    if visible_text is None:
        visible_text = text
        if shared.args.chat:
            visible_text = visible_text.replace('\n', '<br>')
    text = apply_extensions(text, "input")

    if custom_generate_chat_prompt is None:
        prompt = generate_chat_prompt(text, max_new_tokens, name1, name2, context, chat_prompt_size)
    else:
        prompt = custom_generate_chat_prompt(text, max_new_tokens, name1, name2, context, chat_prompt_size)

    # Yield *Is typing...*
    if not regenerate:
        yield shared.history['visible']+[[visible_text, shared.processing_message]]

    # Generate
    cumulative_reply = ''
    for i in range(chat_generation_attempts):
        reply = None
        for reply in generate_reply(f"{prompt}{' ' if len(cumulative_reply) > 0 else ''}{cumulative_reply}", max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, eos_token=eos_token, stopping_strings=[f"\n{name1}:", f"\n{name2}:"]):
            reply = cumulative_reply + reply

            # Extracting the reply
            reply, next_character_found = extract_message_from_reply(reply, name1, name2, stop_at_newline)
            # Restore the caller's user name in the displayed text.
            visible_reply = re.sub("(<USER>|<user>|{{user}})", name1_original, reply)
            visible_reply = apply_extensions(visible_reply, "output")
            if shared.args.chat:
                visible_reply = visible_reply.replace('\n', '<br>')

            # We need this global variable to handle the Stop event,
            # otherwise gradio gets confused
            if shared.stop_everything:
                return shared.history['visible']
            if just_started:
                just_started = False
                # Reserve a slot that the streaming updates below overwrite.
                shared.history['internal'].append(['', ''])
                shared.history['visible'].append(['', ''])

            shared.history['internal'][-1] = [text, reply]
            shared.history['visible'][-1] = [visible_text, visible_reply]
            if not shared.args.no_stream:
                yield shared.history['visible']
            if next_character_found:
                break

        if reply is not None:
            cumulative_reply = reply

    yield shared.history['visible']
  129. def impersonate_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, stop_at_newline, chat_prompt_size, chat_generation_attempts=1):
  130. eos_token = '\n' if stop_at_newline else None
  131. if 'pygmalion' in shared.model_name.lower():
  132. name1 = "You"
  133. prompt = generate_chat_prompt(text, max_new_tokens, name1, name2, context, chat_prompt_size, impersonate=True)
  134. # Yield *Is typing...*
  135. yield shared.processing_message
  136. cumulative_reply = ''
  137. for i in range(chat_generation_attempts):
  138. reply = None
  139. for reply in generate_reply(f"{prompt}{' ' if len(cumulative_reply) > 0 else ''}{cumulative_reply}", max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, eos_token=eos_token, stopping_strings=[f"\n{name1}:", f"\n{name2}:"]):
  140. reply = cumulative_reply + reply
  141. reply, next_character_found = extract_message_from_reply(reply, name1, name2, stop_at_newline)
  142. yield reply
  143. if next_character_found:
  144. break
  145. if reply is not None:
  146. cumulative_reply = reply
  147. yield reply
  148. def cai_chatbot_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, stop_at_newline, chat_prompt_size, chat_generation_attempts=1):
  149. for history in chatbot_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, stop_at_newline, chat_prompt_size, chat_generation_attempts):
  150. yield generate_chat_html(history, name1, name2)
def regenerate_wrapper(text, max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, stop_at_newline, chat_prompt_size, chat_generation_attempts=1):
    """Pop the last exchange and regenerate the bot's reply for it, yielding updates."""
    # Nothing to regenerate: only the greeting (or an empty log) is present.
    if (shared.character != 'None' and len(shared.history['visible']) == 1) or len(shared.history['internal']) == 0:
        yield generate_chat_output(shared.history['visible'], name1, name2)
    else:
        # Remove the last exchange; chatbot_wrapper will append a fresh one.
        last_visible = shared.history['visible'].pop()
        last_internal = shared.history['internal'].pop()
        # Yield '*Is typing...*'
        yield generate_chat_output(shared.history['visible']+[[last_visible[0], shared.processing_message]], name1, name2)
        for history in chatbot_wrapper(last_internal[0], max_new_tokens, do_sample, temperature, top_p, typical_p, repetition_penalty, encoder_repetition_penalty, top_k, min_length, no_repeat_ngram_size, num_beams, penalty_alpha, length_penalty, early_stopping, seed, name1, name2, context, stop_at_newline, chat_prompt_size, chat_generation_attempts, regenerate=True):
            # Keep the user's original visible message; only the reply changes.
            if shared.args.cai_chat:
                shared.history['visible'][-1] = [last_visible[0], history[-1][1]]
            else:
                shared.history['visible'][-1] = (last_visible[0], history[-1][1])
            yield generate_chat_output(shared.history['visible'], name1, name2)
  165. def remove_last_message(name1, name2):
  166. if len(shared.history['visible']) > 0 and shared.history['internal'][-1][0] != '<|BEGIN-VISIBLE-CHAT|>':
  167. last = shared.history['visible'].pop()
  168. shared.history['internal'].pop()
  169. else:
  170. last = ['', '']
  171. if shared.args.cai_chat:
  172. return generate_chat_html(shared.history['visible'], name1, name2), last[0]
  173. else:
  174. return shared.history['visible'], last[0]
  175. def send_last_reply_to_input():
  176. if len(shared.history['internal']) > 0:
  177. return shared.history['internal'][-1][1]
  178. else:
  179. return ''
  180. def replace_last_reply(text, name1, name2):
  181. if len(shared.history['visible']) > 0:
  182. if shared.args.cai_chat:
  183. shared.history['visible'][-1][1] = text
  184. else:
  185. shared.history['visible'][-1] = (shared.history['visible'][-1][0], text)
  186. shared.history['internal'][-1][1] = apply_extensions(text, "input")
  187. return generate_chat_output(shared.history['visible'], name1, name2)
  188. def clear_html():
  189. return generate_chat_html([], "", "")
  190. def clear_chat_log(name1, name2, greeting):
  191. shared.history['visible'] = []
  192. shared.history['internal'] = []
  193. if greeting != '':
  194. shared.history['internal'] += [['<|BEGIN-VISIBLE-CHAT|>', greeting]]
  195. shared.history['visible'] += [['', apply_extensions(greeting, "output")]]
  196. return generate_chat_output(shared.history['visible'], name1, name2)
  197. def redraw_html(name1, name2):
  198. return generate_chat_html(shared.history['visible'], name1, name2)
  199. def tokenize_dialogue(dialogue, name1, name2):
  200. history = []
  201. dialogue = re.sub('<START>', '', dialogue)
  202. dialogue = re.sub('<start>', '', dialogue)
  203. dialogue = re.sub('(\n|^)[Aa]non:', '\\1You:', dialogue)
  204. dialogue = re.sub('(\n|^)\[CHARACTER\]:', f'\\g<1>{name2}:', dialogue)
  205. idx = [m.start() for m in re.finditer(f"(^|\n)({re.escape(name1)}|{re.escape(name2)}):", dialogue)]
  206. if len(idx) == 0:
  207. return history
  208. messages = []
  209. for i in range(len(idx)-1):
  210. messages.append(dialogue[idx[i]:idx[i+1]].strip())
  211. messages.append(dialogue[idx[-1]:].strip())
  212. entry = ['', '']
  213. for i in messages:
  214. if i.startswith(f'{name1}:'):
  215. entry[0] = i[len(f'{name1}:'):].strip()
  216. elif i.startswith(f'{name2}:'):
  217. entry[1] = i[len(f'{name2}:'):].strip()
  218. if not (len(entry[0]) == 0 and len(entry[1]) == 0):
  219. history.append(entry)
  220. entry = ['', '']
  221. print("\033[1;32;1m\nDialogue tokenized to:\033[0;37;0m\n", end='')
  222. for row in history:
  223. for column in row:
  224. print("\n")
  225. for line in column.strip().split('\n'):
  226. print("| "+line+"\n")
  227. print("|\n")
  228. print("------------------------------")
  229. return history
  230. def save_history(timestamp=True):
  231. if timestamp:
  232. fname = f"{shared.character}_{datetime.now().strftime('%Y%m%d-%H%M%S')}.json"
  233. else:
  234. fname = f"{shared.character}_persistent.json"
  235. if not Path('logs').exists():
  236. Path('logs').mkdir()
  237. with open(Path(f'logs/{fname}'), 'w', encoding='utf-8') as f:
  238. f.write(json.dumps({'data': shared.history['internal'], 'data_visible': shared.history['visible']}, indent=2))
  239. return Path(f'logs/{fname}')
  240. def load_history(file, name1, name2):
  241. file = file.decode('utf-8')
  242. try:
  243. j = json.loads(file)
  244. if 'data' in j:
  245. shared.history['internal'] = j['data']
  246. if 'data_visible' in j:
  247. shared.history['visible'] = j['data_visible']
  248. else:
  249. shared.history['visible'] = copy.deepcopy(shared.history['internal'])
  250. # Compatibility with Pygmalion AI's official web UI
  251. elif 'chat' in j:
  252. shared.history['internal'] = [':'.join(x.split(':')[1:]).strip() for x in j['chat']]
  253. if len(j['chat']) > 0 and j['chat'][0].startswith(f'{name2}:'):
  254. shared.history['internal'] = [['<|BEGIN-VISIBLE-CHAT|>', shared.history['internal'][0]]] + [[shared.history['internal'][i], shared.history['internal'][i+1]] for i in range(1, len(shared.history['internal'])-1, 2)]
  255. shared.history['visible'] = copy.deepcopy(shared.history['internal'])
  256. shared.history['visible'][0][0] = ''
  257. else:
  258. shared.history['internal'] = [[shared.history['internal'][i], shared.history['internal'][i+1]] for i in range(0, len(shared.history['internal'])-1, 2)]
  259. shared.history['visible'] = copy.deepcopy(shared.history['internal'])
  260. except:
  261. shared.history['internal'] = tokenize_dialogue(file, name1, name2)
  262. shared.history['visible'] = copy.deepcopy(shared.history['internal'])
  263. def replace_character_names(text, name1, name2):
  264. text = text.replace('{{user}}', name1).replace('{{char}}', name2)
  265. return text.replace('<USER>', name1).replace('<BOT>', name2)
  266. def build_pygmalion_style_context(data):
  267. context = ""
  268. if 'char_persona' in data and data['char_persona'] != '':
  269. context += f"{data['char_name']}'s Persona: {data['char_persona']}\n"
  270. if 'world_scenario' in data and data['world_scenario'] != '':
  271. context += f"Scenario: {data['world_scenario']}\n"
  272. context = f"{context.strip()}\n<START>\n"
  273. return context
  274. def generate_pfp_cache(character):
  275. cache_folder = Path("cache")
  276. if not cache_folder.exists():
  277. cache_folder.mkdir()
  278. for path in [Path(f"characters/{character}.{extension}") for extension in ['png', 'jpg', 'jpeg']]:
  279. if path.exists():
  280. img = make_thumbnail(Image.open(path))
  281. img.save(Path('cache/pfp_character.png'), format='PNG')
  282. return img
  283. return None
  284. def load_character(character, name1, name2):
  285. shared.character = character
  286. shared.history['internal'] = []
  287. shared.history['visible'] = []
  288. greeting = ""
  289. picture = None
  290. # Deleting the profile picture cache, if any
  291. if Path("cache/pfp_character.png").exists():
  292. Path("cache/pfp_character.png").unlink()
  293. if character != 'None':
  294. picture = generate_pfp_cache(character)
  295. for extension in ["yml", "yaml", "json"]:
  296. filepath = Path(f'characters/{character}.{extension}')
  297. if filepath.exists():
  298. break
  299. file_contents = open(filepath, 'r', encoding='utf-8').read()
  300. data = json.loads(file_contents) if extension == "json" else yaml.safe_load(file_contents)
  301. if 'your_name' in data and data['your_name'] != '':
  302. name1 = data['your_name']
  303. name2 = data['name'] if 'name' in data else data['char_name']
  304. for field in ['context', 'greeting', 'example_dialogue', 'char_persona', 'char_greeting', 'world_scenario']:
  305. if field in data:
  306. data[field] = replace_character_names(data[field], name1, name2)
  307. if 'context' in data:
  308. context = f"{data['context'].strip()}\n\n"
  309. greeting_field = 'greeting'
  310. else:
  311. context = build_pygmalion_style_context(data)
  312. greeting_field = 'char_greeting'
  313. if 'example_dialogue' in data and data['example_dialogue'] != '':
  314. context += f"{data['example_dialogue'].strip()}\n"
  315. if greeting_field in data and len(data[greeting_field].strip()) > 0:
  316. greeting = data[greeting_field]
  317. else:
  318. context = shared.settings['context']
  319. name2 = shared.settings['name2']
  320. greeting = shared.settings['greeting']
  321. if Path(f'logs/{shared.character}_persistent.json').exists():
  322. load_history(open(Path(f'logs/{shared.character}_persistent.json'), 'rb').read(), name1, name2)
  323. elif greeting != "":
  324. shared.history['internal'] += [['<|BEGIN-VISIBLE-CHAT|>', greeting]]
  325. shared.history['visible'] += [['', apply_extensions(greeting, "output")]]
  326. if shared.args.cai_chat:
  327. return name1, name2, picture, greeting, context, generate_chat_html(shared.history['visible'], name1, name2, reset_cache=True)
  328. else:
  329. return name1, name2, picture, greeting, context, shared.history['visible']
def load_default_history(name1, name2):
    """Reset the chat to the default (no character) state."""
    load_character("None", name1, name2)
  332. def upload_character(json_file, img, tavern=False):
  333. json_file = json_file if type(json_file) == str else json_file.decode('utf-8')
  334. data = json.loads(json_file)
  335. outfile_name = data["char_name"]
  336. i = 1
  337. while Path(f'characters/{outfile_name}.json').exists():
  338. outfile_name = f'{data["char_name"]}_{i:03d}'
  339. i += 1
  340. if tavern:
  341. outfile_name = f'TavernAI-{outfile_name}'
  342. with open(Path(f'characters/{outfile_name}.json'), 'w', encoding='utf-8') as f:
  343. f.write(json_file)
  344. if img is not None:
  345. img = Image.open(io.BytesIO(img))
  346. img.save(Path(f'characters/{outfile_name}.png'))
  347. print(f'New character saved to "characters/{outfile_name}.json".')
  348. return outfile_name
  349. def upload_tavern_character(img, name1, name2):
  350. _img = Image.open(io.BytesIO(img))
  351. _img.getexif()
  352. decoded_string = base64.b64decode(_img.info['chara'])
  353. _json = json.loads(decoded_string)
  354. _json = {"char_name": _json['name'], "char_persona": _json['description'], "char_greeting": _json["first_mes"], "example_dialogue": _json['mes_example'], "world_scenario": _json['scenario']}
  355. return upload_character(json.dumps(_json), img, tavern=True)
  356. def upload_your_profile_picture(img, name1, name2):
  357. cache_folder = Path("cache")
  358. if not cache_folder.exists():
  359. cache_folder.mkdir()
  360. if img == None:
  361. if Path("cache/pfp_me.png").exists():
  362. Path("cache/pfp_me.png").unlink()
  363. else:
  364. img = make_thumbnail(img)
  365. img.save(Path('cache/pfp_me.png'))
  366. print('Profile picture saved to "cache/pfp_me.png"')
  367. if shared.args.cai_chat:
  368. return generate_chat_html(shared.history['visible'], name1, name2, reset_cache=True)
  369. else:
  370. return shared.history['visible']