@@ -1,3 +1,7 @@
+import os
+
+os.environ['GRADIO_ANALYTICS_ENABLED'] = 'False'
+
 import io
 import json
 import re
@@ -8,10 +12,11 @@ from datetime import datetime
 from pathlib import Path
 
 import gradio as gr
+from PIL import Image
 
 import modules.extensions as extensions_module
-from modules import chat, shared, training, ui
-from modules.html_generator import generate_chat_html
+from modules import chat, shared, training, ui, api
+from modules.html_generator import chat_html_wrapper
 from modules.LoRA import add_lora_to_model
 from modules.models import load_model, load_soft_prompt
 from modules.text_generation import (clear_torch_cache, generate_reply,
@@ -47,6 +52,13 @@ def get_available_prompts():
 
 def get_available_characters():
     paths = (x for x in Path('characters').iterdir() if x.suffix in ('.json', '.yaml', '.yml'))
+    return ['None'] + sorted(set((k.stem for k in paths if k.stem != "instruction-following")), key=str.lower)
+
+def get_available_instruction_templates():
+    path = "characters/instruction-following"
+    paths = []
+    if os.path.exists(path):
+        paths = (x for x in Path(path).iterdir() if x.suffix in ('.json', '.yaml', '.yml'))
     return ['None'] + sorted(set((k.stem for k in paths)), key=str.lower)
 
 def get_available_extensions():
@@ -76,7 +88,7 @@ def load_lora_wrapper(selected_lora):
     add_lora_to_model(selected_lora)
     return selected_lora
 
-def load_preset_values(preset_menu, return_dict=False):
+def load_preset_values(preset_menu, state, return_dict=False):
     generate_params = {
         'do_sample': True,
         'temperature': 1,
@@ -98,13 +110,13 @@ def load_preset_values(preset_menu, return_dict=False):
         i = i.rstrip(',').strip().split('=')
         if len(i) == 2 and i[0].strip() != 'tokens':
             generate_params[i[0].strip()] = eval(i[1].strip())
-
     generate_params['temperature'] = min(1.99, generate_params['temperature'])
 
     if return_dict:
         return generate_params
     else:
-        return generate_params['do_sample'], generate_params['temperature'], generate_params['top_p'], generate_params['typical_p'], generate_params['repetition_penalty'], generate_params['encoder_repetition_penalty'], generate_params['top_k'], generate_params['min_length'], generate_params['no_repeat_ngram_size'], generate_params['num_beams'], generate_params['penalty_alpha'], generate_params['length_penalty'], generate_params['early_stopping']
+        state.update(generate_params)
+        return state, *[generate_params[k] for k in ['do_sample', 'temperature', 'top_p', 'typical_p', 'repetition_penalty', 'encoder_repetition_penalty', 'top_k', 'min_length', 'no_repeat_ngram_size', 'num_beams', 'penalty_alpha', 'length_penalty', 'early_stopping']]
 
 def upload_soft_prompt(file):
     with zipfile.ZipFile(io.BytesIO(file)) as zf:
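Note for reviewers: `load_preset_values` now threads a session-state dict through the call instead of returning a bare 13-tuple. A minimal standalone sketch of that flow; the preset text and starting values here are made up for illustration:

```python
# Sketch only: mirrors the parsing loop above with a hypothetical preset.
preset = """do_sample=True,
temperature=0.7,
top_p=0.9"""

generate_params = {'do_sample': True, 'temperature': 1, 'top_p': 1}
for i in preset.splitlines():
    i = i.rstrip(',').strip().split('=')
    if len(i) == 2 and i[0].strip() != 'tokens':
        generate_params[i[0].strip()] = eval(i[1].strip())

# The new signature merges the preset into the caller's state dict, so a
# single state object stays authoritative for all generation parameters:
state = {'max_new_tokens': 200, 'seed': -1}
state.update(generate_params)
print(state['temperature'], state['max_new_tokens'])  # 0.7 200
```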
@@ -118,19 +130,8 @@ def upload_soft_prompt(file):
 
     return name
 
-def create_model_and_preset_menus():
-    with gr.Row():
-        with gr.Column():
-            with gr.Row():
-                shared.gradio['model_menu'] = gr.Dropdown(choices=available_models, value=shared.model_name, label='Model')
-                ui.create_refresh_button(shared.gradio['model_menu'], lambda : None, lambda : {'choices': get_available_models()}, 'refresh-button')
-        with gr.Column():
-            with gr.Row():
-                shared.gradio['preset_menu'] = gr.Dropdown(choices=available_presets, value=default_preset if not shared.args.flexgen else 'Naive', label='Generation parameters preset')
-                ui.create_refresh_button(shared.gradio['preset_menu'], lambda : None, lambda : {'choices': get_available_presets()}, 'refresh-button')
-
 def save_prompt(text):
-    fname = f"{datetime.now().strftime('%Y-%m-%d-%H:%M:%S')}.txt"
+    fname = f"{datetime.now().strftime('%Y-%m-%d-%H%M%S')}.txt"
    with open(Path(f'prompts/{fname}'), 'w', encoding='utf-8') as f:
         f.write(text)
     return f"Saved to prompts/{fname}"
@@ -144,7 +145,7 @@ def load_prompt(fname):
             if text[-1] == '\n':
                 text = text[:-1]
             return text
-
+
 def create_prompt_menus():
     with gr.Row():
         with gr.Column():
@@ -160,12 +161,31 @@ def create_prompt_menus():
     shared.gradio['prompt_menu'].change(load_prompt, [shared.gradio['prompt_menu']], [shared.gradio['textbox']], show_progress=False)
     shared.gradio['save_prompt'].click(save_prompt, [shared.gradio['textbox']], [shared.gradio['status']], show_progress=False)
 
+def create_model_menus():
+    with gr.Row():
+        with gr.Column():
+            with gr.Row():
+                shared.gradio['model_menu'] = gr.Dropdown(choices=available_models, value=shared.model_name, label='Model')
+                ui.create_refresh_button(shared.gradio['model_menu'], lambda : None, lambda : {'choices': get_available_models()}, 'refresh-button')
+        with gr.Column():
+            with gr.Row():
+                shared.gradio['lora_menu'] = gr.Dropdown(choices=available_loras, value=shared.lora_name, label='LoRA')
+                ui.create_refresh_button(shared.gradio['lora_menu'], lambda : None, lambda : {'choices': get_available_loras()}, 'refresh-button')
+
+    shared.gradio['model_menu'].change(load_model_wrapper, shared.gradio['model_menu'], shared.gradio['model_menu'], show_progress=True)
+    shared.gradio['lora_menu'].change(load_lora_wrapper, shared.gradio['lora_menu'], shared.gradio['lora_menu'], show_progress=True)
+
 def create_settings_menus(default_preset):
-    generate_params = load_preset_values(default_preset if not shared.args.flexgen else 'Naive', return_dict=True)
+    generate_params = load_preset_values(default_preset if not shared.args.flexgen else 'Naive', {}, return_dict=True)
+    for k in ['max_new_tokens', 'seed', 'stop_at_newline', 'chat_prompt_size', 'chat_generation_attempts']:
+        generate_params[k] = shared.settings[k]
+    shared.gradio['generate_state'] = gr.State(generate_params)
 
     with gr.Row():
         with gr.Column():
-            create_model_and_preset_menus()
+            with gr.Row():
+                shared.gradio['preset_menu'] = gr.Dropdown(choices=available_presets, value=default_preset if not shared.args.flexgen else 'Naive', label='Generation parameters preset')
+                ui.create_refresh_button(shared.gradio['preset_menu'], lambda : None, lambda : {'choices': get_available_presets()}, 'refresh-button')
         with gr.Column():
             shared.gradio['seed'] = gr.Number(value=shared.settings['seed'], label='Seed (-1 for random)')
 
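The `generate_state` component introduced here is a `gr.State` holding one parameter dict per browser session. A minimal sketch of the pattern outside the webui (component names are illustrative, not the webui's actual layout):

```python
# Sketch of the gr.State pattern: handlers take the state dict as an input
# and return the updated dict as an output, instead of wiring every slider
# into every generation event separately.
import gradio as gr

with gr.Blocks() as demo:
    generate_state = gr.State({'temperature': 1.0, 'max_new_tokens': 200})
    temperature = gr.Slider(0.01, 1.99, value=1.0, label='temperature')
    shown = gr.JSON(label='current state')

    def update_temperature(state, value):
        state['temperature'] = value
        return state, state

    # Sliders fire `release` when the user lets go of the handle.
    temperature.release(update_temperature, [generate_state, temperature], [generate_state, shown])

# demo.launch()
```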
@@ -199,9 +219,6 @@ def create_settings_menus(default_preset):
                     shared.gradio['length_penalty'] = gr.Slider(-5, 5, value=generate_params['length_penalty'], label='length_penalty')
                     shared.gradio['early_stopping'] = gr.Checkbox(value=generate_params['early_stopping'], label='early_stopping')
 
-    with gr.Row():
-        shared.gradio['lora_menu'] = gr.Dropdown(choices=available_loras, value=shared.lora_name, label='LoRA')
-        ui.create_refresh_button(shared.gradio['lora_menu'], lambda : None, lambda : {'choices': get_available_loras()}, 'refresh-button')
 
     with gr.Accordion('Soft prompt', open=False):
         with gr.Row():
@@ -212,17 +229,14 @@ def create_settings_menus(default_preset):
         with gr.Row():
             shared.gradio['upload_softprompt'] = gr.File(type='binary', file_types=['.zip'])
 
-    shared.gradio['model_menu'].change(load_model_wrapper, [shared.gradio['model_menu']], [shared.gradio['model_menu']], show_progress=True)
-    shared.gradio['preset_menu'].change(load_preset_values, [shared.gradio['preset_menu']], [shared.gradio[k] for k in ['do_sample', 'temperature', 'top_p', 'typical_p', 'repetition_penalty', 'encoder_repetition_penalty', 'top_k', 'min_length', 'no_repeat_ngram_size', 'num_beams', 'penalty_alpha', 'length_penalty', 'early_stopping']])
-    shared.gradio['lora_menu'].change(load_lora_wrapper, [shared.gradio['lora_menu']], [shared.gradio['lora_menu']], show_progress=True)
-    shared.gradio['softprompts_menu'].change(load_soft_prompt, [shared.gradio['softprompts_menu']], [shared.gradio['softprompts_menu']], show_progress=True)
-    shared.gradio['upload_softprompt'].upload(upload_soft_prompt, [shared.gradio['upload_softprompt']], [shared.gradio['softprompts_menu']])
+    shared.gradio['preset_menu'].change(load_preset_values, [shared.gradio[k] for k in ['preset_menu', 'generate_state']], [shared.gradio[k] for k in ['generate_state', 'do_sample', 'temperature', 'top_p', 'typical_p', 'repetition_penalty', 'encoder_repetition_penalty', 'top_k', 'min_length', 'no_repeat_ngram_size', 'num_beams', 'penalty_alpha', 'length_penalty', 'early_stopping']])
+    shared.gradio['softprompts_menu'].change(load_soft_prompt, shared.gradio['softprompts_menu'], shared.gradio['softprompts_menu'], show_progress=True)
+    shared.gradio['upload_softprompt'].upload(upload_soft_prompt, shared.gradio['upload_softprompt'], shared.gradio['softprompts_menu'])
 
 def set_interface_arguments(interface_mode, extensions, bool_active):
     modes = ["default", "notebook", "chat", "cai_chat"]
     cmd_list = vars(shared.args)
     bool_list = [k for k in cmd_list if type(cmd_list[k]) is bool and k not in modes]
-    #int_list = [k for k in cmd_list if type(k) is int]
 
     shared.args.extensions = extensions
     for k in modes[1:]:
@@ -295,10 +309,7 @@ def create_interface():
     if shared.is_chat():
         shared.gradio['Chat input'] = gr.State()
         with gr.Tab("Text generation", elem_id="main"):
-            if shared.args.cai_chat:
-                shared.gradio['display'] = gr.HTML(value=generate_chat_html(shared.history['visible'], shared.settings['name1'], shared.settings['name2'], shared.character))
-            else:
-                shared.gradio['display'] = gr.Chatbot(value=shared.history['visible'], elem_id="gradio-chatbot")
+            shared.gradio['display'] = gr.HTML(value=chat_html_wrapper(shared.history['visible'], shared.settings['name1'], shared.settings['name2'], 'cai-chat'))
             shared.gradio['textbox'] = gr.Textbox(label='Input')
             with gr.Row():
                 shared.gradio['Generate'] = gr.Button('Generate')
@@ -315,11 +326,20 @@ def create_interface():
                 shared.gradio['Clear history-confirm'] = gr.Button('Confirm', variant="stop", visible=False)
                 shared.gradio['Clear history-cancel'] = gr.Button('Cancel', visible=False)
 
+            shared.gradio["Chat mode"] = gr.Radio(choices=["cai-chat", "chat", "instruct"], value="cai-chat", label="Mode")
+            shared.gradio["Instruction templates"] = gr.Dropdown(choices=get_available_instruction_templates(), label="Instruction template", value="None", visible=False)
+
         with gr.Tab("Character", elem_id="chat-settings"):
-            shared.gradio['name1'] = gr.Textbox(value=shared.settings['name1'], lines=1, label='Your name')
-            shared.gradio['name2'] = gr.Textbox(value=shared.settings['name2'], lines=1, label='Character\'s name')
-            shared.gradio['greeting'] = gr.Textbox(value=shared.settings['greeting'], lines=2, label='Greeting')
-            shared.gradio['context'] = gr.Textbox(value=shared.settings['context'], lines=8, label='Context')
+            with gr.Row():
+                with gr.Column(scale=8):
+                    shared.gradio['name1'] = gr.Textbox(value=shared.settings['name1'], lines=1, label='Your name')
+                    shared.gradio['name2'] = gr.Textbox(value=shared.settings['name2'], lines=1, label='Character\'s name')
+                    shared.gradio['greeting'] = gr.Textbox(value=shared.settings['greeting'], lines=4, label='Greeting')
+                    shared.gradio['context'] = gr.Textbox(value=shared.settings['context'], lines=4, label='Context')
+                    shared.gradio['end_of_turn'] = gr.Textbox(value=shared.settings["end_of_turn"], lines=1, label='End of turn string')
+                with gr.Column(scale=1):
+                    shared.gradio['character_picture'] = gr.Image(label='Character picture', type="pil")
+                    shared.gradio['your_picture'] = gr.Image(label='Your picture', type="pil", value=Image.open(Path("cache/pfp_me.png")) if Path("cache/pfp_me.png").exists() else None)
             with gr.Row():
                 shared.gradio['character_menu'] = gr.Dropdown(choices=available_characters, value='None', label='Character', elem_id='character-menu')
                 ui.create_refresh_button(shared.gradio['character_menu'], lambda : None, lambda : {'choices': get_available_characters()}, 'refresh-button')
@@ -347,8 +367,6 @@ def create_interface():
 
                 gr.Markdown("# TavernAI PNG format")
                 shared.gradio['upload_img_tavern'] = gr.File(type='binary', file_types=['image'])
-            with gr.Tab('Upload your profile picture'):
-                shared.gradio['upload_img_me'] = gr.File(type='binary', file_types=['image'])
 
         with gr.Tab("Parameters", elem_id="parameters"):
             with gr.Box():
@@ -359,35 +377,35 @@ def create_interface():
                         shared.gradio['chat_prompt_size_slider'] = gr.Slider(minimum=shared.settings['chat_prompt_size_min'], maximum=shared.settings['chat_prompt_size_max'], step=1, label='Maximum prompt size in tokens', value=shared.settings['chat_prompt_size'])
                     with gr.Column():
                         shared.gradio['chat_generation_attempts'] = gr.Slider(minimum=shared.settings['chat_generation_attempts_min'], maximum=shared.settings['chat_generation_attempts_max'], value=shared.settings['chat_generation_attempts'], step=1, label='Generation attempts (for longer replies)')
-            shared.gradio['check'] = gr.Checkbox(value=shared.settings['stop_at_newline'], label='Stop generating at new line character?')
+            shared.gradio['stop_at_newline'] = gr.Checkbox(value=shared.settings['stop_at_newline'], label='Stop generating at new line character?')
 
             create_settings_menus(default_preset)
 
-        function_call = 'chat.cai_chatbot_wrapper' if shared.args.cai_chat else 'chat.chatbot_wrapper'
-        shared.input_params = [shared.gradio[k] for k in ['Chat input', 'max_new_tokens', 'do_sample', 'temperature', 'top_p', 'typical_p', 'repetition_penalty', 'encoder_repetition_penalty', 'top_k', 'min_length', 'no_repeat_ngram_size', 'num_beams', 'penalty_alpha', 'length_penalty', 'early_stopping', 'seed', 'name1', 'name2', 'context', 'check', 'chat_prompt_size_slider', 'chat_generation_attempts']]
+        shared.input_params = [shared.gradio[k] for k in ['Chat input', 'generate_state', 'name1', 'name2', 'context', 'Chat mode', 'end_of_turn']]
 
         def set_chat_input(textbox):
            return textbox, ""
 
         gen_events.append(shared.gradio['Generate'].click(set_chat_input, shared.gradio['textbox'], [shared.gradio['Chat input'], shared.gradio['textbox']], show_progress=False))
-        gen_events.append(shared.gradio['Generate'].click(eval(function_call), shared.input_params, shared.gradio['display'], show_progress=shared.args.no_stream))
+        gen_events.append(shared.gradio['Generate'].click(chat.cai_chatbot_wrapper, shared.input_params, shared.gradio['display'], show_progress=shared.args.no_stream))
         gen_events.append(shared.gradio['textbox'].submit(set_chat_input, shared.gradio['textbox'], [shared.gradio['Chat input'], shared.gradio['textbox']], show_progress=False))
-        gen_events.append(shared.gradio['textbox'].submit(eval(function_call), shared.input_params, shared.gradio['display'], show_progress=shared.args.no_stream))
+        gen_events.append(shared.gradio['textbox'].submit(chat.cai_chatbot_wrapper, shared.input_params, shared.gradio['display'], show_progress=shared.args.no_stream))
         gen_events.append(shared.gradio['Regenerate'].click(chat.regenerate_wrapper, shared.input_params, shared.gradio['display'], show_progress=shared.args.no_stream))
         gen_events.append(shared.gradio['Impersonate'].click(chat.impersonate_wrapper, shared.input_params, shared.gradio['textbox'], show_progress=shared.args.no_stream))
         shared.gradio['Stop'].click(stop_everything_event, [], [], queue=False, cancels=gen_events if shared.args.no_stream else None)
 
         shared.gradio['Copy last reply'].click(chat.send_last_reply_to_input, [], shared.gradio['textbox'], show_progress=shared.args.no_stream)
-        shared.gradio['Replace last reply'].click(chat.replace_last_reply, [shared.gradio['textbox'], shared.gradio['name1'], shared.gradio['name2']], shared.gradio['display'], show_progress=shared.args.no_stream)
+        shared.gradio['Replace last reply'].click(chat.replace_last_reply, [shared.gradio[k] for k in ['textbox', 'name1', 'name2', 'Chat mode']], shared.gradio['display'], show_progress=shared.args.no_stream)
 
         # Clear history with confirmation
         clear_arr = [shared.gradio[k] for k in ['Clear history-confirm', 'Clear history', 'Clear history-cancel']]
         shared.gradio['Clear history'].click(lambda :[gr.update(visible=True), gr.update(visible=False), gr.update(visible=True)], None, clear_arr)
         shared.gradio['Clear history-confirm'].click(lambda :[gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)], None, clear_arr)
-        shared.gradio['Clear history-confirm'].click(chat.clear_chat_log, [shared.gradio['name1'], shared.gradio['name2'], shared.gradio['greeting']], shared.gradio['display'])
+        shared.gradio['Clear history-confirm'].click(chat.clear_chat_log, [shared.gradio[k] for k in ['name1', 'name2', 'greeting', 'Chat mode']], shared.gradio['display'])
         shared.gradio['Clear history-cancel'].click(lambda :[gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)], None, clear_arr)
+        shared.gradio['Chat mode'].change(lambda x : gr.update(visible= x=='instruct'), shared.gradio['Chat mode'], shared.gradio['Instruction templates'])
 
-        shared.gradio['Remove last'].click(chat.remove_last_message, [shared.gradio['name1'], shared.gradio['name2']], [shared.gradio['display'], shared.gradio['textbox']], show_progress=False)
+        shared.gradio['Remove last'].click(chat.remove_last_message, [shared.gradio[k] for k in ['name1', 'name2', 'Chat mode']], [shared.gradio['display'], shared.gradio['textbox']], show_progress=False)
         shared.gradio['download_button'].click(chat.save_history, inputs=[], outputs=[shared.gradio['download']])
         shared.gradio['Upload character'].click(chat.upload_character, [shared.gradio['upload_json'], shared.gradio['upload_img_bot']], [shared.gradio['character_menu']])
 
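The mode radio drives the template dropdown's visibility through `gr.update`; a self-contained sketch of that wiring, reduced to the two components involved:

```python
# Sketch of the visibility toggle used for the instruction-template dropdown.
import gradio as gr

with gr.Blocks() as demo:
    mode = gr.Radio(choices=["cai-chat", "chat", "instruct"], value="cai-chat", label="Mode")
    templates = gr.Dropdown(choices=["None"], value="None", label="Instruction template", visible=False)
    # gr.update(visible=...) patches a property without recreating the component.
    mode.change(lambda x: gr.update(visible=x == 'instruct'), mode, templates)

# demo.launch()
```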
@@ -399,20 +417,21 @@ def create_interface():
         shared.gradio['textbox'].submit(lambda x: '', shared.gradio['textbox'], shared.gradio['textbox'], show_progress=False)
         shared.gradio['textbox'].submit(lambda : chat.save_history(timestamp=False), [], [], show_progress=False)
 
-        shared.gradio['character_menu'].change(chat.load_character, [shared.gradio[k] for k in ['character_menu', 'name1', 'name2']], [shared.gradio[k] for k in ['name1', 'name2', 'greeting', 'context', 'display']])
-        shared.gradio['upload_chat_history'].upload(chat.load_history, [shared.gradio['upload_chat_history'], shared.gradio['name1'], shared.gradio['name2']], [])
+        shared.gradio['character_menu'].change(chat.load_character, [shared.gradio[k] for k in ['character_menu', 'name1', 'name2', 'Chat mode']], [shared.gradio[k] for k in ['name1', 'name2', 'character_picture', 'greeting', 'context', 'end_of_turn', 'display']])
+        shared.gradio['Instruction templates'].change(lambda character, name1, name2, mode: chat.load_character(character, name1, name2, mode), [shared.gradio[k] for k in ['Instruction templates', 'name1', 'name2', 'Chat mode']], [shared.gradio[k] for k in ['name1', 'name2', 'character_picture', 'greeting', 'context', 'end_of_turn', 'display']])
+        shared.gradio['upload_chat_history'].upload(chat.load_history, [shared.gradio[k] for k in ['upload_chat_history', 'name1', 'name2']], [])
         shared.gradio['upload_img_tavern'].upload(chat.upload_tavern_character, [shared.gradio['upload_img_tavern'], shared.gradio['name1'], shared.gradio['name2']], [shared.gradio['character_menu']])
-        shared.gradio['upload_img_me'].upload(chat.upload_your_profile_picture, [shared.gradio['upload_img_me']], [])
+        shared.gradio['your_picture'].change(chat.upload_your_profile_picture, [shared.gradio[k] for k in ['your_picture', 'name1', 'name2', 'Chat mode']], shared.gradio['display'])
 
-        reload_func = chat.redraw_html if shared.args.cai_chat else lambda : shared.history['visible']
-        reload_inputs = [shared.gradio['name1'], shared.gradio['name2']] if shared.args.cai_chat else []
-        shared.gradio['upload_chat_history'].upload(reload_func, reload_inputs, [shared.gradio['display']])
-        shared.gradio['upload_img_me'].upload(reload_func, reload_inputs, [shared.gradio['display']])
-        shared.gradio['Stop'].click(reload_func, reload_inputs, [shared.gradio['display']])
+        reload_inputs = [shared.gradio[k] for k in ['name1', 'name2', 'Chat mode']]
+        shared.gradio['upload_chat_history'].upload(chat.redraw_html, reload_inputs, [shared.gradio['display']])
+        shared.gradio['Stop'].click(chat.redraw_html, reload_inputs, [shared.gradio['display']])
+        shared.gradio['Instruction templates'].change(chat.redraw_html, reload_inputs, [shared.gradio['display']])
+        shared.gradio['Chat mode'].change(chat.redraw_html, reload_inputs, [shared.gradio['display']])
 
         shared.gradio['interface'].load(None, None, None, _js=f"() => {{{ui.main_js+ui.chat_js}}}")
         shared.gradio['interface'].load(lambda : chat.load_default_history(shared.settings['name1'], shared.settings['name2']), None, None)
-        shared.gradio['interface'].load(reload_func, reload_inputs, [shared.gradio['display']], show_progress=True)
+        shared.gradio['interface'].load(chat.redraw_html, reload_inputs, [shared.gradio['display']], show_progress=True)
 
     elif shared.args.notebook:
         with gr.Tab("Text generation", elem_id="main"):
@@ -442,9 +461,9 @@ def create_interface():
         with gr.Tab("Parameters", elem_id="parameters"):
             create_settings_menus(default_preset)
 
-        shared.input_params = [shared.gradio[k] for k in ['textbox', 'max_new_tokens', 'do_sample', 'temperature', 'top_p', 'typical_p', 'repetition_penalty', 'encoder_repetition_penalty', 'top_k', 'min_length', 'no_repeat_ngram_size', 'num_beams', 'penalty_alpha', 'length_penalty', 'early_stopping', 'seed']]
+        shared.input_params = [shared.gradio[k] for k in ['textbox', 'generate_state']]
         output_params = [shared.gradio[k] for k in ['textbox', 'markdown', 'html']]
-        gen_events.append(shared.gradio['Generate'].click(generate_reply, shared.input_params, output_params, show_progress=shared.args.no_stream, api_name='textgen'))
+        gen_events.append(shared.gradio['Generate'].click(generate_reply, shared.input_params, output_params, show_progress=shared.args.no_stream))
         gen_events.append(shared.gradio['textbox'].submit(generate_reply, shared.input_params, output_params, show_progress=shared.args.no_stream))
         shared.gradio['Stop'].click(stop_everything_event, [], [], queue=False, cancels=gen_events if shared.args.no_stream else None)
         shared.gradio['interface'].load(None, None, None, _js=f"() => {{{ui.main_js}}}")
@@ -475,14 +494,17 @@ def create_interface():
         with gr.Tab("Parameters", elem_id="parameters"):
             create_settings_menus(default_preset)
 
-        shared.input_params = [shared.gradio[k] for k in ['textbox', 'max_new_tokens', 'do_sample', 'temperature', 'top_p', 'typical_p', 'repetition_penalty', 'encoder_repetition_penalty', 'top_k', 'min_length', 'no_repeat_ngram_size', 'num_beams', 'penalty_alpha', 'length_penalty', 'early_stopping', 'seed']]
+        shared.input_params = [shared.gradio[k] for k in ['textbox', 'generate_state']]
         output_params = [shared.gradio[k] for k in ['output_textbox', 'markdown', 'html']]
-        gen_events.append(shared.gradio['Generate'].click(generate_reply, shared.input_params, output_params, show_progress=shared.args.no_stream, api_name='textgen'))
+        gen_events.append(shared.gradio['Generate'].click(generate_reply, shared.input_params, output_params, show_progress=shared.args.no_stream))
         gen_events.append(shared.gradio['textbox'].submit(generate_reply, shared.input_params, output_params, show_progress=shared.args.no_stream))
         gen_events.append(shared.gradio['Continue'].click(generate_reply, [shared.gradio['output_textbox']] + shared.input_params[1:], output_params, show_progress=shared.args.no_stream))
         shared.gradio['Stop'].click(stop_everything_event, [], [], queue=False, cancels=gen_events if shared.args.no_stream else None)
         shared.gradio['interface'].load(None, None, None, _js=f"() => {{{ui.main_js}}}")
 
+    with gr.Tab("Model", elem_id="model-tab"):
+        create_model_menus()
+
     with gr.Tab("Training", elem_id="training-tab"):
         training.create_train_interface()
 
@@ -496,7 +518,6 @@ def create_interface():
         cmd_list = vars(shared.args)
         bool_list = [k for k in cmd_list if type(cmd_list[k]) is bool and k not in modes]
         bool_active = [k for k in bool_list if vars(shared.args)[k]]
-        #int_list = [k for k in cmd_list if type(k) is int]
 
         gr.Markdown("*Experimental*")
         shared.gradio['interface_modes_menu'] = gr.Dropdown(choices=modes, value=current_mode, label="Mode")
@@ -510,6 +531,21 @@ def create_interface():
         if shared.args.extensions is not None:
             extensions_module.create_extensions_block()
 
+        def change_dict_value(d, key, value):
+            d[key] = value
+            return d
+
+        for k in ['do_sample', 'temperature', 'top_p', 'typical_p', 'repetition_penalty', 'encoder_repetition_penalty', 'top_k', 'min_length', 'no_repeat_ngram_size', 'num_beams', 'penalty_alpha', 'length_penalty', 'early_stopping', 'max_new_tokens', 'seed', 'stop_at_newline', 'chat_prompt_size_slider', 'chat_generation_attempts']:
+            if k not in shared.gradio:
+                continue
+            if type(shared.gradio[k]) in [gr.Checkbox, gr.Number]:
+                shared.gradio[k].change(lambda state, value, copy=k: change_dict_value(state, copy, value), inputs=[shared.gradio['generate_state'], shared.gradio[k]], outputs=shared.gradio['generate_state'])
+            else:
+                shared.gradio[k].release(lambda state, value, copy=k: change_dict_value(state, copy, value), inputs=[shared.gradio['generate_state'], shared.gradio[k]], outputs=shared.gradio['generate_state'])
+
+        if not shared.is_chat():
+            api.create_apis()
+
     # Authentication
     auth = None
     if shared.args.gradio_auth_path is not None:
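A note on the `copy=k` default argument in the lambdas above: Python closures bind the loop variable by reference, so without it every handler registered in the loop would update the last key in the list. A minimal demonstration of the pitfall and the fix:

```python
# Late binding vs. default-argument snapshot, outside any Gradio context.
broken, fixed = [], []
for k in ['temperature', 'top_p']:
    broken.append(lambda: k)           # closes over the variable k itself
    fixed.append(lambda copy=k: copy)  # default arg captures k's value now

print([f() for f in broken])  # ['top_p', 'top_p'] -- both see the final k
print([f() for f in fixed])   # ['temperature', 'top_p']
```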