@@ -357,12 +357,30 @@ else:
 title = 'Text generation web UI'


+def list_interface_input_elements(chat=False):
+    elements = ['max_new_tokens', 'seed', 'temperature', 'top_p', 'top_k', 'typical_p', 'repetition_penalty', 'encoder_repetition_penalty', 'no_repeat_ngram_size', 'min_length', 'do_sample', 'penalty_alpha', 'num_beams', 'length_penalty', 'early_stopping', 'add_bos_token']
+    if chat:
+        elements += ['name1', 'name2', 'greeting', 'context', 'end_of_turn', 'chat_prompt_size', 'chat_generation_attempts', 'stop_at_newline', 'mode']
+    return elements
+
+
+def gather_interface_values(*args):
+    output = {}
+    for i, el in enumerate(shared.input_elements):
+        output[el] = args[i]
+    print(output)
+    return output
+
+
 def create_interface():
     gen_events = []
     if shared.args.extensions is not None and len(shared.args.extensions) > 0:
         extensions_module.load_extensions()

     with gr.Blocks(css=ui.css if not shared.is_chat() else ui.css + ui.chat_css, analytics_enabled=False, title=title) as shared.gradio['interface']:
+        shared.input_elements = list_interface_input_elements(chat=True)
+        shared.gradio['interface_state'] = gr.State({k: None for k in shared.input_elements})
+
         if shared.is_chat():
             shared.gradio['Chat input'] = gr.State()
             with gr.Tab("Text generation", elem_id="main"):
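
The two helpers added above replace per-widget state plumbing: list_interface_input_elements() names every generation parameter exposed in the UI (plus the chat-only fields when chat=True), and gather_interface_values() pairs those names positionally with the current widget values. Below is a minimal standalone sketch of that pairing, using a stand-in for modules.shared and made-up values; it is illustrative only, not part of the patch.

# Illustrative sketch, not part of the patch: a stand-in for modules.shared
# shows how gather_interface_values() pairs element names with widget values.
class _FakeShared:
    input_elements = ['max_new_tokens', 'seed', 'temperature', 'do_sample']

shared = _FakeShared()

def gather_interface_values(*args):
    # Each positional argument lines up with the element name at the same index.
    output = {}
    for i, el in enumerate(shared.input_elements):
        output[el] = args[i]
    return output

state = gather_interface_values(200, -1, 0.7, True)
print(state)  # {'max_new_tokens': 200, 'seed': -1, 'temperature': 0.7, 'do_sample': True}
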
@@ -384,7 +402,7 @@ def create_interface():
                     shared.gradio['Clear history-confirm'] = gr.Button('Confirm', variant="stop", visible=False)
                     shared.gradio['Clear history-cancel'] = gr.Button('Cancel', visible=False)

-                shared.gradio["Chat mode"] = gr.Radio(choices=["cai-chat", "chat", "instruct"], value="cai-chat", label="Mode")
+                shared.gradio["mode"] = gr.Radio(choices=["cai-chat", "chat", "instruct"], value="cai-chat", label="Mode")
                 shared.gradio["Instruction templates"] = gr.Dropdown(choices=get_available_instruction_templates(), label="Instruction template", value="None", visible=False, info="Change this according to the model/LoRA that you are using.")

             with gr.Tab("Character", elem_id="chat-settings"):
@@ -439,75 +457,85 @@ def create_interface():

                 create_settings_menus(default_preset)

-            shared.input_params = [shared.gradio[k] for k in ['Chat input', 'generate_state', 'name1', 'name2', 'context', 'Chat mode', 'end_of_turn']]
+            shared.input_params = [shared.gradio[k] for k in ['Chat input', 'interface_state']]
             clear_arr = [shared.gradio[k] for k in ['Clear history-confirm', 'Clear history', 'Clear history-cancel']]
-            reload_inputs = [shared.gradio[k] for k in ['name1', 'name2', 'Chat mode']]
+            reload_inputs = [shared.gradio[k] for k in ['name1', 'name2', 'mode']]

             gen_events.append(shared.gradio['Generate'].click(
+                gather_interface_values, [shared.gradio[k] for k in shared.input_elements], shared.gradio['interface_state']).then(
                 lambda x: (x, ''), shared.gradio['textbox'], [shared.gradio['Chat input'], shared.gradio['textbox']], show_progress=False).then(
                 chat.cai_chatbot_wrapper, shared.input_params, shared.gradio['display'], show_progress=shared.args.no_stream).then(
-                chat.save_history, shared.gradio['Chat mode'], None, show_progress=False)
+                chat.save_history, shared.gradio['mode'], None, show_progress=False)
             )

             gen_events.append(shared.gradio['textbox'].submit(
+                gather_interface_values, [shared.gradio[k] for k in shared.input_elements], shared.gradio['interface_state']).then(
                 lambda x: (x, ''), shared.gradio['textbox'], [shared.gradio['Chat input'], shared.gradio['textbox']], show_progress=False).then(
                 chat.cai_chatbot_wrapper, shared.input_params, shared.gradio['display'], show_progress=shared.args.no_stream).then(
-                chat.save_history, shared.gradio['Chat mode'], None, show_progress=False)
+                chat.save_history, shared.gradio['mode'], None, show_progress=False)
             )

             gen_events.append(shared.gradio['Regenerate'].click(
+                gather_interface_values, [shared.gradio[k] for k in shared.input_elements], shared.gradio['interface_state']).then(
                 chat.regenerate_wrapper, shared.input_params, shared.gradio['display'], show_progress=shared.args.no_stream).then(
-                chat.save_history, shared.gradio['Chat mode'], None, show_progress=False)
+                chat.save_history, shared.gradio['mode'], None, show_progress=False)
             )

             gen_events.append(shared.gradio['Continue'].click(
+                gather_interface_values, [shared.gradio[k] for k in shared.input_elements], shared.gradio['interface_state']).then(
                 chat.continue_wrapper, shared.input_params, shared.gradio['display'], show_progress=shared.args.no_stream).then(
-                chat.save_history, shared.gradio['Chat mode'], None, show_progress=False)
+                chat.save_history, shared.gradio['mode'], None, show_progress=False)
+            )
+
+            gen_events.append(shared.gradio['Impersonate'].click(
+                gather_interface_values, [shared.gradio[k] for k in shared.input_elements], shared.gradio['interface_state']).then(
+                chat.impersonate_wrapper, shared.input_params, shared.gradio['textbox'], show_progress=shared.args.no_stream)
             )

             shared.gradio['Replace last reply'].click(
-                chat.replace_last_reply, [shared.gradio[k] for k in ['textbox', 'name1', 'name2', 'Chat mode']], shared.gradio['display'], show_progress=shared.args.no_stream).then(
+                chat.replace_last_reply, [shared.gradio[k] for k in ['textbox', 'name1', 'name2', 'mode']], shared.gradio['display'], show_progress=shared.args.no_stream).then(
                 lambda x: '', shared.gradio['textbox'], shared.gradio['textbox'], show_progress=False).then(
-                chat.save_history, shared.gradio['Chat mode'], None, show_progress=False)
+                chat.save_history, shared.gradio['mode'], None, show_progress=False)

             shared.gradio['Clear history-confirm'].click(
                 lambda: [gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)], None, clear_arr).then(
-                chat.clear_chat_log, [shared.gradio[k] for k in ['name1', 'name2', 'greeting', 'Chat mode']], shared.gradio['display']).then(
-                chat.save_history, shared.gradio['Chat mode'], None, show_progress=False)
+                chat.clear_chat_log, [shared.gradio[k] for k in ['name1', 'name2', 'greeting', 'mode']], shared.gradio['display']).then(
+                chat.save_history, shared.gradio['mode'], None, show_progress=False)

             shared.gradio['Stop'].click(
                 stop_everything_event, None, None, queue=False, cancels=gen_events if shared.args.no_stream else None).then(
                 chat.redraw_html, reload_inputs, shared.gradio['display'])

-            shared.gradio['Chat mode'].change(
-                lambda x: gr.update(visible=x == 'instruct'), shared.gradio['Chat mode'], shared.gradio['Instruction templates']).then(
-                lambda x: gr.update(interactive=x != 'instruct'), shared.gradio['Chat mode'], shared.gradio['character_menu']).then(
+            shared.gradio['mode'].change(
+                lambda x: gr.update(visible=x == 'instruct'), shared.gradio['mode'], shared.gradio['Instruction templates']).then(
+                lambda x: gr.update(interactive=x != 'instruct'), shared.gradio['mode'], shared.gradio['character_menu']).then(
                 chat.redraw_html, reload_inputs, shared.gradio['display'])

             shared.gradio['Instruction templates'].change(
-                lambda character, name1, name2, mode: chat.load_character(character, name1, name2, mode), [shared.gradio[k] for k in ['Instruction templates', 'name1', 'name2', 'Chat mode']], [shared.gradio[k] for k in ['name1', 'name2', 'character_picture', 'greeting', 'context', 'end_of_turn', 'display']]).then(
+                lambda character, name1, name2, mode: chat.load_character(character, name1, name2, mode), [shared.gradio[k] for k in ['Instruction templates', 'name1', 'name2', 'mode']], [shared.gradio[k] for k in ['name1', 'name2', 'character_picture', 'greeting', 'context', 'end_of_turn', 'display']]).then(
                 chat.redraw_html, reload_inputs, shared.gradio['display'])

             shared.gradio['upload_chat_history'].upload(
                 chat.load_history, [shared.gradio[k] for k in ['upload_chat_history', 'name1', 'name2']], None).then(
                 chat.redraw_html, reload_inputs, shared.gradio['display'])

-            gen_events.append(shared.gradio['Impersonate'].click(chat.impersonate_wrapper, shared.input_params, shared.gradio['textbox'], show_progress=shared.args.no_stream))
             shared.gradio['Copy last reply'].click(chat.send_last_reply_to_input, None, shared.gradio['textbox'], show_progress=shared.args.no_stream)
             shared.gradio['Clear history'].click(lambda: [gr.update(visible=True), gr.update(visible=False), gr.update(visible=True)], None, clear_arr)
             shared.gradio['Clear history-cancel'].click(lambda: [gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)], None, clear_arr)
-            shared.gradio['Remove last'].click(chat.remove_last_message, [shared.gradio[k] for k in ['name1', 'name2', 'Chat mode']], [shared.gradio['display'], shared.gradio['textbox']], show_progress=False)
-            shared.gradio['download_button'].click(lambda x: chat.save_history(x, timestamp=True), shared.gradio['Chat mode'], shared.gradio['download'])
+            shared.gradio['Remove last'].click(chat.remove_last_message, [shared.gradio[k] for k in ['name1', 'name2', 'mode']], [shared.gradio['display'], shared.gradio['textbox']], show_progress=False)
+            shared.gradio['download_button'].click(lambda x: chat.save_history(x, timestamp=True), shared.gradio['mode'], shared.gradio['download'])
             shared.gradio['Upload character'].click(chat.upload_character, [shared.gradio['upload_json'], shared.gradio['upload_img_bot']], [shared.gradio['character_menu']])
-            shared.gradio['character_menu'].change(chat.load_character, [shared.gradio[k] for k in ['character_menu', 'name1', 'name2', 'Chat mode']], [shared.gradio[k] for k in ['name1', 'name2', 'character_picture', 'greeting', 'context', 'end_of_turn', 'display']])
+            shared.gradio['character_menu'].change(chat.load_character, [shared.gradio[k] for k in ['character_menu', 'name1', 'name2', 'mode']], [shared.gradio[k] for k in ['name1', 'name2', 'character_picture', 'greeting', 'context', 'end_of_turn', 'display']])
             shared.gradio['upload_img_tavern'].upload(chat.upload_tavern_character, [shared.gradio['upload_img_tavern'], shared.gradio['name1'], shared.gradio['name2']], [shared.gradio['character_menu']])
-            shared.gradio['your_picture'].change(chat.upload_your_profile_picture, [shared.gradio[k] for k in ['your_picture', 'name1', 'name2', 'Chat mode']], shared.gradio['display'])
+            shared.gradio['your_picture'].change(chat.upload_your_profile_picture, [shared.gradio[k] for k in ['your_picture', 'name1', 'name2', 'mode']], shared.gradio['display'])

             shared.gradio['interface'].load(None, None, None, _js=f"() => {{{ui.main_js+ui.chat_js}}}")
             shared.gradio['interface'].load(chat.load_default_history, [shared.gradio[k] for k in ['name1', 'name2']], None)
             shared.gradio['interface'].load(chat.redraw_html, reload_inputs, shared.gradio['display'], show_progress=True)

         elif shared.args.notebook:
+            shared.input_elements = list_interface_input_elements(chat=False)
+            shared.gradio['interface_state'] = gr.State({k: None for k in shared.input_elements})
             with gr.Tab("Text generation", elem_id="main"):
                 with gr.Row():
                     with gr.Column(scale=4):
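
Each handler above now runs as a two-step chain: the first .click()/.submit() step copies every tracked widget value into the 'interface_state' gr.State, and the chained .then() step runs the actual wrapper with that snapshot. The sketch below shows the same pattern in a self-contained demo, assuming only a Gradio version that supports .then() chaining; the component names, defaults, and the generate() body are illustrative, not taken from the repository.

# Illustrative sketch, not part of the patch: the .click(...).then(...) pattern.
# Step 1 snapshots widget values into a gr.State dict; step 2 reads that dict.
import gradio as gr

INPUT_ELEMENTS = ['temperature', 'max_new_tokens']

def gather(*args):
    # Pair element names with the current widget values, like gather_interface_values().
    return {k: v for k, v in zip(INPUT_ELEMENTS, args)}

def generate(prompt, state):
    # Stand-in for a generation wrapper that reads parameters from the state dict.
    return f"prompt={prompt!r}, settings={state}"

with gr.Blocks() as demo:
    interface_state = gr.State({k: None for k in INPUT_ELEMENTS})
    temperature = gr.Slider(0.0, 2.0, value=0.7, label='temperature')
    max_new_tokens = gr.Slider(1, 2000, value=200, step=1, label='max_new_tokens')
    prompt = gr.Textbox(label='Input')
    output = gr.Textbox(label='Output')
    button = gr.Button('Generate')

    # Step 1: copy the current widget values into the state dict.
    # Step 2: run the generator with the prompt and the snapshotted state.
    button.click(gather, [temperature, max_new_tokens], interface_state).then(
        generate, [prompt, interface_state], output)

if __name__ == '__main__':
    demo.launch()
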
@@ -537,12 +565,23 @@ def create_interface():

             shared.input_params = [shared.gradio[k] for k in ['textbox', 'generate_state']]
             output_params = [shared.gradio[k] for k in ['textbox', 'markdown', 'html']]
-            gen_events.append(shared.gradio['Generate'].click(generate_reply, shared.input_params, output_params, show_progress=shared.args.no_stream))
-            gen_events.append(shared.gradio['textbox'].submit(generate_reply, shared.input_params, output_params, show_progress=shared.args.no_stream))
+
+            gen_events.append(shared.gradio['Generate'].click(
+                gather_interface_values, [shared.gradio[k] for k in shared.input_elements], shared.gradio['interface_state']).then(
+                generate_reply, shared.input_params, output_params, show_progress=shared.args.no_stream)
+            )
+
+            gen_events.append(shared.gradio['textbox'].submit(
+                gather_interface_values, [shared.gradio[k] for k in shared.input_elements], shared.gradio['interface_state']).then(
+                generate_reply, shared.input_params, output_params, show_progress=shared.args.no_stream)
+            )
+
             shared.gradio['Stop'].click(stop_everything_event, None, None, queue=False, cancels=gen_events if shared.args.no_stream else None)
             shared.gradio['interface'].load(None, None, None, _js=f"() => {{{ui.main_js}}}")

         else:
+            shared.input_elements = list_interface_input_elements(chat=False)
+            shared.gradio['interface_state'] = gr.State({k: None for k in shared.input_elements})
             with gr.Tab("Text generation", elem_id="main"):
                 with gr.Row():
                     with gr.Column():
@@ -570,9 +609,22 @@ def create_interface():

             shared.input_params = [shared.gradio[k] for k in ['textbox', 'generate_state']]
             output_params = [shared.gradio[k] for k in ['output_textbox', 'markdown', 'html']]
-            gen_events.append(shared.gradio['Generate'].click(generate_reply, shared.input_params, output_params, show_progress=shared.args.no_stream))
-            gen_events.append(shared.gradio['textbox'].submit(generate_reply, shared.input_params, output_params, show_progress=shared.args.no_stream))
-            gen_events.append(shared.gradio['Continue'].click(generate_reply, [shared.gradio['output_textbox']] + shared.input_params[1:], output_params, show_progress=shared.args.no_stream))
+
+            gen_events.append(shared.gradio['Generate'].click(
+                gather_interface_values, [shared.gradio[k] for k in shared.input_elements], shared.gradio['interface_state']).then(
+                generate_reply, shared.input_params, output_params, show_progress=shared.args.no_stream)
+            )
+
+            gen_events.append(shared.gradio['textbox'].submit(
+                gather_interface_values, [shared.gradio[k] for k in shared.input_elements], shared.gradio['interface_state']).then(
+                generate_reply, shared.input_params, output_params, show_progress=shared.args.no_stream)
+            )
+
+            gen_events.append(shared.gradio['Continue'].click(
+                gather_interface_values, [shared.gradio[k] for k in shared.input_elements], shared.gradio['interface_state']).then(
+                generate_reply, [shared.gradio['output_textbox']] + shared.input_params[1:], output_params, show_progress=shared.args.no_stream)
+            )
+
             shared.gradio['Stop'].click(stop_everything_event, None, None, queue=False, cancels=gen_events if shared.args.no_stream else None)
             shared.gradio['interface'].load(None, None, None, _js=f"() => {{{ui.main_js}}}")

@@ -607,18 +659,6 @@ def create_interface():
         if shared.args.extensions is not None:
             extensions_module.create_extensions_block()

-        def change_dict_value(d, key, value):
-            d[key] = value
-            return d
-
-        for k in ['do_sample', 'temperature', 'top_p', 'typical_p', 'repetition_penalty', 'encoder_repetition_penalty', 'top_k', 'min_length', 'no_repeat_ngram_size', 'num_beams', 'penalty_alpha', 'length_penalty', 'early_stopping', 'add_bos_token', 'max_new_tokens', 'seed', 'stop_at_newline', 'chat_prompt_size', 'chat_generation_attempts']:
-            if k not in shared.gradio:
-                continue
-            if type(shared.gradio[k]) in [gr.Checkbox, gr.Number]:
-                shared.gradio[k].change(lambda state, value, copy=k: change_dict_value(state, copy, value), inputs=[shared.gradio['generate_state'], shared.gradio[k]], outputs=shared.gradio['generate_state'])
-            else:
-                shared.gradio[k].release(lambda state, value, copy=k: change_dict_value(state, copy, value), inputs=[shared.gradio['generate_state'], shared.gradio[k]], outputs=shared.gradio['generate_state'])
-
         if not shared.is_chat():
             api.create_apis()

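The final hunk drops the old bookkeeping, which attached a change/release callback to every parameter widget just to patch one key of generate_state at a time. With the new scheme the whole dict is rebuilt once per generation, so a downstream wrapper can read its parameters straight from the gathered state. The patch does not show those wrapper changes; the snippet below is only a hypothetical illustration of consuming such a dict, with the function name and the selected keys assumed rather than taken from the repository.

# Hypothetical consumer, not shown in the patch: reads generation parameters
# from the gathered state dict instead of from individual widget arguments.
def build_generate_kwargs(state):
    return {
        'max_new_tokens': state['max_new_tokens'],
        'do_sample': state['do_sample'],
        'temperature': state['temperature'],
        'top_p': state['top_p'],
        'top_k': state['top_k'],
    }

example_state = {'max_new_tokens': 200, 'do_sample': True, 'temperature': 0.7, 'top_p': 0.9, 'top_k': 40}
print(build_generate_kwargs(example_state))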