
Update API example

oobabooga 2 years ago
parent
commit 2289d3686f
2 changed files with 3 additions and 1 deletion
  1. api-example.py (+2, −0)
  2. server.py (+1, −1)

api-example.py  (+2, −0)

@@ -37,6 +37,8 @@ params = {
     'seed': -1,
     'add_bos_token': True,
     'custom_stopping_strings': [],
+    'truncation_length': 2048,
+    'ban_eos_token': False,
 }
 
 # Input prompt
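
For context, the two new keys extend the request parameters that api-example.py sends to the server. The sketch below shows how the updated dictionary might be used in a request; the server address, endpoint route, and payload shape are assumptions for illustration and may not match the script at this commit.

import json
import requests

HOST = 'http://127.0.0.1:7860'  # assumed local server address

params = {
    'seed': -1,
    'add_bos_token': True,
    'custom_stopping_strings': [],
    'truncation_length': 2048,   # added in this commit: maximum prompt length in tokens
    'ban_eos_token': False,      # added in this commit: do not ban the eos_token, so generation can end normally
}

prompt = 'Write a short poem about version control.'

# Hypothetical request; adjust the route and body to whatever the server actually expects.
response = requests.post(f'{HOST}/run/textgen', json={'data': [json.dumps([prompt, params])]})
print(response.json())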

server.py  (+1, −1)

@@ -275,7 +275,7 @@ def create_settings_menus(default_preset):
             with gr.Group():
                 with gr.Row():
                     shared.gradio['add_bos_token'] = gr.Checkbox(value=shared.settings['add_bos_token'], label='Add the bos_token to the beginning of prompts', info='Disabling this can make the replies more creative.')
-                    shared.gradio['ban_eos_token'] = gr.Checkbox(value=shared.settings['ban_eos_token'], label='Ban the eos token', info='This forces the model to never end the generation prematurely.')
+                    shared.gradio['ban_eos_token'] = gr.Checkbox(value=shared.settings['ban_eos_token'], label='Ban the eos_token', info='This forces the model to never end the generation prematurely.')
                 shared.gradio['truncation_length'] = gr.Slider(value=shared.settings['truncation_length'], minimum=shared.settings['truncation_length_min'], maximum=shared.settings['truncation_length_max'], step=1, label='Truncate the prompt up to this length', info='The leftmost tokens are removed if the prompt exceeds this length. Most models require this to be at most 2048.')
                 shared.gradio['custom_stopping_strings'] = gr.Textbox(lines=1, value=shared.settings["custom_stopping_strings"] or None, label='Custom stopping strings', info='In addition to the defaults. Written between "" and separated by commas. For instance: "\\nYour Assistant:", "\\nThe assistant:"')
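
Beyond the label fix (eos token → eos_token), the hunk shows the layout pattern used in create_settings_menus(): two checkboxes in a row inside a group, followed by a slider for truncation_length. A minimal, self-contained Gradio sketch of the same pattern, using placeholder default values instead of the shared.settings entries, might look like this:

import gradio as gr

with gr.Blocks() as demo:
    with gr.Group():
        with gr.Row():
            # Checkboxes side by side, as in the original Row
            add_bos_token = gr.Checkbox(value=True, label='Add the bos_token to the beginning of prompts',
                                        info='Disabling this can make the replies more creative.')
            ban_eos_token = gr.Checkbox(value=False, label='Ban the eos_token',
                                        info='This forces the model to never end the generation prematurely.')
        # Slider below the row, still inside the group
        truncation_length = gr.Slider(value=2048, minimum=0, maximum=4096, step=1,
                                      label='Truncate the prompt up to this length',
                                      info='The leftmost tokens are removed if the prompt exceeds this length.')

demo.launch()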