Revert gradio to 3.50.2 (#5513)
oobabooga authored Feb 15, 2024
1 parent ea0e1fe commit 080f713
Showing 25 changed files with 61 additions and 66 deletions.
5 changes: 0 additions & 5 deletions css/main.css
@@ -89,11 +89,6 @@ div.svelte-15lo0d8 > *, div.svelte-15lo0d8 > .form > * {
flex-wrap: nowrap;
}

- gradio-app > :first-child {
- padding-left: var(--size-4) !important;
- padding-right: var(--size-4) !important;
- }

.header_bar {
background-color: #f7f7f7;
box-shadow: 0 2px 3px rgba(22 22 22 / 35%);
2 changes: 1 addition & 1 deletion extensions/gallery/script.py
@@ -119,7 +119,7 @@ def ui():
samples_per_page=settings["gallery-items_per_page"]
)

- filter_box.change(lambda: None, None, None, js=f'() => {{{custom_js()}; gotoFirstPage()}}').success(
+ filter_box.change(lambda: None, None, None, _js=f'() => {{{custom_js()}; gotoFirstPage()}}').success(
filter_cards, filter_box, gallery).then(
lambda x: gr.update(elem_classes='highlighted-border' if x != '' else ''), filter_box, filter_box, show_progress=False)

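Note: gradio 3.x event methods take inline JavaScript through the _js keyword argument, which gradio 4 renamed to js; that rename is what this revert undoes at every call site below. A small compatibility sketch (hypothetical helper, not part of this repository) that picks the right keyword for whichever major version is installed:

import gradio as gr

def js_kwarg(code):
    # Returns {'js': code} on gradio >= 4 and {'_js': code} on gradio 3.x.
    major = int(gr.__version__.split('.')[0])
    return {'js': code} if major >= 4 else {'_js': code}

# Usage sketch: filter_box.change(lambda: None, None, None, **js_kwarg('() => gotoFirstPage()'))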
2 changes: 1 addition & 1 deletion extensions/whisper_stt/script.py
@@ -64,7 +64,7 @@ def ui():

audio.change(
auto_transcribe, [audio, auto_submit, whipser_model, whipser_language], [shared.gradio['textbox'], audio]).then(
- None, auto_submit, None, js="(check) => {if (check) { document.getElementById('Generate').click() }}")
+ None, auto_submit, None, _js="(check) => {if (check) { document.getElementById('Generate').click() }}")

whipser_model.change(lambda x: params.update({"whipser_model": x}), whipser_model, None)
whipser_language.change(lambda x: params.update({"whipser_language": x}), whipser_language, None)
3 changes: 1 addition & 2 deletions modules/block_requests.py
@@ -40,9 +40,8 @@ def my_open(*args, **kwargs):
with original_open(*args, **kwargs) as f:
file_contents = f.read()

- file_contents = file_contents.replace(b'\t\t<script\n\t\t\tsrc="https://cdnjs.cloudflare.com/ajax/libs/iframe-resizer/4.3.9/iframeResizer.contentWindow.min.js"\n\t\t\tasync\n\t\t></script>', b'')
+ file_contents = file_contents.replace(b'\t\t<script\n\t\t\tsrc="https://cdnjs.cloudflare.com/ajax/libs/iframe-resizer/4.3.7/iframeResizer.contentWindow.min.js"\n\t\t\tasync\n\t\t></script>', b'')
file_contents = file_contents.replace(b'cdnjs.cloudflare.com', b'127.0.0.1')

return io.BytesIO(file_contents)
else:
return original_open(*args, **kwargs)
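Note: my_open() above monkey-patches the built-in open() so that when gradio serves its bundled index.html, the CDN-hosted iframe-resizer script tag is stripped and any leftover cdnjs.cloudflare.com references are pointed at 127.0.0.1, keeping the UI usable offline. The tag embeds whichever iframe-resizer version ships with the installed gradio release (the diff swaps 4.3.9 for 4.3.7), which is why the hard-coded byte string changes with this revert. A version-agnostic sketch of the same replacement (an assumption, not the project's code):

import re

# Matches the iframe-resizer script tag regardless of the bundled version number.
IFRAME_RESIZER_RE = re.compile(
    rb'\s*<script\s+src="https://cdnjs\.cloudflare\.com/ajax/libs/iframe-resizer/'
    rb'[\d.]+/iframeResizer\.contentWindow\.min\.js"\s+async\s*>\s*</script>'
)

def strip_cdn_references(file_contents):
    # Remove the CDN script tag, then neutralize any remaining CDN hostnames.
    file_contents = IFRAME_RESIZER_RE.sub(b'', file_contents)
    return file_contents.replace(b'cdnjs.cloudflare.com', b'127.0.0.1')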
9 changes: 0 additions & 9 deletions modules/gradio_hijack.py

This file was deleted.

1 change: 1 addition & 0 deletions modules/shared.py
@@ -46,6 +46,7 @@
'truncation_length_min': 0,
'truncation_length_max': 200000,
'max_tokens_second': 0,
+ 'max_updates_second': 0,
'prompt_lookup_num_tokens': 0,
'custom_stopping_strings': '',
'custom_token_bans': '',
14 changes: 11 additions & 3 deletions modules/text_generation.py
@@ -80,25 +80,33 @@ def _generate_reply(question, state, stopping_strings=None, is_chat=False, escap
state = copy.deepcopy(state)
state['stream'] = True

+ min_update_interval = 0
+ if state.get('max_updates_second', 0) > 0:
+ min_update_interval = 1 / state['max_updates_second']

# Generate
for reply in generate_func(question, original_question, seed, state, stopping_strings, is_chat=is_chat):
reply, stop_found = apply_stopping_strings(reply, all_stop_strings)
if escape_html:
reply = html.escape(reply)

if is_stream:
cur_time = time.time()

- # Limit number of tokens/second to make text readable in real time
+ # Maximum number of tokens/second
if state['max_tokens_second'] > 0:
diff = 1 / state['max_tokens_second'] - (cur_time - last_update)
if diff > 0:
time.sleep(diff)

last_update = time.time()
yield reply

+ # Limit updates to avoid lag in the Gradio UI
+ # API updates are not limited
else:
- yield reply
+ if cur_time - last_update > min_update_interval:
+ last_update = cur_time
+ yield reply

if stop_found or (state['max_tokens_second'] > 0 and shared.stop_everything):
break
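Note: the restored streaming loop applies two independent throttles: max_tokens_second paces generation itself by sleeping between tokens so the text stays readable, while max_updates_second only limits how often the Gradio UI is redrawn (the API path is unaffected, since every token is still produced). A standalone sketch of the update-rate limiter (assumed names, not the project's API):

import time

def throttle_updates(stream, max_updates_second=0):
    # Yield items from stream at most max_updates_second times per second;
    # the final item is always delivered so the UI ends up fully in sync.
    min_interval = 1 / max_updates_second if max_updates_second > 0 else 0
    last_update = 0.0
    latest, emitted_latest = None, True
    for latest in stream:
        now = time.time()
        if now - last_update >= min_interval:
            last_update = now
            emitted_latest = True
            yield latest
        else:
            emitted_latest = False
    if not emitted_latest:
        yield latest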
1 change: 1 addition & 0 deletions modules/ui.py
@@ -112,6 +112,7 @@ def list_interface_input_elements():
'max_new_tokens',
'auto_max_new_tokens',
'max_tokens_second',
+ 'max_updates_second',
'prompt_lookup_num_tokens',
'seed',
'temperature',
32 changes: 16 additions & 16 deletions modules/ui_chat.py
@@ -171,36 +171,36 @@ def create_event_handlers():
chat.generate_chat_reply_wrapper, gradio(inputs), gradio('display', 'history'), show_progress=False).then(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None).then(
- lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')
+ lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')

shared.gradio['textbox'].submit(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
lambda x: (x, ''), gradio('textbox'), gradio('Chat input', 'textbox'), show_progress=False).then(
chat.generate_chat_reply_wrapper, gradio(inputs), gradio('display', 'history'), show_progress=False).then(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None).then(
- lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')
+ lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')

shared.gradio['Regenerate'].click(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
partial(chat.generate_chat_reply_wrapper, regenerate=True), gradio(inputs), gradio('display', 'history'), show_progress=False).then(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None).then(
- lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')
+ lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')

shared.gradio['Continue'].click(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
partial(chat.generate_chat_reply_wrapper, _continue=True), gradio(inputs), gradio('display', 'history'), show_progress=False).then(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None).then(
- lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')
+ lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')

shared.gradio['Impersonate'].click(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
lambda x: x, gradio('textbox'), gradio('Chat input'), show_progress=False).then(
chat.impersonate_wrapper, gradio(inputs), gradio('textbox', 'display'), show_progress=False).then(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
- lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')
+ lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')

shared.gradio['Replace last reply'].click(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
@@ -278,15 +278,15 @@ def create_event_handlers():
chat.redraw_html, gradio(reload_arr), gradio('display')).then(
lambda x: gr.update(choices=(histories := chat.find_all_histories(x)), value=histories[0]), gradio('interface_state'), gradio('unique_id')).then(
chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None).then(
- lambda: None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_chat()}}')
+ lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_chat()}}')

shared.gradio['character_menu'].change(
chat.load_character, gradio('character_menu', 'name1', 'name2'), gradio('name1', 'name2', 'character_picture', 'greeting', 'context')).success(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
chat.load_latest_history, gradio('interface_state'), gradio('history')).then(
chat.redraw_html, gradio(reload_arr), gradio('display')).then(
lambda x: gr.update(choices=(histories := chat.find_all_histories(x)), value=histories[0]), gradio('interface_state'), gradio('unique_id')).then(
- lambda: None, None, None, js=f'() => {{{ui.update_big_picture_js}; updateBigPicture()}}')
+ lambda: None, None, None, _js=f'() => {{{ui.update_big_picture_js}; updateBigPicture()}}')

shared.gradio['mode'].change(
lambda x: gr.update(visible=x != 'instruct'), gradio('mode'), gradio('chat_style'), show_progress=False).then(
Expand Down Expand Up @@ -322,15 +322,15 @@ def create_event_handlers():

shared.gradio['save_chat_history'].click(
lambda x: json.dumps(x, indent=4), gradio('history'), gradio('temporary_text')).then(
- None, gradio('temporary_text', 'character_menu', 'mode'), None, js=f'(hist, char, mode) => {{{ui.save_files_js}; saveHistory(hist, char, mode)}}')
+ None, gradio('temporary_text', 'character_menu', 'mode'), None, _js=f'(hist, char, mode) => {{{ui.save_files_js}; saveHistory(hist, char, mode)}}')

shared.gradio['Submit character'].click(
chat.upload_character, gradio('upload_json', 'upload_img_bot'), gradio('character_menu')).then(
- lambda: None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_character()}}')
+ lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_character()}}')

shared.gradio['Submit tavern character'].click(
chat.upload_tavern_character, gradio('upload_img_tavern', 'tavern_json'), gradio('character_menu')).then(
- lambda: None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_character()}}')
+ lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_character()}}')

shared.gradio['upload_json'].upload(lambda: gr.update(interactive=True), None, gradio('Submit character'))
shared.gradio['upload_json'].clear(lambda: gr.update(interactive=False), None, gradio('Submit character'))
@@ -344,28 +344,28 @@ def create_event_handlers():
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
lambda x: x.update({'mode': 'instruct', 'history': {'internal': [], 'visible': []}}), gradio('interface_state'), None).then(
partial(chat.generate_chat_prompt, 'Input'), gradio('interface_state'), gradio('textbox-default')).then(
- lambda: None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_default()}}')
+ lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_default()}}')

shared.gradio['send_instruction_to_notebook'].click(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
lambda x: x.update({'mode': 'instruct', 'history': {'internal': [], 'visible': []}}), gradio('interface_state'), None).then(
partial(chat.generate_chat_prompt, 'Input'), gradio('interface_state'), gradio('textbox-notebook')).then(
- lambda: None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_notebook()}}')
+ lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_notebook()}}')

shared.gradio['send_instruction_to_negative_prompt'].click(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
lambda x: x.update({'mode': 'instruct', 'history': {'internal': [], 'visible': []}}), gradio('interface_state'), None).then(
partial(chat.generate_chat_prompt, 'Input'), gradio('interface_state'), gradio('negative_prompt')).then(
- lambda: None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_generation_parameters()}}')
+ lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_generation_parameters()}}')

shared.gradio['send-chat-to-default'].click(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
partial(chat.generate_chat_prompt, '', _continue=True), gradio('interface_state'), gradio('textbox-default')).then(
- lambda: None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_default()}}')
+ lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_default()}}')

shared.gradio['send-chat-to-notebook'].click(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
partial(chat.generate_chat_prompt, '', _continue=True), gradio('interface_state'), gradio('textbox-notebook')).then(
- lambda: None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_notebook()}}')
+ lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_notebook()}}')

- shared.gradio['show_controls'].change(None, gradio('show_controls'), None, js=f'(x) => {{{ui.show_controls_js}; toggle_controls(x)}}')
+ shared.gradio['show_controls'].change(None, gradio('show_controls'), None, _js=f'(x) => {{{ui.show_controls_js}; toggle_controls(x)}}')
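Note: the recurring "lambda: None, None, None, _js=..." step is the gradio 3.x idiom for running a JavaScript snippet at the end of an event chain: the Python callback is a no-op and only the _js function executes in the browser. A minimal self-contained sketch of the pattern (the notification snippet is a stand-in for ui.audio_notification_js):

import gradio as gr

audio_notification_js = "document.title = 'generation finished'"  # stand-in snippet

with gr.Blocks() as demo:
    textbox = gr.Textbox(label='Input')
    output = gr.Textbox(label='Output')
    textbox.submit(lambda x: x.upper(), textbox, output).then(
        lambda: None, None, None, _js=f'() => {{{audio_notification_js}}}')

# demo.launch()  # gradio 3.50.x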
6 changes: 3 additions & 3 deletions modules/ui_default.py
@@ -67,21 +67,21 @@ def create_event_handlers():
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
generate_reply_wrapper, gradio(inputs), gradio(outputs), show_progress=False).then(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
- lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')
+ lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')

shared.gradio['textbox-default'].submit(
lambda x: x, gradio('textbox-default'), gradio('last_input-default')).then(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
generate_reply_wrapper, gradio(inputs), gradio(outputs), show_progress=False).then(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
- lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')
+ lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')

shared.gradio['markdown_render-default'].click(lambda x: x, gradio('output_textbox'), gradio('markdown-default'), queue=False)
shared.gradio['Continue-default'].click(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
generate_reply_wrapper, [shared.gradio['output_textbox']] + gradio(inputs)[1:], gradio(outputs), show_progress=False).then(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
- lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')
+ lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')

shared.gradio['Stop-default'].click(stop_everything_event, None, None, queue=False)
shared.gradio['prompt_menu-default'].change(load_prompt, gradio('prompt_menu-default'), gradio('textbox-default'), show_progress=False)
2 changes: 1 addition & 1 deletion modules/ui_model_menu.py
@@ -74,7 +74,7 @@ def create_ui():
with gr.Row():
with gr.Column():
shared.gradio['loader'] = gr.Dropdown(label="Model loader", choices=loaders.loaders_and_params.keys(), value=None)
- with gr.Blocks():
+ with gr.Box():
with gr.Row():
with gr.Column():
with gr.Blocks():
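Note: gr.Box is a gradio 3.x layout container that simply draws a bordered group around its children; it no longer exists in gradio 4, so the revert swaps the stand-in gr.Blocks() back to gr.Box(). A minimal sketch under gradio 3.50.x:

import gradio as gr

with gr.Blocks() as demo:
    with gr.Box():  # bordered visual grouping (gradio 3.x only)
        with gr.Row():
            loader = gr.Dropdown(label='Model loader', choices=['Transformers', 'llama.cpp'])
            load = gr.Button('Load')

# demo.launch()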
6 changes: 3 additions & 3 deletions modules/ui_notebook.py
@@ -67,14 +67,14 @@ def create_event_handlers():
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
generate_reply_wrapper, gradio(inputs), gradio(outputs), show_progress=False).then(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
- lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')
+ lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')

shared.gradio['textbox-notebook'].submit(
lambda x: x, gradio('textbox-notebook'), gradio('last_input-notebook')).then(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
generate_reply_wrapper, gradio(inputs), gradio(outputs), show_progress=False).then(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
- lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')
+ lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')

shared.gradio['Undo'].click(lambda x: x, gradio('last_input-notebook'), gradio('textbox-notebook'), show_progress=False)
shared.gradio['markdown_render-notebook'].click(lambda x: x, gradio('textbox-notebook'), gradio('markdown-notebook'), queue=False)
@@ -83,7 +83,7 @@ def create_event_handlers():
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
generate_reply_wrapper, gradio(inputs), gradio(outputs), show_progress=False).then(
ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
- lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')
+ lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')

shared.gradio['Stop-notebook'].click(stop_everything_event, None, None, queue=False)
shared.gradio['prompt_menu-notebook'].change(load_prompt, gradio('prompt_menu-notebook'), gradio('textbox-notebook'), show_progress=False)
1 change: 1 addition & 0 deletions modules/ui_parameters.py
@@ -72,6 +72,7 @@ def create_ui(default_preset):
with gr.Column():
shared.gradio['truncation_length'] = gr.Slider(value=get_truncation_length(), minimum=shared.settings['truncation_length_min'], maximum=shared.settings['truncation_length_max'], step=256, label='Truncate the prompt up to this length', info='The leftmost tokens are removed if the prompt exceeds this length. Most models require this to be at most 2048.')
shared.gradio['max_tokens_second'] = gr.Slider(value=shared.settings['max_tokens_second'], minimum=0, maximum=20, step=1, label='Maximum tokens/second', info='To make text readable in real time.')
+ shared.gradio['max_updates_second'] = gr.Slider(value=shared.settings['max_updates_second'], minimum=0, maximum=24, step=1, label='Maximum UI updates/second', info='Set this if you experience lag in the UI during streaming.')
shared.gradio['prompt_lookup_num_tokens'] = gr.Slider(value=shared.settings['prompt_lookup_num_tokens'], minimum=0, maximum=10, step=1, label='prompt_lookup_num_tokens', info='Activates Prompt Lookup Decoding.')

shared.gradio['custom_stopping_strings'] = gr.Textbox(lines=1, value=shared.settings["custom_stopping_strings"] or None, label='Custom stopping strings', info='In addition to the defaults. Written between "" and separated by commas.', placeholder='"\\n", "\\nYou:"')
