Show terms of use as an alert
merrymercy committed Sep 21, 2023
1 parent 3157116 commit 3e4a869
Showing 5 changed files with 76 additions and 161 deletions.
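The inline "Terms of use" sections are deleted from the page markdown in the files below; instead, when the new --show-terms-of-use flag is passed, demo.load points at a get_window_url_params_with_tos_js hook (presumably defined next to get_window_url_params_js in fastchat.utils). The helper's body is not part of this diff; a minimal sketch of what such a hook could look like, assuming it simply wraps the URL-parameter reader with an alert that repeats the removed terms text, is:

# Hypothetical sketch only -- not taken from this commit.
get_window_url_params_with_tos_js = """
function() {
    const params = new URLSearchParams(window.location.search);
    const url_params = Object.fromEntries(params);
    // Assumed wording, mirroring the markdown removed below.
    alert("By using this service, you agree that it is a research preview for " +
          "non-commercial use only, that it provides limited safety measures, and " +
          "that dialogue data may be collected and released under a CC-BY license.");
    return url_params;
}
"""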
fastchat/serve/gradio_block_arena_anony.py (32 changes: 5 additions & 27 deletions)
@@ -57,17 +57,7 @@ def load_demo_side_by_side_anony(models_, url_params):
gr.Markdown.update(visible=True),
)

- return (
- states
- + selector_updates
- + (gr.Chatbot.update(visible=True),) * num_sides
- + (
- gr.Textbox.update(visible=True),
- gr.Box.update(visible=True),
- gr.Row.update(visible=True),
- gr.Accordion.update(visible=True),
- )
- )
+ return states + selector_updates


def vote_last_response(states, vote_type, model_selectors, request: gr.Request):
@@ -388,9 +378,6 @@ def build_side_by_side_ui_anony(models):
### Leaderboard
See [lmsys/chatbot-arena-leaderboard](https://huggingface.co/spaces/lmsys/chatbot-arena-leaderboard) or the 4th tab above on this page.
- ### Terms of use
- By using this service, users are required to agree to the following terms: The service is a research preview intended for non-commercial use only. It only provides limited safety measures and may generate offensive content. It must not be used for any illegal, harmful, violent, racist, or sexual purposes. **The service collects user dialogue data and reserves the right to distribute it under a Creative Commons Attribution (CC-BY) license.** The demo works better on desktop devices with a wide screen.
### Battle
Please scroll down and start chatting. The models include both closed-source models (e.g., ChatGPT) and open-source models (e.g., Llama).
"""
@@ -407,7 +394,7 @@ def build_side_by_side_ui_anony(models):
label = "Model A" if i == 0 else "Model B"
with gr.Column():
chatbots[i] = gr.Chatbot(
- label=label, elem_id=f"chatbot", visible=False, height=550
+ label=label, elem_id=f"chatbot", height=550
)

with gr.Row():
@@ -432,19 +419,18 @@ def build_side_by_side_ui_anony(models):
textbox = gr.Textbox(
show_label=False,
placeholder="Enter your prompt here and press ENTER",
- visible=False,
container=False,
elem_id="input_box",
)
with gr.Column(scale=1, min_width=50):
- send_btn = gr.Button(value="Send", visible=False, variant="primary")
+ send_btn = gr.Button(value="Send", variant="primary")

with gr.Row() as button_row:
clear_btn = gr.Button(value="🗑️ Clear history", interactive=False)
regenerate_btn = gr.Button(value="🔄 Regenerate", interactive=False)
share_btn = gr.Button(value="📷 Share")

- with gr.Accordion("Parameters", open=False, visible=True) as parameter_row:
+ with gr.Accordion("Parameters", open=False) as parameter_row:
temperature = gr.Slider(
minimum=0.0,
maximum=1.0,
@@ -560,12 +546,4 @@ def build_side_by_side_ui_anony(models):
flash_buttons, [], btn_list
)

- return (
- states,
- model_selectors,
- chatbots,
- textbox,
- send_btn,
- button_row,
- parameter_row,
- )
+ return states + model_selectors
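The same simplification repeats in the named arena and single-model files below: the chat components are now created visible, so the load functions no longer return gr.*.update(visible=True) objects for each component and can return just the states and selector updates. For reference, a minimal sketch of the Gradio 3.x reveal-on-load idiom that the removed code relied on (assumed behavior, not part of this diff):

import gradio as gr

with gr.Blocks() as demo:
    # Old style: create the component hidden ...
    chatbot = gr.Chatbot(visible=False)
    # ... and reveal it once the page has loaded by returning an update object.
    demo.load(lambda: gr.Chatbot.update(visible=True), None, chatbot)

# After this commit the components start visible, so the loaders shrink to
# `return states + selector_updates` and callers only wire those outputs.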
fastchat/serve/gradio_block_arena_named.py (32 changes: 5 additions & 27 deletions)
@@ -62,17 +62,7 @@ def load_demo_side_by_side_named(models, url_params):
gr.Dropdown.update(choices=models, value=model_right, visible=True),
)

- return (
- states
- + selector_updates
- + (gr.Chatbot.update(visible=True),) * num_sides
- + (
- gr.Textbox.update(visible=True),
- gr.Box.update(visible=True),
- gr.Row.update(visible=True),
- gr.Accordion.update(visible=True),
- )
- )
+ return states + selector_updates


def vote_last_response(states, vote_type, model_selectors, request: gr.Request):
@@ -313,9 +303,6 @@ def build_side_by_side_ui_named(models):
- You can do multiple turns of conversations before voting.
- Click "Clear history" to start a new round.
- ### Terms of use
- By using this service, users are required to agree to the following terms: The service is a research preview intended for non-commercial use only. It only provides limited safety measures and may generate offensive content. It must not be used for any illegal, harmful, violent, racist, or sexual purposes. **The service collects user dialogue data and reserves the right to distribute it under a Creative Commons Attribution (CC-BY) license.** The demo works better on desktop devices with a wide screen.
### Choose two models to chat with (view [leaderboard](https://huggingface.co/spaces/lmsys/chatbot-arena-leaderboard))
"""

@@ -345,7 +332,7 @@ def build_side_by_side_ui_named(models):
label = "Model A" if i == 0 else "Model B"
with gr.Column():
chatbots[i] = gr.Chatbot(
- label=label, elem_id=f"chatbot", visible=False, height=550
+ label=label, elem_id=f"chatbot", height=550
)

with gr.Row():
@@ -365,19 +352,18 @@ def build_side_by_side_ui_named(models):
textbox = gr.Textbox(
show_label=False,
placeholder="Enter your prompt here and press ENTER",
- visible=False,
container=False,
elem_id="input_box",
)
with gr.Column(scale=1, min_width=50):
- send_btn = gr.Button(value="Send", visible=False, variant="primary")
+ send_btn = gr.Button(value="Send", variant="primary")

with gr.Row() as button_row:
regenerate_btn = gr.Button(value="🔄 Regenerate", interactive=False)
clear_btn = gr.Button(value="🗑️ Clear history", interactive=False)
share_btn = gr.Button(value="📷 Share")

- with gr.Accordion("Parameters", open=False, visible=True) as parameter_row:
+ with gr.Accordion("Parameters", open=False) as parameter_row:
temperature = gr.Slider(
minimum=0.0,
maximum=1.0,
@@ -495,12 +481,4 @@ def build_side_by_side_ui_named(models):
flash_buttons, [], btn_list
)

- return (
- states,
- model_selectors,
- chatbots,
- textbox,
- send_btn,
- button_row,
- parameter_row,
- )
+ return states + model_selectors
fastchat/serve/gradio_web_server.py (69 changes: 28 additions & 41 deletions)
@@ -28,7 +28,7 @@
SESSION_EXPIRATION_TIME,
)
from fastchat.model.model_adapter import get_conversation_template
- from fastchat.model.model_registry import get_model_info
+ from fastchat.model.model_registry import get_model_info, model_info
from fastchat.serve.api_provider import (
anthropic_api_stream_iter,
openai_api_stream_iter,
@@ -39,6 +39,7 @@
build_logger,
violates_moderation,
get_window_url_params_js,
+ get_window_url_params_with_tos_js,
parse_gradio_auth_creds,
)

@@ -163,15 +164,7 @@ def load_demo_single(models, url_params):
)

state = None
- return (
- state,
- dropdown_update,
- gr.Chatbot.update(visible=True),
- gr.Textbox.update(visible=True),
- gr.Button.update(visible=True),
- gr.Row.update(visible=True),
- gr.Accordion.update(visible=True),
- )
+ return [state, dropdown_update]


def load_demo(url_params, request: gr.Request):
@@ -560,9 +553,6 @@ def build_single_model_ui(models, add_promotion_links=False):
# 🏔️ Chat with Open Large Language Models
{promotion}
- ### Terms of use
- By using this service, users are required to agree to the following terms: The service is a research preview intended for non-commercial use only. It only provides limited safety measures and may generate offensive content. It must not be used for any illegal, harmful, violent, racist, or sexual purposes. **The service collects user dialogue data and reserves the right to distribute it under a Creative Commons Attribution (CC-BY) license.**
### Choose a model to chat with
"""

@@ -582,29 +572,27 @@ def build_single_model_ui(models, add_promotion_links=False):
chatbot = gr.Chatbot(
elem_id="chatbot",
label="Scroll down and start chatting",
- visible=False,
height=550,
)
with gr.Row():
with gr.Column(scale=20):
textbox = gr.Textbox(
show_label=False,
placeholder="Enter your prompt here and press ENTER",
- visible=False,
container=False,
elem_id="input_box",
)
with gr.Column(scale=1, min_width=50):
- send_btn = gr.Button(value="Send", visible=False, variant="primary")
+ send_btn = gr.Button(value="Send", variant="primary")

- with gr.Row(visible=False) as button_row:
+ with gr.Row() as button_row:
upvote_btn = gr.Button(value="👍 Upvote", interactive=False)
downvote_btn = gr.Button(value="👎 Downvote", interactive=False)
flag_btn = gr.Button(value="⚠️ Flag", interactive=False)
regenerate_btn = gr.Button(value="🔄 Regenerate", interactive=False)
clear_btn = gr.Button(value="🗑️ Clear history", interactive=False)

- with gr.Accordion("Parameters", open=False, visible=False) as parameter_row:
+ with gr.Accordion("Parameters", open=False) as parameter_row:
temperature = gr.Slider(
minimum=0.0,
maximum=1.0,
@@ -667,49 +655,42 @@ def build_single_model_ui(models, add_promotion_links=False):
[state, chatbot] + btn_list,
)
send_btn.click(
- add_text, [state, model_selector, textbox], [state, chatbot, textbox] + btn_list
+ add_text, [state, model_selector, textbox], [state, chatbot, textbox] + btn_list,
).then(
bot_response,
[state, temperature, top_p, max_output_tokens],
[state, chatbot] + btn_list,
)

- return state, model_selector, chatbot, textbox, send_btn, button_row, parameter_row
+ return [state, model_selector]


def build_demo(models):
with gr.Blocks(
title="Chat with Open Large Language Models",
- theme=gr.themes.Base(),
+ theme=gr.themes.Default(),
css=block_css,
) as demo:
url_params = gr.JSON(visible=False)

- (
- state,
- model_selector,
- chatbot,
- textbox,
- send_btn,
- button_row,
- parameter_row,
- ) = build_single_model_ui(models)
+ state, model_selector = build_single_model_ui(models)

if args.model_list_mode not in ["once", "reload"]:
raise ValueError(f"Unknown model list mode: {args.model_list_mode}")

+ if args.show_terms_of_use:
+ load_js = get_window_url_params_with_tos_js
+ else:
+ load_js = get_window_url_params_js

demo.load(
load_demo,
[url_params],
[
state,
model_selector,
- chatbot,
- textbox,
- send_btn,
- button_row,
- parameter_row,
],
- _js=get_window_url_params_js,
+ _js=load_js
)

return demo
@@ -722,29 +703,35 @@ def build_demo(models):
parser.add_argument(
"--share",
action="store_true",
- help="Whether to generate a public, shareable link.",
+ help="Whether to generate a public, shareable link",
)
parser.add_argument(
"--controller-url",
type=str,
default="http://localhost:21001",
- help="The address of the controller.",
+ help="The address of the controller",
)
parser.add_argument(
"--concurrency-count",
type=int,
default=10,
- help="The concurrency count of the gradio queue.",
+ help="The concurrency count of the gradio queue",
)
parser.add_argument(
"--model-list-mode",
type=str,
default="once",
choices=["once", "reload"],
- help="Whether to load the model list once or reload the model list every time.",
+ help="Whether to load the model list once or reload the model list every time",
)
parser.add_argument(
+ "--moderate", action="store_true",
+ help="Enable content moderation to block unsafe inputs"
+ )
+ parser.add_argument(
- "--moderate", action="store_true", help="Enable content moderation"
+ "--show-terms-of-use",
+ action="store_true",
+ help="Shows term of use before loading the demo",
)
parser.add_argument(
"--add-chatgpt",
(Diffs for the remaining 2 changed files are not shown.)
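With the flag wired through build_demo above, the terms-of-use alert is opt-in at launch time. A typical invocation (assuming a controller is already running at the default address) might be:

python3 -m fastchat.serve.gradio_web_server --controller-url http://localhost:21001 --show-terms-of-use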
