Skip to content

Commit

Permalink
Show terms of use as a JS alert (#2461)
Browse files Browse the repository at this point in the history
  • Loading branch information
merrymercy authored Sep 22, 2023
1 parent c4c195c commit a040cdc
Show file tree
Hide file tree
Showing 11 changed files with 193 additions and 187 deletions.
2 changes: 1 addition & 1 deletion docs/commands/webserver.md
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ cd fastchat_logs/server0
export OPENAI_API_KEY=
export ANTHROPIC_API_KEY=
python3 -m fastchat.serve.gradio_web_server_multi --controller http://localhost:21001 --concurrency 10 --add-chatgpt --add-claude --add-palm --anony-only --elo ~/elo_results/elo_results_20230802.pkl --leaderboard-table-file ~/elo_results/leaderboard_table_20230802.csv --register ~/elo_results/register_oai_models.json
python3 -m fastchat.serve.gradio_web_server_multi --controller http://localhost:21001 --concurrency 10 --add-chatgpt --add-claude --add-palm --anony-only --elo ~/elo_results/elo_results.pkl --leaderboard-table-file ~/elo_results/leaderboard_table.csv --register ~/elo_results/register_oai_models.json --show-terms
python3 backup_logs.py
```
Expand Down
29 changes: 29 additions & 0 deletions fastchat/llm_judge/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -418,6 +418,35 @@ def chat_compeletion_openai(model, conv, temperature, max_tokens):
return output


def chat_compeletion_openai_azure(model, conv, temperature, max_tokens):
    """Query an Azure OpenAI chat deployment and return the completion text.

    Configures the global ``openai`` module for Azure using the
    AZURE_OPENAI_ENDPOINT / AZURE_OPENAI_KEY environment variables, strips an
    optional "azure-" routing prefix from ``model``, and retries up to
    API_MAX_RETRY times on OpenAI errors.

    Args:
        model: Azure deployment name, optionally prefixed with "azure-".
        conv: conversation object providing ``to_openai_api_messages()``.
        temperature: sampling temperature forwarded to the API.
        max_tokens: completion token limit forwarded to the API.

    Returns:
        The completion text, or API_ERROR_OUTPUT if every attempt fails.
    """
    openai.api_type = "azure"
    openai.api_base = os.environ["AZURE_OPENAI_ENDPOINT"]
    openai.api_key = os.environ["AZURE_OPENAI_KEY"]
    openai.api_version = "2023-05-15"

    # Strip the prefix only when it is actually a prefix: the previous
    # `"azure-" in model` check chopped 6 leading characters off names that
    # merely *contained* "azure-" (e.g. "my-azure-model").
    if model.startswith("azure-"):
        model = model[len("azure-"):]

    output = API_ERROR_OUTPUT
    for _ in range(API_MAX_RETRY):
        try:
            messages = conv.to_openai_api_messages()
            # Azure's API takes the deployment via `engine`, not `model`.
            response = openai.ChatCompletion.create(
                engine=model,
                messages=messages,
                n=1,
                temperature=temperature,
                max_tokens=max_tokens,
            )
            output = response["choices"][0]["message"]["content"]
            break
        except openai.error.OpenAIError as e:
            # Best-effort retry loop: log the error and back off.
            print(type(e), e)
            time.sleep(API_RETRY_SLEEP)

    return output


def chat_compeletion_anthropic(model, conv, temperature, max_tokens):
output = API_ERROR_OUTPUT
for _ in range(API_MAX_RETRY):
Expand Down
8 changes: 7 additions & 1 deletion fastchat/model/model_registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,13 @@ def register_model_info(


def get_model_info(name: str) -> ModelInfo:
    """Look up the registered ModelInfo for ``name``.

    Unregistered models no longer raise KeyError; instead a placeholder
    ModelInfo (empty link, reminder description) is returned so the UI keeps
    working for models that were never registered.
    """
    if name in model_info:
        return model_info[name]
    # To fix this, please use `register_model_info` to register your model
    return ModelInfo(
        name, "", "Register the description at fastchat/model/model_registry.py"
    )


register_model_info(
Expand Down
32 changes: 5 additions & 27 deletions fastchat/serve/gradio_block_arena_anony.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,17 +57,7 @@ def load_demo_side_by_side_anony(models_, url_params):
gr.Markdown.update(visible=True),
)

return (
states
+ selector_updates
+ (gr.Chatbot.update(visible=True),) * num_sides
+ (
gr.Textbox.update(visible=True),
gr.Box.update(visible=True),
gr.Row.update(visible=True),
gr.Accordion.update(visible=True),
)
)
return states + selector_updates


def vote_last_response(states, vote_type, model_selectors, request: gr.Request):
Expand Down Expand Up @@ -388,9 +378,6 @@ def build_side_by_side_ui_anony(models):
### Leaderboard
See [lmsys/chatbot-arena-leaderboard](https://huggingface.co/spaces/lmsys/chatbot-arena-leaderboard) or the 4th tab above on this page.
### Terms of use
By using this service, users are required to agree to the following terms: The service is a research preview intended for non-commercial use only. It only provides limited safety measures and may generate offensive content. It must not be used for any illegal, harmful, violent, racist, or sexual purposes. **The service collects user dialogue data and reserves the right to distribute it under a Creative Commons Attribution (CC-BY) license.** The demo works better on desktop devices with a wide screen.
### Battle
Please scroll down and start chatting. The models include both closed-source models (e.g., ChatGPT) and open-source models (e.g., Llama).
"""
Expand All @@ -407,7 +394,7 @@ def build_side_by_side_ui_anony(models):
label = "Model A" if i == 0 else "Model B"
with gr.Column():
chatbots[i] = gr.Chatbot(
label=label, elem_id=f"chatbot", visible=False, height=550
label=label, elem_id=f"chatbot", height=550
)

with gr.Row():
Expand All @@ -432,19 +419,18 @@ def build_side_by_side_ui_anony(models):
textbox = gr.Textbox(
show_label=False,
placeholder="Enter your prompt here and press ENTER",
visible=False,
container=False,
elem_id="input_box",
)
with gr.Column(scale=1, min_width=50):
send_btn = gr.Button(value="Send", visible=False, variant="primary")
send_btn = gr.Button(value="Send", variant="primary")

with gr.Row() as button_row:
clear_btn = gr.Button(value="🗑️ Clear history", interactive=False)
regenerate_btn = gr.Button(value="🔄 Regenerate", interactive=False)
share_btn = gr.Button(value="📷 Share")

with gr.Accordion("Parameters", open=False, visible=True) as parameter_row:
with gr.Accordion("Parameters", open=False) as parameter_row:
temperature = gr.Slider(
minimum=0.0,
maximum=1.0,
Expand Down Expand Up @@ -560,12 +546,4 @@ def build_side_by_side_ui_anony(models):
flash_buttons, [], btn_list
)

return (
states,
model_selectors,
chatbots,
textbox,
send_btn,
button_row,
parameter_row,
)
return states + model_selectors
35 changes: 8 additions & 27 deletions fastchat/serve/gradio_block_arena_named.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,17 +62,7 @@ def load_demo_side_by_side_named(models, url_params):
gr.Dropdown.update(choices=models, value=model_right, visible=True),
)

return (
states
+ selector_updates
+ (gr.Chatbot.update(visible=True),) * num_sides
+ (
gr.Textbox.update(visible=True),
gr.Box.update(visible=True),
gr.Row.update(visible=True),
gr.Accordion.update(visible=True),
)
)
return states + selector_updates


def vote_last_response(states, vote_type, model_selectors, request: gr.Request):
Expand Down Expand Up @@ -313,10 +303,10 @@ def build_side_by_side_ui_named(models):
- You can do multiple turns of conversations before voting.
- Click "Clear history" to start a new round.
### Terms of use
By using this service, users are required to agree to the following terms: The service is a research preview intended for non-commercial use only. It only provides limited safety measures and may generate offensive content. It must not be used for any illegal, harmful, violent, racist, or sexual purposes. **The service collects user dialogue data and reserves the right to distribute it under a Creative Commons Attribution (CC-BY) license.** The demo works better on desktop devices with a wide screen.
### Leaderboard
See [lmsys/chatbot-arena-leaderboard](https://huggingface.co/spaces/lmsys/chatbot-arena-leaderboard) or the 4th tab above on this page.
### Choose two models to chat with (view [leaderboard](https://huggingface.co/spaces/lmsys/chatbot-arena-leaderboard))
### Choose two models to chat with
"""

states = [gr.State() for _ in range(num_sides)]
Expand Down Expand Up @@ -345,7 +335,7 @@ def build_side_by_side_ui_named(models):
label = "Model A" if i == 0 else "Model B"
with gr.Column():
chatbots[i] = gr.Chatbot(
label=label, elem_id=f"chatbot", visible=False, height=550
label=label, elem_id=f"chatbot", height=550
)

with gr.Row():
Expand All @@ -365,19 +355,18 @@ def build_side_by_side_ui_named(models):
textbox = gr.Textbox(
show_label=False,
placeholder="Enter your prompt here and press ENTER",
visible=False,
container=False,
elem_id="input_box",
)
with gr.Column(scale=1, min_width=50):
send_btn = gr.Button(value="Send", visible=False, variant="primary")
send_btn = gr.Button(value="Send", variant="primary")

with gr.Row() as button_row:
regenerate_btn = gr.Button(value="🔄 Regenerate", interactive=False)
clear_btn = gr.Button(value="🗑️ Clear history", interactive=False)
share_btn = gr.Button(value="📷 Share")

with gr.Accordion("Parameters", open=False, visible=True) as parameter_row:
with gr.Accordion("Parameters", open=False) as parameter_row:
temperature = gr.Slider(
minimum=0.0,
maximum=1.0,
Expand Down Expand Up @@ -495,12 +484,4 @@ def build_side_by_side_ui_named(models):
flash_buttons, [], btn_list
)

return (
states,
model_selectors,
chatbots,
textbox,
send_btn,
button_row,
parameter_row,
)
return states + model_selectors
Loading

0 comments on commit a040cdc

Please sign in to comment.