
Commit

chore(openAI): increase maximum number of tokens
yalattas committed Jul 2, 2023
1 parent 0d499c1 commit 431e5d0
Showing 2 changed files with 2 additions and 2 deletions.
2 changes: 1 addition & 1 deletion mindmate/commands/chat.py
@@ -24,7 +24,7 @@ def model_option_callback(ctx, param, value):
 @click.option('-m', '--model', required=True, default='text-davinci-003', show_default=True, type=str, callback=model_option_callback, help='select targeted model to utilize')
 @click.option('-p', '--prompt', required=True, show_default=False, type=str, help='Your prompt to AI')
 @click.option('-s', '--stream', required=False, default=True, show_default=True, type=bool, help='stream AI response on your terminal')
-@click.option('--max-tokens', required=False, default=100, show_default=True, type=int, help='stream AI response on your terminal')
+@click.option('--max-tokens', required=False, default=3000, show_default=True, type=int, help='stream AI response on your terminal')
 def chat(platform, model, prompt, stream, max_tokens):
     """offers text-based response to your prompt"""
     click.echo(help.generic_message())
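To illustrate what the new default means for callers, here is a minimal, self-contained click sketch, not the project's actual command: the platform, model, and stream options and the model_option_callback are left out, and the --max-tokens help text is reworded here because the repository line reuses the --stream description. With default=3000, a caller who omits --max-tokens now gets a 3000-token budget instead of 100.

import click

@click.command()
@click.option('-p', '--prompt', required=True, type=str, help='Your prompt to AI')
@click.option('--max-tokens', required=False, default=3000, show_default=True, type=int,
              help='maximum number of tokens the completion may generate')
def chat(prompt, max_tokens):
    """offers text-based response to your prompt"""
    # before this commit a bare `chat -p "..."` capped responses at 100 tokens;
    # it now allows up to 3000 unless the caller overrides --max-tokens
    click.echo(f'prompt={prompt!r} max_tokens={max_tokens}')

if __name__ == '__main__':
    chat()

Because show_default=True, the 3000 default also shows up in the command's --help output.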
2 changes: 1 addition & 1 deletion mindmate/services/openai.py
@@ -80,7 +80,7 @@ def ask_ai_with_stream(self, prompt: Prompt, model: str, max_tokens=5000, n=1) -
         try:
             completion = openai.Completion.create(
                 model=model,
-                temperature=0,
+                temperature=0.2,
                 stream=True,
                 max_tokens=max_tokens,
                 user=self.openai_id,
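For context on where these parameters land, the following is a hedged sketch of the surrounding streaming call, assuming the legacy openai<1.0 Python SDK this file targets (openai.Completion.create with stream=True yields incremental chunks). The prompt handling, user id, and lack of error handling are simplified stand-ins, not the project's exact code.

import openai  # legacy 0.x SDK; openai.api_key must be set beforehand

def ask_ai_with_stream(prompt, model='text-davinci-003', max_tokens=3000, user='mindmate-user'):
    completion = openai.Completion.create(
        model=model,
        prompt=prompt,
        temperature=0.2,      # slightly non-deterministic, per this commit (was 0)
        stream=True,
        max_tokens=max_tokens,
        n=1,
        user=user,
    )
    answer = ''
    for chunk in completion:
        # each streamed chunk carries a partial completion in choices[0].text
        piece = chunk['choices'][0]['text']
        print(piece, end='', flush=True)
        answer += piece
    return answer

Raising temperature from 0 to 0.2 keeps answers mostly deterministic while allowing minor variation, and the larger max_tokens default makes long responses less likely to be cut off mid-sentence.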
