Skip to content

Commit

Permalink
Add better model validations
Browse files Browse the repository at this point in the history
  • Loading branch information
mruwnik committed Oct 3, 2023
1 parent 0881a01 commit 3f39eac
Show file tree
Hide file tree
Showing 2 changed files with 45 additions and 18 deletions.
22 changes: 15 additions & 7 deletions api/src/stampy_chat/settings.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,12 @@
from collections import namedtuple
import tiktoken

from stampy_chat.env import COMPLETIONS_MODEL


# Per-model limits: maxTokens is the model's context-window budget,
# topKBlocks is the default number of citation blocks to include.
Model = namedtuple('Model', ['maxTokens', 'topKBlocks'])


SOURCE_PROMPT = (
"You are a helpful assistant knowledgeable about AI Alignment and Safety. "
"Please give a clear and coherent answer to the user's questions.(written after \"Q:\") "
Expand Down Expand Up @@ -44,6 +48,12 @@
'question': QUESTION_PROMPT,
'modes': PROMPT_MODES,
}
# Known completions models, keyed by API name. Each entry records the
# model's context-window limit and its default number of citation blocks.
MODELS = {
    'gpt-3.5-turbo': Model(maxTokens=4097, topKBlocks=10),
    'gpt-3.5-turbo-16k': Model(maxTokens=16385, topKBlocks=30),
    'gpt-4': Model(maxTokens=8192, topKBlocks=20),
    # 'gpt-4-32k': Model(maxTokens=32768, topKBlocks=30),
}


class Settings:
Expand Down Expand Up @@ -99,23 +109,21 @@ def encoder(self, value):
self.encoders[value] = tiktoken.get_encoding(value)

def set_completions(self, completions, numTokens=None, topKBlocks=None):
    """Switch to the given completions model and update dependent limits.

    :param completions: model name; must be a key of MODELS
    :param numTokens: max tokens sent in the prompt; defaults to the
        model's context-window limit - see
        https://platform.openai.com/docs/models/gpt-4
    :param topKBlocks: max number of blocks used as citations; defaults
        to the model's configured value
    :raises ValueError: if ``completions`` is not a known model
    """
    if completions not in MODELS:
        raise ValueError(f'Unknown model: {completions}')
    self.completions = completions

    # Set the max number of tokens sent in the prompt - explicit override wins
    if numTokens is not None:
        self.numTokens = numTokens
    else:
        self.numTokens = MODELS[completions].maxTokens

    # Set the max number of blocks used as citations - explicit override wins
    if topKBlocks is not None:
        self.topKBlocks = topKBlocks
    else:
        self.topKBlocks = MODELS[completions].topKBlocks

@property
def prompt_modes(self):
Expand Down
41 changes: 30 additions & 11 deletions web/src/pages/playground.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -42,19 +42,24 @@ const DEFAULT_PROMPTS = {
'rather than just giving a formal definition.\n\n',
},
}
// Per-model playground limits, keyed by completions-model name:
// the prompt token budget and the default number of citation blocks.
const MODELS = {
  'gpt-3.5-turbo': {topKBlocks: 10, numTokens: 4095},
  'gpt-3.5-turbo-16k': {topKBlocks: 30, numTokens: 16385},
  'gpt-4': {topKBlocks: 20, numTokens: 8192},
  /* 'gpt-4-32k': {topKBlocks: 30, numTokens: 32768}, */
}
// Initial playground configuration; the settings UI overrides individual
// fields. Token and citation limits are taken from the default model's entry
// so they stay in sync with MODELS.
const DEFAULT_SETTINGS = {
  prompts: DEFAULT_PROMPTS,
  mode: 'default' as Mode,
  completions: 'gpt-3.5-turbo',
  encoder: 'cl100k_base',
  // how many blocks to cite, and how many tokens the prompt may use
  topKBlocks: MODELS['gpt-3.5-turbo'].topKBlocks,
  numTokens: MODELS['gpt-3.5-turbo'].numTokens,
  // slack kept back when working out how many tokens remain
  tokensBuffer: 50,
  // at most this many previous exchanges are replayed as history
  maxHistory: 10,
  // rough share of numTokens devoted to history text before truncating
  historyFraction: 0.25,
  // rough share of numTokens devoted to context text before truncating
  contextFraction: 0.5,
}
const COMPLETION_MODELS = ['gpt-3.5-turbo', 'gpt-4']
// Tokeniser encodings the playground can count tokens with.
const ENCODERS = ['cl100k_base']

const updateIn = (obj: {[key: string]: any}, [head, ...rest]: string[], val: any) => {
Expand Down Expand Up @@ -157,14 +162,12 @@ type ChatSettingsParams = {
}

const ChatSettings = ({settings, updateSettings}: ChatSettingsParams) => {
const changeVal = (field: string, value: any) =>
updateSettings((prev) => ({...prev, [field]: value}))
const update = (setting: string) => (event: ChangeEvent) => {
updateSettings((prev) => ({
...prev,
[setting]: (event.target as HTMLInputElement).value,
}))
changeVal(setting, (event.target as HTMLInputElement).value)
}
const updateNum = (field: string) => (num: Parseable) =>
updateSettings((prev) => ({...prev, [field]: num}))
const updateNum = (field: string) => (num: Parseable) => changeVal(field, num)

return (
<div
Expand All @@ -179,9 +182,24 @@ const ChatSettings = ({settings, updateSettings}: ChatSettingsParams) => {
name="completions-model"
className="col-span-2"
value={settings.completions}
onChange={update('completions')}
onChange={(event: ChangeEvent) => {
const value = (event.target as HTMLInputElement).value
const {numTokens, topKBlocks} = MODELS[value as keyof typeof MODELS]
const prevNumTokens = MODELS[settings.completions as keyof typeof MODELS].numTokens
const prevTopKBlocks = MODELS[settings.completions as keyof typeof MODELS].topKBlocks

if (settings.numTokens === prevNumTokens) {
changeVal('numTokens', numTokens)
} else {
changeVal('numTokens', Math.min(settings.numTokens || 0, numTokens))
}
if (settings.topKBlocks === prevTopKBlocks) {
changeVal('topKBlocks', topKBlocks)
}
changeVal('completions', value)
}}
>
{COMPLETION_MODELS.map((name) => (
{Object.keys(MODELS).map((name) => (
<option value={name} key={name}>
{name}
</option>
Expand Down Expand Up @@ -210,6 +228,7 @@ const ChatSettings = ({settings, updateSettings}: ChatSettingsParams) => {
field="numTokens"
label="Tokens"
min="1"
max={MODELS[settings.completions as keyof typeof MODELS].numTokens}
updater={updateNum('numTokens')}
/>
<NumberInput
Expand Down

0 comments on commit 3f39eac

Please sign in to comment.