Enable User ID in environment and advanced model configuration #155

Open
wants to merge 1 commit into base: v1-dev
3 changes: 3 additions & 0 deletions .env.example
@@ -1,3 +1,6 @@
# [Optional] Set the user id for OpenAI and Helicone to track usage
USER_ID=

# [Recommended for local deployments] Backend API key for OpenAI, so that users don't need one (UI > this > '')
OPENAI_API_KEY=
# [Optional] Sets the "OpenAI-Organization" header field to support organization users (UI > this > '')
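A hypothetical filled-in value for a local deployment (illustrative, not part of the PR). Per the router change further down, a user id entered in the UI takes precedence over this variable, which in turn falls back to an empty string:

# .env (example value only)
USER_ID=acme-team-staging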
2 changes: 2 additions & 0 deletions src/common/types/env.d.ts
@@ -4,6 +4,8 @@ declare namespace NodeJS {

// available to the server-side
interface ProcessEnv {
// OpenAI and Helicone
USER_ID: string;

// LLM: OpenAI
OPENAI_API_KEY: string;
25 changes: 21 additions & 4 deletions src/modules/llms/openai/OpenAISourceSetup.tsx
@@ -1,7 +1,7 @@
import * as React from 'react';

import { Box, Button, FormControl, FormHelperText, FormLabel, Input, Switch } from '@mui/joy';
import SyncIcon from '@mui/icons-material/Sync';
import { Box, Button, FormControl, FormHelperText, FormLabel, Input, Switch } from '@mui/joy';

import { apiQuery } from '~/modules/trpc/trpc.client';

@@ -12,9 +12,9 @@ import { Link } from '~/common/components/Link';
import { settingsCol1Width, settingsGap } from '~/common/theme';

import { DLLM, DModelSource, DModelSourceId } from '../llm.types';
import { OpenAI } from './openai.types';
import { hasServerKeyOpenAI, isValidOpenAIApiKey, LLMOptionsOpenAI, ModelVendorOpenAI } from './openai.vendor';
import { useModelsStore, useSourceSetup } from '../store-llms';
import { OpenAI } from './openai.types';
import { LLMOptionsOpenAI, ModelVendorOpenAI, hasServerKeyOpenAI, isValidOpenAIApiKey } from './openai.vendor';


export function OpenAISourceSetup(props: { sourceId: DModelSourceId }) {
@@ -25,7 +25,7 @@ export function OpenAISourceSetup(props: { sourceId: DModelSourceId }) {
// external state
const {
source, sourceLLMs, updateSetup,
normSetup: { heliKey, oaiHost, oaiKey, oaiOrg, moderationCheck },
normSetup: { heliKey, oaiHost, oaiKey, oaiOrg, moderationCheck, userId },
} = useSourceSetup(props.sourceId, ModelVendorOpenAI.normalizeSetup);

const hasModels = !!sourceLLMs.length;
@@ -110,6 +110,23 @@ export function OpenAISourceSetup(props: { sourceId: DModelSourceId }) {
/>
</FormControl>}

{showAdvanced && <FormControl orientation='horizontal' sx={{ flexWrap: 'wrap', justifyContent: 'space-between' }}>
<Box sx={{ minWidth: settingsCol1Width }}>
<FormLabel>
User ID
</FormLabel>
<FormHelperText sx={{ display: 'block' }}>
<Link level='body-sm' href='https://docs.helicone.ai/features/advanced-usage/user-metrics' target='_blank'>helicone</Link>,
<Link level='body-sm' href='https://platform.openai.com/docs/guides/safety-best-practices/end-user-ids' target='_blank'>OpenAI</Link>
</FormHelperText>
</Box>
<Input
variant='outlined' placeholder='big-agi'
value={userId} onChange={event => updateSetup({ userId: event.target.value })}
sx={{ flexGrow: 1 }}
/>
</FormControl>}

{showAdvanced && <FormControl orientation='horizontal' sx={{ flexWrap: 'wrap', justifyContent: 'space-between' }}>
<Box sx={{ minWidth: settingsCol1Width }}>
<FormLabel>
13 changes: 11 additions & 2 deletions src/modules/llms/openai/openai.router.ts
@@ -1,5 +1,5 @@
import { z } from 'zod';
import { TRPCError } from '@trpc/server';
import { z } from 'zod';

import { createTRPCRouter, publicProcedure } from '~/modules/trpc/trpc.server';
import { fetchJsonOrTRPCError } from '~/modules/trpc/trpc.serverutils';
@@ -19,12 +19,14 @@ const accessSchema = z.object({
oaiHost: z.string().trim(),
heliKey: z.string().trim(),
moderationCheck: z.boolean(),
userId: z.string().trim().optional(),
});

export const modelSchema = z.object({
id: z.string(),
temperature: z.number().min(0).max(1).optional(),
maxTokens: z.number().min(1).max(1000000),
userId: z.string().optional()
});

export const historySchema = z.array(z.object({
@@ -202,6 +204,9 @@ export function openAIAccess(access: AccessSchema, apiPath: string): { headers:
// Organization ID
const oaiOrg = access.oaiOrg || process.env.OPENAI_API_ORG_ID || '';

// User ID
const userId = access.userId || process.env.USER_ID || '';

// API host
let oaiHost = access.oaiHost || process.env.OPENAI_API_HOST || DEFAULT_OPENAI_HOST;
if (!oaiHost.startsWith('http'))
@@ -221,19 +226,23 @@ export function openAIAccess(access: AccessSchema, apiPath: string): { headers:
...(oaiKey && { Authorization: `Bearer ${oaiKey}` }),
'Content-Type': 'application/json',
...(oaiOrg && { 'OpenAI-Organization': oaiOrg }),
...(heliKey && { 'Helicone-Auth': `Bearer ${heliKey}` }),
...(heliKey && { 'Helicone-Auth': `Bearer ${heliKey}`, 'Helicone-User-Id': userId }),
},
url: oaiHost + apiPath,
};
}

export function openAIChatCompletionPayload(model: ModelSchema, history: HistorySchema, functions: FunctionsSchema | null, n: number, stream: boolean): OpenAI.Wire.ChatCompletion.Request {
// User ID
const userId = model.userId || process.env.USER_ID || '';

return {
model: model.id,
messages: history,
...(functions && { functions: functions, function_call: 'auto' }),
...(model.temperature && { temperature: model.temperature }),
...(model.maxTokens && { max_tokens: model.maxTokens }),
...(userId && { user: userId }),
n,
stream,
};
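A minimal usage sketch of the two helpers above. The concrete values, the '/v1/chat/completions' path, the message shape, and the oaiKey/oaiOrg fields on the access object are illustrative assumptions, not part of the PR; in both helpers the resolution order is client value, then the USER_ID environment variable, then '':

// hypothetical inputs for illustration only
const access = { oaiKey: 'sk-...', oaiOrg: '', oaiHost: '', heliKey: 'sk-helicone-...', moderationCheck: false, userId: 'customer-42' };
const model = { id: 'gpt-4', maxTokens: 1024, userId: 'customer-42' };
const history = [{ role: 'user', content: 'Hello' }];

const { headers, url } = openAIAccess(access, '/v1/chat/completions');
// headers now carry 'Helicone-Auth' plus 'Helicone-User-Id: customer-42' alongside the OpenAI auth headers

const body = openAIChatCompletionPayload(model, history, null, 1, true);
// body.user === 'customer-42'; when neither value resolves, the user field is omitted from the payload

Both helpers fall back to the same USER_ID variable, so a server-only deployment can tag usage even when nothing is entered in the advanced UI field.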
4 changes: 3 additions & 1 deletion src/modules/llms/openai/openai.vendor.ts
@@ -1,7 +1,7 @@
import { apiAsync } from '~/modules/trpc/trpc.client';

import { DLLM, ModelVendor } from '../llm.types';
import { VChatFunctionIn, VChatMessageIn, VChatMessageOrFunctionCallOut, VChatMessageOut } from '../llm.client';
import { DLLM, ModelVendor } from '../llm.types';

import { OpenAIIcon } from './OpenAIIcon';
import { OpenAILLMOptions } from './OpenAILLMOptions';
@@ -19,6 +19,7 @@ export interface SourceSetupOpenAI {
oaiHost: string; // use OpenAI-compatible non-default hosts (full origin path)
heliKey: string; // helicone key (works in conjunction with oaiHost)
moderationCheck: boolean;
userId: string; // user id for OpenAI, and for Helicone if heliKey is present
}

export interface LLMOptionsOpenAI {
@@ -45,6 +46,7 @@ export const ModelVendorOpenAI: ModelVendor<SourceSetupOpenAI, LLMOptionsOpenAI>
oaiOrg: '',
oaiHost: '',
heliKey: '',
userId: '',
moderationCheck: false,
...partialSetup,
}),
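A short sketch of the new default at work, assuming the object above is returned by ModelVendorOpenAI.normalizeSetup (the function OpenAISourceSetup.tsx passes to useSourceSetup); the key value is a placeholder:

const setup = ModelVendorOpenAI.normalizeSetup({ heliKey: 'sk-helicone-...' });
// setup.userId === '' until the advanced User ID field is filled; the router then falls back to process.env.USER_ID
// setup.heliKey is non-empty, so openAIAccess will attach both Helicone-Auth and Helicone-User-Id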