Skip to content

Commit

Permalink
agent core setup
Browse files Browse the repository at this point in the history
  • Loading branch information
JerryPan2718 committed Jan 11, 2025
1 parent ce150a6 commit 17a85dc
Show file tree
Hide file tree
Showing 19 changed files with 920 additions and 26 deletions.
36 changes: 36 additions & 0 deletions .github/workflows/core.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# CI for the `core` package: typecheck, lint, and format checks.
# Named "Core" to match the file (core.yaml), the `core/**` path filter,
# and the working directory — the previous "Webapp" name was a copy-paste
# leftover and made run listings misleading.
name: Core

on:
push:
paths:
- "core/**"

jobs:
core:
defaults:
run:
working-directory: core

timeout-minutes: 60
runs-on: ubuntu-latest

steps:
- name: Set date
# Runs before checkout, so use /tmp instead of the (not yet present) core dir.
run: echo "CURRENT_DATE=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
working-directory: /tmp
- name: Check out code
uses: actions/checkout@v4
with:
lfs: true
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: 20.10.0
- name: Install dependencies
run: npm install
- name: Run TypeScript check
run: npm run typecheck
- name: Run eslint check
run: npm run lint
- name: Run prettier check
run: npm run format-check
54 changes: 54 additions & 0 deletions core/agent/agent_factory.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
import { BaseAgent, ModelConfig } from './base_agent';
import { OpenAIAgent } from './openai_agent';
import { AnthropicAgent } from './anthropic_agent';
import { GeminiAgent } from './gemini_agent';
import { GroqAgent } from './groq_agent';
import { MistralAgent } from './mistral_agent';
import { OllamaAgent } from './ollama_agent';
import { TogetherAgent } from './together_agent';
import { BedrockAgent } from './bedrock_agent';
import { CerebrasAgent } from './cerebras_agent';
import { CohereAgent } from './cohere_agent';
import { Claude3Agent } from './claude3_agent';
import { PalmAgent } from './palm_agent';
import { ReplicateAgent } from './replicate_agent';

/** Identifier for each supported LLM provider backend. */
export type AgentType =
  | 'openai'
  | 'anthropic'
  | 'gemini'
  | 'groq'
  | 'mistral'
  | 'ollama'
  | 'together'
  | 'bedrock'
  | 'cerebras'
  | 'cohere'
  | 'claude3'
  | 'palm'
  | 'replicate';

/** Creates the concrete agent implementation for a provider identifier. */
export class AgentFactory {
  /**
   * Instantiate the agent for the given provider.
   *
   * @param type   - provider identifier to dispatch on
   * @param config - model configuration forwarded to the agent constructor
   * @returns the provider-specific `BaseAgent` implementation
   * @throws Error when `type` does not name a known provider
   */
  static createAgent(type: AgentType, config: ModelConfig): BaseAgent {
    // Lookup table instead of a switch: one line per provider, and the
    // Record<AgentType, ...> annotation makes the compiler enforce that
    // every member of the union has a registered constructor.
    const registry: Record<AgentType, new (config: ModelConfig) => BaseAgent> = {
      openai: OpenAIAgent,
      anthropic: AnthropicAgent,
      gemini: GeminiAgent,
      groq: GroqAgent,
      mistral: MistralAgent,
      ollama: OllamaAgent,
      together: TogetherAgent,
      bedrock: BedrockAgent,
      cerebras: CerebrasAgent,
      cohere: CohereAgent,
      claude3: Claude3Agent,
      palm: PalmAgent,
      replicate: ReplicateAgent,
    };

    const AgentCtor = registry[type];
    if (!AgentCtor) {
      // Runtime guard for untyped callers (e.g. a string from config/JSON).
      throw new Error(`Unknown agent type: ${type}`);
    }
    return new AgentCtor(config);
  }
}
58 changes: 58 additions & 0 deletions core/agent/anthropic_agent.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
import Anthropic from '@anthropic-ai/sdk';
import { BaseAgent, ChatMessage, ModelConfig } from './base_agent';

/**
 * Agent backed by the Anthropic Messages API.
 *
 * Handles the API's system-prompt convention: the Messages endpoint rejects
 * role 'system' entries inside `messages`, so system messages are lifted into
 * the top-level `system` parameter.
 */
export class AnthropicAgent implements BaseAgent {
  private client: Anthropic;
  // Kept locally: ModelConfig.apiKey is a required string, while reading it
  // back off the SDK client is not a stable/typed part of its surface.
  private apiKey: string;
  modelName: string;
  temperature: number;
  maxTokens: number;

  constructor(config: ModelConfig) {
    this.client = new Anthropic({ apiKey: config.apiKey });
    this.apiKey = config.apiKey;
    this.modelName = config.modelName;
    this.temperature = config.temperature;
    this.maxTokens = config.maxTokens;
  }

  /** Send a full conversation and return the model's text reply. */
  async chat(messages: ChatMessage[]): Promise<string> {
    const { system, conversation } = this.splitMessages(messages);
    const response = await this.client.messages.create({
      model: this.modelName,
      system,
      messages: conversation,
      temperature: this.temperature,
      max_tokens: this.maxTokens,
    });
    // content is a list of typed blocks; only text blocks carry `.text`.
    const first = response.content[0];
    return first && first.type === 'text' ? first.text : '';
  }

  /** Stream the reply as incremental text chunks. */
  async *stream(messages: ChatMessage[]): AsyncGenerator<string> {
    const { system, conversation } = this.splitMessages(messages);
    const stream = await this.client.messages.create({
      model: this.modelName,
      system,
      messages: conversation,
      temperature: this.temperature,
      max_tokens: this.maxTokens,
      stream: true,
    });

    for await (const chunk of stream) {
      // Delta events also cover non-text deltas (e.g. tool input); guard
      // so we only yield actual text.
      if (chunk.type === 'content_block_delta' && 'text' in chunk.delta) {
        yield chunk.delta.text;
      }
    }
  }

  /**
   * Split messages into the top-level `system` string (joined, or undefined
   * when absent) and the user/assistant conversation the API accepts.
   * The Messages API rejects role 'system' inside `messages`.
   */
  private splitMessages(messages: ChatMessage[]): {
    system: string | undefined;
    conversation: { role: 'user' | 'assistant'; content: string }[];
  } {
    const systemParts: string[] = [];
    const conversation: { role: 'user' | 'assistant'; content: string }[] = [];
    for (const msg of messages) {
      if (msg.role === 'system') {
        systemParts.push(msg.content);
      } else {
        conversation.push({ role: msg.role, content: msg.content });
      }
    }
    return {
      system: systemParts.length > 0 ? systemParts.join('\n') : undefined,
      conversation,
    };
  }

  getModelConfig(): ModelConfig {
    return {
      modelName: this.modelName,
      temperature: this.temperature,
      maxTokens: this.maxTokens,
      apiKey: this.apiKey,
    };
  }
}
23 changes: 23 additions & 0 deletions core/agent/base_agent.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
// NOTE: the previous `import { ChatMessage, ModelConfig } from '../types';`
// collided with the local declarations below (duplicate identifiers — a
// compile error). These types are declared here and imported by the agent
// implementations, so the stray import is removed.

/** A single turn in a chat conversation. */
export interface ChatMessage {
  role: 'system' | 'user' | 'assistant';
  content: string;
}

/** Provider-agnostic model settings every agent is constructed from. */
export interface ModelConfig {
  modelName: string;
  temperature: number;
  maxTokens: number;
  apiKey: string;
}

/**
 * Contract implemented by every provider-specific agent.
 * Implementations must expose the resolved model settings as fields and
 * support both one-shot chat and incremental streaming.
 */
export interface BaseAgent {
  modelName: string;
  temperature: number;
  maxTokens: number;

  /** Send a conversation and resolve with the full text reply. */
  chat(messages: ChatMessage[]): Promise<string>;
  /** Yield the reply incrementally as text chunks arrive. */
  stream(messages: ChatMessage[]): AsyncGenerator<string>;
  /** Return the configuration this agent was constructed with. */
  getModelConfig(): ModelConfig;
}
75 changes: 75 additions & 0 deletions core/agent/bedrock_agent.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
import {
  BedrockRuntimeClient,
  InvokeModelCommand,
  InvokeModelWithResponseStreamCommand,
} from '@aws-sdk/client-bedrock-runtime';
import { BaseAgent, ChatMessage, ModelConfig } from './base_agent';

export class BedrockAgent implements BaseAgent {
private client: BedrockRuntimeClient;
modelName: string;
temperature: number;
maxTokens: number;

constructor(config: ModelConfig) {
this.client = new BedrockRuntimeClient({
credentials: {
accessKeyId: process.env.AWS_ACCESS_KEY_ID || '',
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY || '',
},
region: process.env.AWS_REGION || 'us-east-1',
});
this.modelName = config.modelName;
this.temperature = config.temperature;
this.maxTokens = config.maxTokens;
}

async chat(messages: ChatMessage[]): Promise<string> {
const command = new InvokeModelCommand({
modelId: this.modelName,
body: JSON.stringify({
prompt: this.formatMessages(messages),
max_tokens: this.maxTokens,
temperature: this.temperature,
}),
});

const response = await this.client.send(command);
const responseBody = JSON.parse(new TextDecoder().decode(response.body));
return responseBody.completion;
}

async *stream(messages: ChatMessage[]): AsyncGenerator<string> {
const command = new InvokeModelCommand({
modelId: this.modelName,
body: JSON.stringify({
prompt: this.formatMessages(messages),
max_tokens: this.maxTokens,
temperature: this.temperature,
stream: true,
}),
});

const response = await this.client.send(command);
const reader = response.body.getReader();

while (true) {
const { done, value } = await reader.read();
if (done) break;

const chunk = JSON.parse(new TextDecoder().decode(value));
if (chunk.completion) {
yield chunk.completion;
}
}
}

private formatMessages(messages: ChatMessage[]): string {
return messages.map(msg => `${msg.role}: ${msg.content}`).join('\n');
}

getModelConfig(): ModelConfig {
return {
modelName: this.modelName,
temperature: this.temperature,
maxTokens: this.maxTokens,
apiKey: '',
};
}
}
55 changes: 55 additions & 0 deletions core/agent/cerebras_agent.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
import { Cerebras } from '@cerebras/sdk';
import { BaseAgent, ChatMessage, ModelConfig } from './base_agent';

/**
 * Agent backed by the Cerebras SDK.
 * Flattens the chat history into a single prompt string for `generate`.
 */
export class CerebrasAgent implements BaseAgent {
  private client: Cerebras;
  modelName: string;
  temperature: number;
  maxTokens: number;

  constructor(config: ModelConfig) {
    this.client = new Cerebras(config.apiKey);
    this.modelName = config.modelName;
    this.temperature = config.temperature;
    this.maxTokens = config.maxTokens;
  }

  /** One-shot completion: returns the full generated text. */
  async chat(messages: ChatMessage[]): Promise<string> {
    const { text } = await this.client.generate(this.buildRequest(messages));
    return text;
  }

  /** Streaming completion: yields text chunks as they arrive. */
  async *stream(messages: ChatMessage[]): AsyncGenerator<string> {
    const chunks = await this.client.generate({
      ...this.buildRequest(messages),
      stream: true,
    });

    for await (const piece of chunks) {
      if (piece.text) {
        yield piece.text;
      }
    }
  }

  /** Shared request options for both chat() and stream(). */
  private buildRequest(messages: ChatMessage[]) {
    const lines: string[] = [];
    for (const { role, content } of messages) {
      lines.push(`${role}: ${content}`);
    }
    return {
      model: this.modelName,
      prompt: lines.join('\n'),
      temperature: this.temperature,
      max_tokens: this.maxTokens,
    };
  }

  getModelConfig(): ModelConfig {
    return {
      modelName: this.modelName,
      temperature: this.temperature,
      maxTokens: this.maxTokens,
      apiKey: '',
    };
  }
}
58 changes: 58 additions & 0 deletions core/agent/claude3_agent.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
import { Anthropic } from '@anthropic-ai/sdk';
import { BaseAgent, ChatMessage, ModelConfig } from './base_agent';

/**
 * Agent pinned to Anthropic's Claude 3 family (defaults to claude-3-opus).
 *
 * Fixes over the original: system messages were remapped to role 'user',
 * silently losing system-prompt semantics — the Messages API has a dedicated
 * top-level `system` parameter for them; and getModelConfig() dropped the
 * API key (returned '').
 */
export class Claude3Agent implements BaseAgent {
  private client: Anthropic;
  // Kept so getModelConfig() can return the full, reusable config.
  private apiKey: string;
  modelName: string;
  temperature: number;
  maxTokens: number;

  constructor(config: ModelConfig) {
    this.client = new Anthropic({ apiKey: config.apiKey });
    this.apiKey = config.apiKey;
    this.modelName = config.modelName || 'claude-3-opus-20240229';
    this.temperature = config.temperature;
    this.maxTokens = config.maxTokens;
  }

  /** Send a full conversation and return the model's text reply. */
  async chat(messages: ChatMessage[]): Promise<string> {
    const { system, conversation } = this.splitMessages(messages);
    const response = await this.client.messages.create({
      model: this.modelName,
      system,
      messages: conversation,
      temperature: this.temperature,
      max_tokens: this.maxTokens,
    });
    // content is a list of typed blocks; only text blocks carry `.text`.
    const first = response.content[0];
    return first && first.type === 'text' ? first.text : '';
  }

  /** Stream the reply as incremental text chunks. */
  async *stream(messages: ChatMessage[]): AsyncGenerator<string> {
    const { system, conversation } = this.splitMessages(messages);
    const stream = await this.client.messages.create({
      model: this.modelName,
      system,
      messages: conversation,
      temperature: this.temperature,
      max_tokens: this.maxTokens,
      stream: true,
    });

    for await (const chunk of stream) {
      // Guard on the delta carrying text (deltas can also be tool input).
      if (chunk.type === 'content_block_delta' && 'text' in chunk.delta) {
        yield chunk.delta.text;
      }
    }
  }

  /**
   * Route system messages to the API's `system` parameter and pass only
   * user/assistant turns in `messages`, as the Messages API requires.
   */
  private splitMessages(messages: ChatMessage[]): {
    system: string | undefined;
    conversation: { role: 'user' | 'assistant'; content: string }[];
  } {
    const systemParts: string[] = [];
    const conversation: { role: 'user' | 'assistant'; content: string }[] = [];
    for (const msg of messages) {
      if (msg.role === 'system') {
        systemParts.push(msg.content);
      } else {
        conversation.push({ role: msg.role, content: msg.content });
      }
    }
    return {
      system: systemParts.length > 0 ? systemParts.join('\n') : undefined,
      conversation,
    };
  }

  getModelConfig(): ModelConfig {
    return {
      modelName: this.modelName,
      temperature: this.temperature,
      maxTokens: this.maxTokens,
      apiKey: this.apiKey,
    };
  }
}
Loading

0 comments on commit 17a85dc

Please sign in to comment.