Skip to content

Commit

Permalink
agent core setup
Browse files Browse the repository at this point in the history
  • Loading branch information
JerryPan2718 committed Jan 11, 2025
1 parent ce150a6 commit d6e0e9c
Show file tree
Hide file tree
Showing 17 changed files with 3,602 additions and 2,295 deletions.
36 changes: 36 additions & 0 deletions .github/workflows/core.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# CI for the agent core package — runs only when files under core/ change.
# NOTE(review): name/job were "Webapp", contradicting core.yaml and the
# core/** path filter (likely copy-pasted); renamed to match. If branch
# protection referenced the old "webapp" check name, update it there too.
name: Core

on:
  push:
    paths:
      - "core/**"

jobs:
  core:
    defaults:
      run:
        # All npm steps run inside the core package directory.
        working-directory: core

    timeout-minutes: 60
    runs-on: ubuntu-latest

    steps:
      - name: Set date
        # Runs in /tmp because core/ does not exist before checkout.
        run: echo "CURRENT_DATE=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
        working-directory: /tmp
      - name: Check out code
        uses: actions/checkout@v4
        with:
          lfs: true
      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 20.10.0
      - name: Install dependencies
        # NOTE(review): prefer `npm ci` for reproducible CI installs once a
        # package-lock.json is committed.
        run: npm install
      - name: Run TypeScript check
        run: npm run typecheck
      - name: Run eslint check
        run: npm run lint
      - name: Run prettier check
        run: npm run format-check
2 changes: 1 addition & 1 deletion SECURITY.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,4 @@ If you are deploying Digimon Engine, read [Whitepaper ](https://docs.digimon.tec

## Reporting a vulnerability

Please report security issues to `[email protected]`.
Please report security issues to `[email protected]`.
48 changes: 48 additions & 0 deletions core/agent/agent_factory.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
import { BaseAgent, ModelConfig } from "./base_agent";
import { OpenAIAgent } from "./openai_agent";
import { AnthropicAgent } from "./anthropic_agent";
import { GeminiAgent } from "./gemini_agent";
import { MistralAgent } from "./mistral_agent";
import { OllamaAgent } from "./ollama_agent";
import { BedrockAgent } from "./bedrock_agent";
import { CohereAgent } from "./cohere_agent";
import { Claude3Agent } from "./claude3_agent";
import { PalmAgent } from "./palm_agent";

/** Tags identifying every supported LLM provider backend. */
export type AgentType =
  | "openai"
  | "anthropic"
  | "gemini"
  | "mistral"
  | "ollama"
  | "bedrock"
  | "cohere"
  | "claude3"
  | "palm";

/** Constructor shape shared by every concrete agent implementation. */
type AgentConstructor = new (config: ModelConfig) => BaseAgent;

/** Registry mapping each provider tag to its concrete agent class. */
const AGENT_CLASSES: Record<AgentType, AgentConstructor> = {
  openai: OpenAIAgent,
  anthropic: AnthropicAgent,
  gemini: GeminiAgent,
  mistral: MistralAgent,
  ollama: OllamaAgent,
  bedrock: BedrockAgent,
  cohere: CohereAgent,
  claude3: Claude3Agent,
  palm: PalmAgent,
};

/**
 * Factory that builds the concrete BaseAgent implementation for a
 * provider tag and its model configuration.
 */
export class AgentFactory {
  /**
   * Instantiate the agent registered for `type`.
   * @throws Error when `type` has no registered implementation.
   */
  static createAgent(type: AgentType, config: ModelConfig): BaseAgent {
    const AgentClass = AGENT_CLASSES[type];
    if (!AgentClass) {
      throw new Error(`Unknown agent type: ${type}`);
    }
    return new AgentClass(config);
  }
}
58 changes: 58 additions & 0 deletions core/agent/anthropic_agent.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
import Anthropic from "@anthropic-ai/sdk";
import { BaseAgent, ChatMessage, ModelConfig } from "./base_agent";

/**
 * BaseAgent implementation backed by the Anthropic Messages API.
 * System messages are hoisted into the API's `system` parameter, since
 * the Messages API rejects "system" as a role inside `messages`.
 */
export class AnthropicAgent implements BaseAgent {
  private client: Anthropic;
  // Stored so getModelConfig() can echo the key back without reading SDK
  // internals (the SDK's client.apiKey is typed string | null).
  private apiKey: string;
  modelName: string;
  temperature: number;
  maxTokens: number;

  constructor(config: ModelConfig) {
    this.client = new Anthropic({ apiKey: config.apiKey });
    this.apiKey = config.apiKey;
    this.modelName = config.modelName;
    this.temperature = config.temperature;
    this.maxTokens = config.maxTokens;
  }

  /** Send the conversation and resolve with the full reply text. */
  async chat(messages: ChatMessage[]): Promise<string> {
    const { system, turns } = this.splitMessages(messages);
    const response = await this.client.messages.create({
      model: this.modelName,
      system,
      messages: turns,
      temperature: this.temperature,
      max_tokens: this.maxTokens,
    });
    // content[0] may be absent or a non-text block (e.g. tool_use).
    const first = response.content[0];
    return first && first.type === "text" ? first.text : "";
  }

  /** Stream the reply, yielding text deltas as they arrive. */
  async *stream(messages: ChatMessage[]): AsyncGenerator<string> {
    const { system, turns } = this.splitMessages(messages);
    const stream = await this.client.messages.create({
      model: this.modelName,
      system,
      messages: turns,
      temperature: this.temperature,
      max_tokens: this.maxTokens,
      stream: true,
    });

    for await (const chunk of stream) {
      // Only text deltas carry reply text; other delta kinds
      // (e.g. input_json_delta) have no `text` field.
      if (
        chunk.type === "content_block_delta" &&
        chunk.delta.type === "text_delta"
      ) {
        yield chunk.delta.text;
      }
    }
  }

  /**
   * Split messages into the Anthropic `system` string (all system
   * messages joined with newlines) and the user/assistant turn list.
   */
  private splitMessages(messages: ChatMessage[]): {
    system: string | undefined;
    turns: { role: "user" | "assistant"; content: string }[];
  } {
    const systemParts: string[] = [];
    const turns: { role: "user" | "assistant"; content: string }[] = [];
    for (const msg of messages) {
      if (msg.role === "system") {
        systemParts.push(msg.content);
      } else {
        turns.push({ role: msg.role, content: msg.content });
      }
    }
    return {
      system: systemParts.length > 0 ? systemParts.join("\n") : undefined,
      turns,
    };
  }

  /** Snapshot of the configuration this agent was constructed with. */
  getModelConfig(): ModelConfig {
    return {
      modelName: this.modelName,
      temperature: this.temperature,
      maxTokens: this.maxTokens,
      apiKey: this.apiKey,
    };
  }
}
23 changes: 23 additions & 0 deletions core/agent/base_agent.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
// NOTE: the previous `import { ChatMessage, ModelConfig } from "../types"`
// was removed — it collided (duplicate identifiers) with the interfaces
// declared and exported below, which the sibling agent files import from
// "./base_agent".

/**
 * Common contract implemented by every LLM provider agent
 * (OpenAI, Anthropic, Bedrock, Cohere, ...).
 */
export interface BaseAgent {
  modelName: string;
  temperature: number;
  maxTokens: number;

  /** Send a full conversation; resolves with the complete reply text. */
  chat(messages: ChatMessage[]): Promise<string>;
  /** Send a full conversation; yields reply text incrementally. */
  stream(messages: ChatMessage[]): AsyncGenerator<string>;
  /** Snapshot of the configuration the agent was built with. */
  getModelConfig(): ModelConfig;
}

/** One turn of a conversation. */
export interface ChatMessage {
  role: "system" | "user" | "assistant";
  content: string;
}

/** Provider-agnostic model settings used to construct an agent. */
export interface ModelConfig {
  modelName: string;
  temperature: number;
  maxTokens: number;
  apiKey: string;
}
78 changes: 78 additions & 0 deletions core/agent/bedrock_agent.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
import {
  BedrockRuntimeClient,
  InvokeModelCommand,
  InvokeModelWithResponseStreamCommand,
} from "@aws-sdk/client-bedrock-runtime";
import { BaseAgent, ChatMessage, ModelConfig } from "./base_agent";

export class BedrockAgent implements BaseAgent {
private client: BedrockRuntimeClient;
modelName: string;
temperature: number;
maxTokens: number;

constructor(config: ModelConfig) {
this.client = new BedrockRuntimeClient({
credentials: {
accessKeyId: process.env.AWS_ACCESS_KEY_ID || "",
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY || "",
},
region: process.env.AWS_REGION || "us-east-1",
});
this.modelName = config.modelName;
this.temperature = config.temperature;
this.maxTokens = config.maxTokens;
}

async chat(messages: ChatMessage[]): Promise<string> {
const command = new InvokeModelCommand({
modelId: this.modelName,
body: JSON.stringify({
prompt: this.formatMessages(messages),
max_tokens: this.maxTokens,
temperature: this.temperature,
}),
});

const response = await this.client.send(command);
const responseBody = JSON.parse(new TextDecoder().decode(response.body));
return responseBody.completion;
}

async *stream(messages: ChatMessage[]): AsyncGenerator<string> {
const command = new InvokeModelCommand({
modelId: this.modelName,
body: JSON.stringify({
prompt: this.formatMessages(messages),
max_tokens: this.maxTokens,
temperature: this.temperature,
stream: true,
}),
});

const response = await this.client.send(command);
const reader = response.body.getReader();

while (true) {
const { done, value } = await reader.read();
if (done) break;

const chunk = JSON.parse(new TextDecoder().decode(value));
if (chunk.completion) {
yield chunk.completion;
}
}
}

private formatMessages(messages: ChatMessage[]): string {
return messages.map((msg) => `${msg.role}: ${msg.content}`).join("\n");
}

getModelConfig(): ModelConfig {
return {
modelName: this.modelName,
temperature: this.temperature,
maxTokens: this.maxTokens,
apiKey: "",
};
}
}
58 changes: 58 additions & 0 deletions core/agent/claude3_agent.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
import { Anthropic } from "@anthropic-ai/sdk";
import { BaseAgent, ChatMessage, ModelConfig } from "./base_agent";

/**
 * BaseAgent implementation pinned to the Claude 3 family via the
 * Anthropic Messages API. System messages are hoisted into the API's
 * `system` parameter — the previous remapping of system→user lost system
 * semantics and could produce consecutive same-role turns.
 */
export class Claude3Agent implements BaseAgent {
  private client: Anthropic;
  // Stored so getModelConfig() can return the real key instead of "".
  private apiKey: string;
  modelName: string;
  temperature: number;
  maxTokens: number;

  constructor(config: ModelConfig) {
    this.client = new Anthropic({ apiKey: config.apiKey });
    this.apiKey = config.apiKey;
    // Fall back to Claude 3 Opus when no model name is supplied.
    this.modelName = config.modelName || "claude-3-opus-20240229";
    this.temperature = config.temperature;
    this.maxTokens = config.maxTokens;
  }

  /** Send the conversation and resolve with the full reply text. */
  async chat(messages: ChatMessage[]): Promise<string> {
    const { system, turns } = this.splitMessages(messages);
    const response = await this.client.messages.create({
      model: this.modelName,
      system,
      messages: turns,
      temperature: this.temperature,
      max_tokens: this.maxTokens,
    });
    // content[0] may be absent or a non-text block (e.g. tool_use).
    const first = response.content[0];
    return first && first.type === "text" ? first.text : "";
  }

  /** Stream the reply, yielding text deltas as they arrive. */
  async *stream(messages: ChatMessage[]): AsyncGenerator<string> {
    const { system, turns } = this.splitMessages(messages);
    const stream = await this.client.messages.create({
      model: this.modelName,
      system,
      messages: turns,
      temperature: this.temperature,
      max_tokens: this.maxTokens,
      stream: true,
    });

    for await (const chunk of stream) {
      // Only text deltas carry reply text.
      if (
        chunk.type === "content_block_delta" &&
        chunk.delta.type === "text_delta"
      ) {
        yield chunk.delta.text;
      }
    }
  }

  /**
   * Split messages into the Anthropic `system` string (all system
   * messages joined with newlines) and the user/assistant turn list.
   */
  private splitMessages(messages: ChatMessage[]): {
    system: string | undefined;
    turns: { role: "user" | "assistant"; content: string }[];
  } {
    const systemParts: string[] = [];
    const turns: { role: "user" | "assistant"; content: string }[] = [];
    for (const msg of messages) {
      if (msg.role === "system") {
        systemParts.push(msg.content);
      } else {
        turns.push({ role: msg.role, content: msg.content });
      }
    }
    return {
      system: systemParts.length > 0 ? systemParts.join("\n") : undefined,
      turns,
    };
  }

  /** Snapshot of the configuration this agent was constructed with. */
  getModelConfig(): ModelConfig {
    return {
      modelName: this.modelName,
      temperature: this.temperature,
      maxTokens: this.maxTokens,
      apiKey: this.apiKey,
    };
  }
}
55 changes: 55 additions & 0 deletions core/agent/cohere_agent.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
import { CohereClient } from "cohere-ai";
import { BaseAgent, ChatMessage, ModelConfig } from "./base_agent";

/**
 * BaseAgent implementation backed by the Cohere Chat API
 * (cohere-ai Node SDK v7+).
 */
export class CohereAgent implements BaseAgent {
  private client: CohereClient;
  // Stored so getModelConfig() can return the real key instead of "".
  private apiKey: string;
  modelName: string;
  temperature: number;
  maxTokens: number;

  constructor(config: ModelConfig) {
    // The v7 SDK takes the API key as `token` (the previous `apiKey`
    // option is not a CohereClient constructor field).
    this.client = new CohereClient({ token: config.apiKey });
    this.apiKey = config.apiKey;
    this.modelName = config.modelName;
    this.temperature = config.temperature;
    this.maxTokens = config.maxTokens;
  }

  /** Single-shot chat; the conversation is flattened into `message`. */
  async chat(messages: ChatMessage[]): Promise<string> {
    const response = await this.client.chat({
      model: this.modelName,
      message: this.formatMessages(messages),
      temperature: this.temperature,
      // v7 request fields are camelCase (`maxTokens`, not `max_tokens`).
      maxTokens: this.maxTokens,
    });
    return response.text;
  }

  /**
   * Streaming chat. The SDK streams through `chatStream` — chat() has no
   * `stream` option — and text arrives in "text-generation" events.
   */
  async *stream(messages: ChatMessage[]): AsyncGenerator<string> {
    const stream = await this.client.chatStream({
      model: this.modelName,
      message: this.formatMessages(messages),
      temperature: this.temperature,
      maxTokens: this.maxTokens,
    });

    for await (const event of stream) {
      if (event.eventType === "text-generation") {
        yield event.text;
      }
    }
  }

  /**
   * Flatten the conversation to a single prompt string.
   * NOTE(review): roles are dropped; consider mapping prior turns to the
   * API's chatHistory parameter to preserve speaker attribution.
   */
  private formatMessages(messages: ChatMessage[]): string {
    return messages.map((msg) => msg.content).join("\n");
  }

  /** Snapshot of the configuration this agent was constructed with. */
  getModelConfig(): ModelConfig {
    return {
      modelName: this.modelName,
      temperature: this.temperature,
      maxTokens: this.maxTokens,
      apiKey: this.apiKey,
    };
  }
}
Loading

0 comments on commit d6e0e9c

Please sign in to comment.