Refactor chatbot initialization to use Credal for LLM interactions #2857

Draft: wants to merge 11 commits into base: integration/chat

5 changes: 5 additions & 0 deletions .changeset/few-penguins-sniff.md
@@ -0,0 +1,5 @@
---
'@leafygreen-ui/icon': minor
---

Adds `'Streaming'` glyph to the set
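
For context, a minimal usage sketch of the new glyph, assuming the usual @leafygreen-ui/icon pattern of selecting a glyph by name (the aria-label value is illustrative):

import Icon from '@leafygreen-ui/icon';

// Renders the newly added 'Streaming' glyph; any registered glyph name works the same way.
const StreamingIndicator = () => <Icon glyph="Streaming" aria-label="Streaming response" />;

export default StreamingIndicator;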
33 changes: 33 additions & 0 deletions apps/chatbot-server/.env.example
@@ -0,0 +1,33 @@
PORT=3030

# MongoDB config
MONGODB_USER=<YOUR_MONGODB_USER>
MONGODB_PASSWORD=<YOUR_MONGODB_PASSWORD>
MONGODB_PROJECT_URL=<YOUR_PROJECT_URL>
MONGODB_APP_NAME=LeafyGreenAI

VECTOR_SEARCH_INDEX_NAME="vector_index" # or whatever your index name is
MONGODB_DATABASE_NAME="mongodb-chatbot-framework-chatbot" # or whatever your database name is; it must contain the vector search index

# Azure OpenAI config
AZURE_OPENAI_API_KEY=<YOUR_AZURE_API_KEY> # either key from your Azure OpenAI resource
AZURE_OPENAI_ENDPOINT=https://<your-instance>.openai.azure.com/

AZURE_OPENAI_EMBEDDING_MODEL=text-embedding-ada-002
AZURE_OPENAI_API_EMBEDDING_DEPLOYMENT_NAME=text-embedding-ada-002
AZURE_OPENAI_API_EMBEDDING_DEPLOYMENT_URL=https://<your-instance>.openai.azure.com/openai/deployments/text-embedding-ada-002/embeddings?api-version=2023-05-15

AZURE_OPENAI_CHAT_COMPLETION_MODEL=gpt-4.1
AZURE_OPENAI_API_CHAT_COMPLETION_DEPLOYMENT_NAME=gpt-4.1
AZURE_OPENAI_API_CHAT_COMPLETION_DEPLOYMENT_URL=https://<your-instance>.openai.azure.com/openai/deployments/gpt-4.1/chat/completions?api-version=2025-01-01-preview

# Credal config
CREDAL_BASE_URL="https://rag.yourbaseurl.com/api"
CREDAL_API_TOKEN=<YOUR_CREDAL_API_TOKEN>
CREDAL_AGENT_ID=<YOUR_CREDAL_AGENT_ID>
CREDAL_USER_EMAIL=<YOUR_CREDAL_USER_EMAIL>

# Kanopy config
KANOPY_BINARY_PATH="/path/to/kanopy" # path to kanopy binary
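
The ingest config later in this diff imports a loadEnvVars helper from src/utils/loadEnv, which is not included in the PR. A minimal sketch of what it is assumed to do with the variables above, including deriving the MONGODB_CONNECTION_URI that ingest.config.ts destructures (the URI format and fallback values are assumptions, not the real implementation):

// src/utils/loadEnv.ts (sketch only; the real helper is not part of this diff)
import 'dotenv/config';

export function loadEnvVars() {
  const {
    MONGODB_USER,
    MONGODB_PASSWORD,
    MONGODB_PROJECT_URL,
    MONGODB_APP_NAME = 'LeafyGreenAI',
    MONGODB_DATABASE_NAME = 'mongodb-chatbot-framework-chatbot',
    AZURE_OPENAI_EMBEDDING_MODEL = 'text-embedding-ada-002',
  } = process.env;

  if (!MONGODB_USER || !MONGODB_PASSWORD || !MONGODB_PROJECT_URL) {
    throw new Error('Missing MongoDB credentials; see .env.example');
  }

  return {
    // Assumed Atlas-style connection string built from the pieces in .env.example
    MONGODB_CONNECTION_URI: `mongodb+srv://${MONGODB_USER}:${MONGODB_PASSWORD}@${MONGODB_PROJECT_URL}/?appName=${MONGODB_APP_NAME}`,
    MONGODB_DATABASE_NAME,
    AZURE_OPENAI_EMBEDDING_MODEL,
  };
}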
133 changes: 133 additions & 0 deletions apps/chatbot-server/.gitignore
@@ -0,0 +1,133 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
*.lcov

# nyc test coverage
.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# Snowpack dependency directory (https://snowpack.dev/)
web_modules/

# TypeScript cache
*.tsbuildinfo

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Optional stylelint cache
.stylelintcache

# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local

# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache

# Next.js build output
.next
out

# Nuxt.js build / generate output
.nuxt
dist

# Gatsby files
.cache/
# Uncomment the public line if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public

# vuepress build output
.vuepress/dist

# vuepress v2.x temp and cache directory
.temp
.cache

# Docusaurus cache and generated files
.docusaurus

# Serverless directories
.serverless/

# FuseBox cache
.fusebox/

# DynamoDB Local files
.dynamodb/

# TernJS port file
.tern-port

# Stores VSCode versions used for testing VSCode extensions
.vscode-test

# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

# ts build
build/
37 changes: 37 additions & 0 deletions apps/chatbot-server/package.json
@@ -0,0 +1,37 @@
{
"name": "lg-chatbot-server",
"version": "0.0.1",
"description": "",
"type": "module",
"main": "dist/index.js",
"module": "dist/esm/index.js",
"publishConfig": {
"access": "restricted"
},
"scripts": {
"build": "lg build-package",
"tsc": "lg build-ts",
"ingest": "pnpm build && ingest all --config ./dist/esm/ingest.config.js",
"dev": "tsx watch src/index.ts"
},
"keywords": [],
"author": "",
"license": "Apache-2.0",
"dependencies": {
"@credal/sdk": "^0.0.27",
"@emotion/css": "^11.13.5",
"@lg-tools/crawler": "workspace:^",
"dotenv": "^16.5.0",
"express": "^4.18.2",
"jsdom": "^26.1.0",
"lodash-es": "^4.17.21",
"mongodb-chatbot-server": "^0.11.0",
"mongodb-rag-core": "^0.7.0"
},
"devDependencies": {
"mongodb-rag-ingest": "^0.3.1",
"nodemon": "^3.0.1",
"tsx": "^4.19.4",
"typescript": "^5.8.0"
}
}
8 changes: 8 additions & 0 deletions apps/chatbot-server/rollup.config.mjs
@@ -0,0 +1,8 @@
import { esmConfig, umdConfig } from '@lg-tools/build/config/rollup.config.mjs';

const ingestESMConfig = {
...esmConfig,
input: './src/ingest/ingest.config.ts',
};

export default [esmConfig, umdConfig, ingestESMConfig];
71 changes: 71 additions & 0 deletions apps/chatbot-server/src/index.ts
@@ -0,0 +1,71 @@
import {
AppConfig,
logger,
makeApp,
SystemPrompt,
} from 'mongodb-chatbot-server';

import { initChatBot } from './init';

// System prompt for chatbot
const systemPrompt: SystemPrompt = {
role: 'system',
content: `You are an assistant to engineers and product designers using the LeafyGreen design system.
Answer their questions about the framework in a friendly conversational tone.

For questions regarding engineering and React components, provide code examples.
For questions regarding design and UX guidelines, provide sources.

Format your answers in Markdown.
Be concise in your answers.
`,
};

// Start the server and clean up resources on SIGINT.
const PORT = process.env.PORT || 3030;

const startServer = async () => {
const {
llm,
embeddedContentStore,
generateUserPrompt,
mongodbClient,
conversations,
} = await initChatBot();

// Create the MongoDB Chatbot Server Express.js app configuration
const config: AppConfig = {
conversationsRouterConfig: {
llm,
conversations,
generateUserPrompt,
systemPrompt,
},
maxRequestTimeoutMs: 30000,
};

logger.info('Starting server...');
const app = await makeApp(config);
const server = app.listen(PORT, () => {
logger.info(`Server listening on port: ${PORT}`);
});

process.on('SIGINT', async args => {
logger.info('SIGINT signal received', args);
await mongodbClient.close();
await embeddedContentStore.close();
await new Promise<void>((resolve, reject) => {
server.close((error: any) => {
error ? reject(error) : resolve();
});
});
process.exit(0); // exit cleanly once resources are closed
});
};

// startServer is async, so a synchronous try/catch would not catch its rejections.
startServer().catch(e => {
logger.error(`Fatal error: ${e}`);
process.exit(1);
});
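
Once the server is up, it can be exercised over plain HTTP. A rough smoke test, assuming the default conversation routes that mongodb-chatbot-server mounts under /api/v1 and a { message } request body; both are assumptions to verify against the framework docs:

// smoke-test.ts (illustrative; route paths and payload shape are assumptions)
const BASE_URL = 'http://localhost:3030/api/v1';

async function smokeTest() {
  // Create a new conversation.
  const createRes = await fetch(`${BASE_URL}/conversations`, { method: 'POST' });
  const conversation = await createRes.json();

  // Send a message to the conversation and log the assistant's reply.
  const messageRes = await fetch(
    `${BASE_URL}/conversations/${conversation._id}/messages`,
    {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ message: 'How do I use the LeafyGreen Button component?' }),
    },
  );
  console.log(await messageRes.json());
}

smokeTest().catch(console.error);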
61 changes: 61 additions & 0 deletions apps/chatbot-server/src/ingest/ingest.config.ts
@@ -0,0 +1,61 @@
import {
makeMongoDbEmbeddedContentStore,
makeMongoDbPageStore,
} from 'mongodb-rag-core';
import { Config, makeIngestMetaStore } from 'mongodb-rag-ingest';

import { loadEnvVars } from '../utils/loadEnv';
import { makeEmbedder } from '../utils/makeEmbedder';

import { leafygreenGithubSourceConstructor } from './sources/github-leafygreen-ui';
import { webSourceConstructor } from './utils/webSourceConstructor';

// Load project environment variables
const {
MONGODB_CONNECTION_URI,
MONGODB_DATABASE_NAME,
AZURE_OPENAI_EMBEDDING_MODEL,
} = loadEnvVars();

export default {
embedder: () => makeEmbedder(),
embeddedContentStore: () =>
makeMongoDbEmbeddedContentStore({
connectionUri: MONGODB_CONNECTION_URI,
databaseName: MONGODB_DATABASE_NAME,
searchIndex: {
embeddingName: AZURE_OPENAI_EMBEDDING_MODEL,
},
}),
pageStore: () =>
makeMongoDbPageStore({
connectionUri: MONGODB_CONNECTION_URI,
databaseName: MONGODB_DATABASE_NAME,
}),
ingestMetaStore: () =>
makeIngestMetaStore({
connectionUri: MONGODB_CONNECTION_URI,
databaseName: MONGODB_DATABASE_NAME,
entryId: 'all',
}),
chunkOptions: () => ({
minChunkSize: 15,
maxChunkSize: 1000,
overlap: 100,
}),
// Add data sources here
dataSources: async () => {
return Promise.all([
...[
'https://mongodb.design',
'https://react.dev/reference/react',
'https://developer.mozilla.org/en-US/docs/Web',
'https://css-tricks.com/category/articles',
'https://www.nngroup.com/articles',
'https://www.w3.org/WAI/standards-guidelines/wcag',
'https://atomicdesign.bradfrost.com/table-of-contents',
].map(source => webSourceConstructor(source, {})),
leafygreenGithubSourceConstructor(),
]);
},
} satisfies Config;
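
webSourceConstructor and leafygreenGithubSourceConstructor are not included in this diff. For context, a self-contained sketch of the shape a web source constructor is assumed to return, with local stand-ins for the mongodb-rag-core DataSource and Page types (field names and the crawling step are assumptions; the real helper presumably uses the @lg-tools/crawler and jsdom dependencies declared in this package):

// src/ingest/utils/webSourceConstructor.ts (sketch only; not the implementation in this PR)
type Page = {
  url: string;
  title?: string;
  body: string;
  format: string;
  sourceName: string;
  metadata?: Record<string, unknown>;
};

type DataSource = {
  name: string;
  fetchPages: () => Promise<Page[]>;
};

export function webSourceConstructor(
  rootUrl: string,
  options: { maxPages?: number } = {},
): DataSource {
  const sourceName = new URL(rootUrl).hostname;
  return {
    name: sourceName,
    async fetchPages() {
      // The real implementation would crawl rootUrl and convert each document into a Page;
      // a single placeholder page stands in for that here.
      return [
        {
          url: rootUrl,
          title: rootUrl,
          body: 'text extracted from the crawled page',
          format: 'md',
          sourceName,
          metadata: options.maxPages ? { maxPages: options.maxPages } : undefined,
        },
      ];
    },
  };
}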