From 2f1a2053f19c5920a4bb12ecb24a9d5364b27275 Mon Sep 17 00:00:00 2001
From: Ahmad Bilal
Date: Fri, 25 Oct 2024 19:00:12 +0500
Subject: [PATCH] =?UTF-8?q?=F0=9F=93=A6=20NEW:=20Human=20in=20the=20loop?=
 =?UTF-8?q?=20agent=20example?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../.env.baseai.example                       | 21 +++++
 .../agents/human-in-the-loop-agent/.gitignore |  9 +++
 .../agents/human-in-the-loop-agent/README.md  | 98 ++++++++++++++++++++++++
 .../baseai/baseai.config.ts                   | 18 +++++
 .../pipes/human-in-the-loop-support-agent.ts  | 49 ++++++++++++
 .../baseai/tools/handoff-to-human.ts          | 72 ++++++++++++++++++
 .../agents/human-in-the-loop-agent/index.ts   | 51 +++++++++++++
 .../human-in-the-loop-agent/package.json      | 22 ++++++
 8 files changed, 340 insertions(+)
 create mode 100644 examples/agents/human-in-the-loop-agent/.env.baseai.example
 create mode 100644 examples/agents/human-in-the-loop-agent/.gitignore
 create mode 100644 examples/agents/human-in-the-loop-agent/README.md
 create mode 100644 examples/agents/human-in-the-loop-agent/baseai/baseai.config.ts
 create mode 100644 examples/agents/human-in-the-loop-agent/baseai/pipes/human-in-the-loop-support-agent.ts
 create mode 100644 examples/agents/human-in-the-loop-agent/baseai/tools/handoff-to-human.ts
 create mode 100644 examples/agents/human-in-the-loop-agent/index.ts
 create mode 100644 examples/agents/human-in-the-loop-agent/package.json

diff --git a/examples/agents/human-in-the-loop-agent/.env.baseai.example b/examples/agents/human-in-the-loop-agent/.env.baseai.example
new file mode 100644
index 00000000..8c643651
--- /dev/null
+++ b/examples/agents/human-in-the-loop-agent/.env.baseai.example
@@ -0,0 +1,21 @@
+# !! SERVER SIDE ONLY !!
+# Keep all your API keys secret — use only on the server side.
+
+# TODO: ADD: Both in your production and local env files.
+# Langbase API key for your User or Org account.
+# How to get this API key https://langbase.com/docs/api-reference/api-keys
+LANGBASE_API_KEY=
+
+# TODO: ADD: LOCAL ONLY. Add only to local env files.
+# The following keys are needed for local pipe runs, for the providers you are using.
+# For Langbase, please add the key to your LLM keysets.
+# Read more: Langbase LLM Keysets https://langbase.com/docs/features/keysets
+OPENAI_API_KEY=
+ANTHROPIC_API_KEY=
+COHERE_API_KEY=
+FIREWORKS_API_KEY=
+GOOGLE_API_KEY=
+GROQ_API_KEY=
+MISTRAL_API_KEY=
+PERPLEXITY_API_KEY=
+TOGETHER_API_KEY=
diff --git a/examples/agents/human-in-the-loop-agent/.gitignore b/examples/agents/human-in-the-loop-agent/.gitignore
new file mode 100644
index 00000000..9b5994f6
--- /dev/null
+++ b/examples/agents/human-in-the-loop-agent/.gitignore
@@ -0,0 +1,9 @@
+# baseai
+**/.baseai/
+node_modules
+.env
+package-lock.json
+pnpm-lock.yaml
+# env file
+.env
+
diff --git a/examples/agents/human-in-the-loop-agent/README.md b/examples/agents/human-in-the-loop-agent/README.md
new file mode 100644
index 00000000..5cd7f059
--- /dev/null
+++ b/examples/agents/human-in-the-loop-agent/README.md
@@ -0,0 +1,98 @@
+![Human in the loop Agent by ⌘ BaseAI][cover]
+
+![License: MIT][mit] [![Fork on ⌘ Langbase][fork]][lb]
+
+## Build a Human In The Loop (HITL) Agent for IT Support with the BaseAI framework — ⌘ Langbase
+
+The **Human In The Loop (HITL) Agent** is a CLI-based application designed to handle IT support tasks efficiently while ensuring that complex or critical issues are escalated to human agents. This agent leverages a BaseAI pipe with tool calling capabilities, enabling it to interact dynamically with users, assess problem scenarios, and involve human agents when necessary.
+
+This AI Agent is built using the BaseAI framework. It leverages an agentic pipe that integrates 30+ LLMs (including OpenAI, Gemini, Mistral, Llama, Gemma, etc.) and can handle any data, with context sizes of up to 10M+ tokens, supported by memory. The framework is compatible with any front-end framework (such as React, Remix, Astro, Next.js), giving you, as a developer, the freedom to tailor your AI application exactly as you envision.
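+
+## How the handoff works
+
+The agent decides when to escalate by calling the `handoff_to_human` tool defined in `baseai/tools/handoff-to-human.ts`. In this example the tool's `run` function ships as an empty stub for you to fill in. The sketch below shows one possible implementation, not the shipped code: it assumes BaseAI passes the parsed tool arguments to the `run` function as a single object, and it simply logs the structured summary and returns a confirmation string for the model to relay. Adapt it to your own ticketing or paging system.
+
+```ts
+// A minimal sketch for baseai/tools/handoff-to-human.ts (illustrative only).
+// Adjust the signature to match how your BaseAI version invokes tool `run` functions.
+export async function handoffToHuman(args: {
+	issue_title: string;
+	affected_systems: string[];
+	error_message: string;
+	number_of_users_affected: number;
+	steps_attempted: string[];
+	severity_level: 'Low' | 'Medium' | 'High' | 'Critical';
+	additional_notes?: string;
+}) {
+	// In a real system, create a ticket or page the on-call engineer here.
+	console.log('Escalating to a human agent:', JSON.stringify(args, null, 2));
+	return `Issue "${args.issue_title}" (severity: ${args.severity_level}) has been handed off to a human specialist.`;
+}
+```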
+
+## How to use
+
+Navigate to `examples/agents/human-in-the-loop-agent` and run the following commands:
+
+```sh
+# Navigate to baseai/examples/agents/human-in-the-loop-agent
+cd examples/agents/human-in-the-loop-agent
+
+# Install the dependencies
+npm install
+
+# Copy the .env.baseai.example file to create a .env file
+# and add all the relevant API keys to it
+cp .env.baseai.example .env
+
+# Run the local baseai dev server to test the example (uses port 9000 on localhost)
+npx baseai dev
+
+# Run the agent
+npx tsx index.ts
+```
+
+## Features
+
+- Human In The Loop Agent for IT Support — Built with [BaseAI framework and agentic Pipe ⌘][qs]
+- Composable Agents — build and compose agents with BaseAI
+- Deploy this pipe to Langbase using `npx baseai@latest deploy` – [Learn more about deployment][deploy]
+
+## Learn more
+
+1. Check the [Learning path to build an agentic AI pipe with ⌘ BaseAI][learn]
+2. Read the [source code on GitHub][gh] for this agent example
+3. Go through the documentation: [Pipe Quick Start][qs]
+4. Learn more about [Memory features in ⌘ BaseAI][memory]
+5. Learn more about [Tool calls support in ⌘ BaseAI][toolcalls]
+
+
+> NOTE:
+> This is a BaseAI project; you can deploy BaseAI pipes, memory, and tool calls on Langbase.
+
+---
+
+## Authors
+
+This project was created by [Langbase][lb] team members, with contributions from:
+
+- Muhammad-Ali Danish - Software Engineer, [Langbase][lb]
+**_Built by ⌘ [Langbase.com][lb] — Ship hyper-personalized AI assistants with memory!_**
+
+[lb]: https://langbase.com
+[gh]: https://github.com/LangbaseInc/baseai/tree/main/examples/agents/human-in-the-loop-agent
+[cover]:https://raw.githubusercontent.com/LangbaseInc/docs-images/main/baseai/baseai-cover.png
+[download]:https://download-directory.github.io/?url=https://github.com/LangbaseInc/baseai/tree/main/examples/agents/human-in-the-loop-agent
+[learn]:https://baseai.dev/learn
+[memory]:https://baseai.dev/docs/memory/quickstart
+[toolcalls]:https://baseai.dev/docs/tools/quickstart
+[deploy]:https://baseai.dev/docs/deployment/authentication
+[signup]: https://langbase.fyi/io
+[qs]:https://baseai.dev/docs/pipe/quickstart
+[deploy]:https://baseai.dev/docs/deployment/authentication
+[docs]:https://baseai.dev/docs
+[xaa]:https://x.com/MrAhmadAwais
+[xab]:https://x.com/AhmadBilalDev
+[local]:http://localhost:9000
+[mit]: https://img.shields.io/badge/license-MIT-blue.svg?style=for-the-badge&color=%23000000
+[fork]: https://img.shields.io/badge/FORK%20ON-%E2%8C%98%20Langbase-000000.svg?style=for-the-badge&logo=%E2%8C%98%20Langbase&logoColor=000000
diff --git a/examples/agents/human-in-the-loop-agent/baseai/baseai.config.ts b/examples/agents/human-in-the-loop-agent/baseai/baseai.config.ts
new file mode 100644
index 00000000..f0ee748f
--- /dev/null
+++ b/examples/agents/human-in-the-loop-agent/baseai/baseai.config.ts
@@ -0,0 +1,18 @@
+import type { BaseAIConfig } from 'baseai';
+
+export const config: BaseAIConfig = {
+	log: {
+		isEnabled: true,
+		logSensitiveData: false,
+		pipe: true,
+		'pipe.completion': true,
+		'pipe.request': true,
+		'pipe.response': true,
+		tool: true,
+		memory: true
+	},
+	memory: {
+		useLocalEmbeddings: false
+	},
+	envFilePath: '.env'
+};
diff --git a/examples/agents/human-in-the-loop-agent/baseai/pipes/human-in-the-loop-support-agent.ts b/examples/agents/human-in-the-loop-agent/baseai/pipes/human-in-the-loop-support-agent.ts
new file mode 100644
index 00000000..3129b9e2
--- /dev/null
+++ b/examples/agents/human-in-the-loop-agent/baseai/pipes/human-in-the-loop-support-agent.ts
@@ -0,0 +1,49 @@
+import {PipeI} from '@baseai/core';
+import toolHandoffToHuman from '../tools/handoff-to-human';
+
+const pipeHumanInTheLoopSupportAgent = (): PipeI => ({
+	// Replace with your API key https://langbase.com/docs/api-reference/api-keys
+	apiKey: process.env.LANGBASE_API_KEY!,
+	name: 'human-in-the-loop-support-agent',
+	description:
+		'An IT Support Human in the Loop (HITL) agent with the ability to hand off to a human support resource based on issue severity or user request',
+	status: 'public',
+	model: 'openai:gpt-4o-mini',
+	stream: true,
+	json: false,
+	store: true,
+	moderate: true,
+	top_p: 1,
+	max_tokens: 1000,
+	temperature: 0.7,
+	presence_penalty: 1,
+	frequency_penalty: 1,
+	stop: [],
+	tool_choice: 'auto',
+	parallel_tool_calls: true,
+	messages: [
+		{
+			role: 'system',
+			content: `You are an IT Support Escalation Agent. Your role is to assist users by gathering information, diagnosing common IT issues, and providing clear troubleshooting steps. When an issue requires human intervention, you will escalate it using the handoff_to_human tool.
+
+Guidelines:
+
+1. Collect Key Information:
+   Ask the user for details such as the problem description, affected systems, error messages, and steps already attempted.
+
+2. Diagnose and Suggest Solutions:
+   Offer basic troubleshooting steps in clear, easy-to-follow instructions.
+
+3. Assess Escalation Need:
+   If an issue is complex, critical, or unresolved after initial steps, call the handoff_to_human tool with information about the issue's severity, the number of affected users, and urgency.
+
+4. Be Empathetic and Transparent:
+   If escalating, reassure the user and let them know a specialist will handle the issue promptly.`,
+		},
+	],
+	variables: [],
+	memory: [],
+	tools: [toolHandoffToHuman()],
+});
+
+export default pipeHumanInTheLoopSupportAgent;
diff --git a/examples/agents/human-in-the-loop-agent/baseai/tools/handoff-to-human.ts b/examples/agents/human-in-the-loop-agent/baseai/tools/handoff-to-human.ts
new file mode 100644
index 00000000..7aa244c5
--- /dev/null
+++ b/examples/agents/human-in-the-loop-agent/baseai/tools/handoff-to-human.ts
@@ -0,0 +1,72 @@
+import {ToolI} from '@baseai/core';
+
+export async function handoffToHuman() {
+	// Add your tool logic here (see the README for a sketch of one possible implementation)
+	// This function will be called when the tool is executed
+}
+
+const toolHandoffToHuman = (): ToolI => ({
+	run: handoffToHuman,
+	type: 'function' as const,
+	function: {
+		name: 'handoff_to_human',
+		description:
+			'Generates a structured summary of an IT issue for human agents to quickly understand and take action.',
+		parameters: {
+			type: 'object',
+			properties: {
+				issue_title: {
+					type: 'string',
+					description: 'A brief title summarizing the core issue.',
+				},
+				affected_systems: {
+					type: 'array',
+					items: {
+						type: 'string',
+					},
+					description:
+						'A list of systems, applications, or services impacted by the issue.',
+				},
+				error_message: {
+					type: 'string',
+					description:
+						'The specific error message(s) reported by the user or detected during troubleshooting.',
+				},
+				number_of_users_affected: {
+					type: 'number',
+					description:
+						'The estimated number of users impacted by this issue.',
+				},
+				steps_attempted: {
+					type: 'array',
+					items: {
+						type: 'string',
+					},
+					description:
+						'A list of troubleshooting steps that the AI agent suggested and that the user has already tried.',
+				},
+				severity_level: {
+					type: 'string',
+					enum: ['Low', 'Medium', 'High', 'Critical'],
+					description:
+						'The severity of the issue based on the AI agent’s assessment.',
+				},
+				additional_notes: {
+					type: 'string',
+					description:
+						'Any other contextual information, observations, or details provided by the user that may help the human agent.',
+				},
+			},
+			required: [
+				'issue_title',
+				'affected_systems',
+				'error_message',
+				'number_of_users_affected',
+				'steps_attempted',
+				'severity_level',
+			],
+		},
+	},
+});
+
+export default toolHandoffToHuman;
diff --git a/examples/agents/human-in-the-loop-agent/index.ts b/examples/agents/human-in-the-loop-agent/index.ts
new file mode 100644
index 00000000..8fe900ba
--- /dev/null
+++ b/examples/agents/human-in-the-loop-agent/index.ts
@@ -0,0 +1,51 @@
+import 'dotenv/config';
+import {Message, Pipe} from '@baseai/core';
+import inquirer from 'inquirer';
+import ora from 'ora';
+import chalk from 'chalk';
+import pipeHumanInTheLoopSupportAgent from './baseai/pipes/human-in-the-loop-support-agent';
+
+const pipe = new Pipe(pipeHumanInTheLoopSupportAgent());
+
+async function main() {
+	// Messages array for keeping track of the conversation
+	const messages: Message[] = [];
+
+	while (true) {
+		const {userMsg} = await inquirer.prompt([
+			{
+				type: 'input',
+				name: 'userMsg',
+				message: chalk.blue(
+					'Enter your query (or type "exit" to quit):',
+				),
+			},
+		]);
+
+		if (userMsg.toLowerCase() === 'exit') {
+			console.log(chalk.green('Goodbye!'));
+			break;
+		}
+
+		const spinner = ora('Processing your request...').start();
+		messages.push({role: 'user', content: userMsg});
+
+		try {
+			const {completion} = await pipe.run({
+				messages,
+			});
+			messages.push({
+				role: 'assistant',
+				content: completion,
+			});
+			spinner.stop();
+			console.log(chalk.cyan('Agent:'));
+			console.log(completion);
+		} catch (error) {
+			spinner.stop();
+			console.error(chalk.red('Error processing your request:'), error);
+		}
+	}
+}
+
+main();
diff --git a/examples/agents/human-in-the-loop-agent/package.json b/examples/agents/human-in-the-loop-agent/package.json
new file mode 100644
index 00000000..402d2791
--- /dev/null
+++ b/examples/agents/human-in-the-loop-agent/package.json
@@ -0,0 +1,22 @@
+{
+	"name": "human-in-the-loop-agent",
+	"version": "1.0.0",
+	"main": "index.js",
+	"scripts": {
+		"baseai": "baseai"
+	},
+	"keywords": [],
+	"author": "",
+	"license": "ISC",
+	"description": "Human in the loop (HITL) IT support agent example built with BaseAI.",
+	"dependencies": {
+		"@baseai/core": "^0.9.19",
+		"chalk": "^5.3.0",
+		"dotenv": "^16.4.5",
+		"inquirer": "^12.0.0",
+		"ora": "^8.1.0"
+	},
+	"devDependencies": {
+		"baseai": "^0.9.19"
+	}
+}